added orai
This commit is contained in:
parent
95a50296a4
commit
52e7220f4d
BIN
20241106/2023Okt/e_infoismfor_23okt_fl.zip
Normal file
Binary file not shown.
@@ -0,0 +1,22 @@
Example LAN (200 IP addresses required)
network ID: 172.16.0.0/24
subnet mask: 255.255.255.0
usable address range: 172.16.0.1 - 172.16.0.254
broadcast address: 172.16.0.255


1. IRODA (58 IP addresses required)
network ID:
subnet mask:
usable address range:
broadcast address:
2. NYOMTATO (3 IP addresses required)
network ID:
subnet mask:
usable address range:
broadcast address:
3. UGYFEL (2 IP addresses required)
network ID:
subnet mask:
usable address range:
broadcast address:
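For reference, the filled-in example row above matches the standard sizing check (a worked illustration added here, not part of the original answer sheet): with a /24 prefix,

$$2^{32-24} - 2 = 256 - 2 = 254 \ge 200,$$

so the mask 255.255.255.0 covers the 200 required addresses, leaving 172.16.0.1 - 172.16.0.254 as usable host addresses and 172.16.0.255 as the broadcast address.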
@@ -0,0 +1,5 @@
6;22;37;56;66
8;27;31;50;63
2;19;X;47;73
12;29;33;52;62
11;16;42;51;75
@@ -0,0 +1,5 @@
5;16;31;57;62
3;22;39;52;70
11;26;X;50;71
4;24;35;53;72
14;25;40;54;61
@@ -0,0 +1,5 @@
1;28;34;53;68
5;20;44;60;66
8;27;X;55;61
9;23;40;46;62
10;17;37;47;67
@@ -0,0 +1,5 @@
7;26;39;46;73
1;18;44;54;68
3;30;X;57;62
12;27;32;52;72
8;16;36;55;66
@@ -0,0 +1,5 @@
11;25;45;51;75
9;24;34;54;63
5;16;X;48;65
2;29;43;52;62
15;28;39;58;74
@@ -0,0 +1,5 @@
8;30;36;56;66
1;22;43;60;68
9;26;X;48;65
12;25;33;59;67
4;24;41;50;74
@@ -0,0 +1,5 @@
2;28;43;59;74
7;25;38;53;69
1;22;X;47;70
12;23;33;57;65
15;29;39;52;67
@@ -0,0 +1,5 @@
8;21;40;58;67
14;30;38;57;75
10;19;X;55;72
15;28;31;49;70
13;25;34;50;74
@@ -0,0 +1,5 @@
11;17;43;47;67
12;20;44;60;70
9;21;X;50;62
14;29;45;52;61
4;16;36;58;63
@@ -0,0 +1,5 @@
12;26;40;56;69
8;21;34;50;72
10;18;X;53;75
9;24;32;58;73
11;25;42;52;61
@@ -0,0 +1,5 @@
1;22;35;49;73
12;23;39;46;67
8;25;X;54;62
14;28;42;52;63
4;16;45;53;68
@@ -0,0 +1,11 @@
Andi.txt
Anna.txt
Bence.txt
Edit.txt
Gabi.txt
Laci.txt
Marika.txt
Norbi.txt
Pali.txt
Ricsi.txt
Tomi.txt
File diff suppressed because it is too large
Binary file not shown.
@@ -0,0 +1,6 @@
class: "all pizza"
img: "assets/images/f3.png"
alt: "Húsimádó pizza"
card title: "Húsimádó Pizza"
Description: "BBQ hús, sonka, szalámi, kolbász, szalonna úgy, hogy épphogy ne folyjon le a tésztáról :)"
Price: "$17"
Binary file not shown.
Binary file not shown.
42
20241106/2023Okt/hamburgermegoldas.sql
Normal file
@@ -0,0 +1,42 @@
/* Task 10 */
CREATE DATABASE hamburger CHARACTER SET="utf8" COLLATE="utf8_hungarian_ci";

/* Task 12 */
SELECT * FROM menutetel
WHERE menutetel.ar = 2500;

/* Task 13 */
UPDATE menutetel SET ar = 2300 WHERE nev LIKE "Grill pizza 32cm";

/* Task 14 */
SELECT felhasznalo.nev as `vendeg_nev`,
COUNT(rendeles.id) AS `rendeles_db`
FROM rendeles
INNER JOIN felhasznalo ON felhasznalo.id = rendeles.felhasznaloId
GROUP BY felhasznalo.nev
ORDER BY `rendeles_db` DESC LIMIT 9;

/* Task 15 */
INSERT INTO menutetel (nev, ar, etelkatId) VALUES ("Boston Tészta", 2200, 3);

/* Task 16 */

SELECT rendeles.id as `rendeles_azon`,
rendeleselem.id as `rendeleselem_azon`,
menutetel.nev as `menutetel_neve`
FROM rendeles
INNER JOIN felhasznalo ON felhasznalo.id = rendeles.felhasznaloId
INNER JOIN rendeleselem ON rendeles.id = rendeleselem.rendelesId
INNER JOIN menutetel ON rendeleselem.menutetelId = menutetel.id
WHERE felhasznalo.nev LIKE "Balázs Flóra";

/* Task 17 */

SELECT felhasznalo.nev as `felhasznalo_nev`,
SUM(menutetel.ar * rendeleselem.mennyiseg) * 1.1 as `osszesen`
FROM rendeleselem
INNER JOIN rendeles ON rendeleselem.rendelesId = rendeles.id
INNER JOIN felhasznalo ON rendeles.felhasznaloId = felhasznalo.id
INNER JOIN menutetel ON menutetel.id = rendeleselem.menutetelId
GROUP BY `felhasznalo_nev`
ORDER BY `osszesen` DESC LIMIT 10;
BIN
20241106/2024Maj/e_infoismfor_24maj_fl.zip
Normal file
Binary file not shown.
@@ -0,0 +1,17 @@
Example LAN (200 IP addresses required)
network ID: 172.16.0.0/24
subnet mask: 255.255.255.0
usable address range: 172.16.0.1 - 172.16.0.254
broadcast address: 172.16.0.255


1. PC (60 IP addresses required)
network ID:
subnet mask:
usable address range:
broadcast address:
2. WIFI (2 IP addresses required)
network ID:
subnet mask:
usable address range:
broadcast address:
@@ -0,0 +1,25 @@
class Feladvany
{
    public void MegoldasokKeresese(int kiralynoSora)
    {
        // You do not need to modify this method!
        if (kiralynoSora == SorokSzama)
        {
            MegoldasSorszama++;
            TablaKiir();
        }
        else
        {
            for (int aktOszlop = 0; aktOszlop < OszlopokSzama; aktOszlop++)
            {
                if (EzJoMezo(kiralynoSora, aktOszlop))
                {
                    Tabla[kiralynoSora, aktOszlop] = 1;
                    MegoldasokKeresese(kiralynoSora + 1);
                    Tabla[kiralynoSora, aktOszlop] = 0;
                }
            }
        }
    }
}

@@ -0,0 +1,26 @@
public class Feladvany {



    public void megoldasokKeresese(int kiralynoSora) {
        // You do not need to modify this method!
        if (kiralynoSora == this.sorokSzama)
        {
            this.megoldasSorszama++;
            this.tablaKiir();
        }
        else
        {
            for (int aktOszlop = 0; aktOszlop < this.oszlopokSzama; aktOszlop++)
            {
                if (this.ezJoMezo(kiralynoSora, aktOszlop))
                {
                    this.tabla[kiralynoSora][aktOszlop] = 1;
                    this.megoldasokKeresese(kiralynoSora + 1);
                    this.tabla[kiralynoSora][aktOszlop] = 0;
                }
            }
        }
    }

}
@@ -0,0 +1,87 @@
-- Create the tables

-- Event types
CREATE TABLE tipus
(
    id int NOT NULL AUTO_INCREMENT,
    nev varchar(50) NOT NULL UNIQUE,
    primary key (id)
);

CREATE TABLE helyszin
(
    id int NOT NULL AUTO_INCREMENT,
    nev varchar(50) NOT NULL UNIQUE,
    primary key (id)
);

CREATE TABLE kapcsolat
(
    id int NOT NULL AUTO_INCREMENT,
    nev varchar(50) NOT NULL UNIQUE,
    telefon varchar(30) NOT NULL,
    email varchar(50) NOT NULL,
    cegnev varchar(50),
    primary key (id)
);

CREATE TABLE rendezveny
(
    id int NOT NULL AUTO_INCREMENT,
    kapcsolatId int NOT NULL,
    idopont date NOT NULL,
    napokszama int NOT NULL,
    helyszinId int NOT NULL,
    letszam int NOT NULL,
    tipusId int NOT NULL,
    primary key (id),
    key kapcsolatId (kapcsolatId),
    CONSTRAINT FK_kapcsolat_kapcsolatId FOREIGN KEY (kapcsolatId) REFERENCES kapcsolat (id),
    key helyszinId (helyszinId),
    CONSTRAINT FK_helyszin_helyszinId FOREIGN KEY (helyszinId) REFERENCES helyszin (id),
    key tipusId (tipusId),
    CONSTRAINT FK_tipus_tipusId FOREIGN KEY (tipusId) REFERENCES tipus (id)
);

-- Insert the data
INSERT INTO tipus (id, nev)
VALUES (1, 'szabadtéri'),
    (2, 'beltéri'),
    (3, 'online'),
    (4, 'tréning'),
    (5, 'céges rendezvény');

INSERT INTO helyszin (id, nev)
VALUES (1, 'Megrendelő telephelye'),
    (2, 'Budapest'),
    (3, 'Szeged'),
    (4, 'Debrecen'),
    (5, 'Pécs'),
    (6, 'Balaton'),
    (7, 'Velencei tó'),
    (8, 'Egyéb');

INSERT INTO kapcsolat (id, nev, telefon, email, cegnev)
VALUES (1, 'Kiss Piroska', '+3620123456', 'kiss.piroska@paprika.hu', 'Paprika Paradicsom'),
    (2, 'Nagy Béla', '+3670523456', 'nagy.bela@bugfix.hu', NULL),
    (3, 'Vass Alajos', '+36309998877', 'vass.alajos@vaskalapos.hu', 'Vaskalapos Hulladékhasznosító'),
    (4, 'Nagy Lilla', '+36308768768', 'nagy.lilla@szoke-ciklon.hu', 'Szőke Ciklon Illatszergyár'),
    (5, 'Major Anna', '+36201347761', 'major.anna@organic.hu', 'Organic Gyógyszergyár'),
    (6, 'Balogh Béla', '+36304673753', 'balogh.bela@nadpalca.hu', 'Nádpálca Oktatástechnikai Kereskelem'),
    (7, 'Szabó Krisztina', '+36205049928', 'szabo.krisztina@kaqkk.hu', 'Kaqkk Kft.'),
    (8, 'Hanta Balázs', '+36705463728', 'hanta.balazs@hanta.hu', 'Hanta Pályázatíró Kft.'),
    (9, 'Mekk Elek', '+36305161721', 'mekk.elek@talan-holnap.hu', 'Talán Holnap Karbantartás');

INSERT INTO rendezveny (id, kapcsolatId, idopont, napokszama, helyszinId, letszam, tipusID)
VALUES (1, 5, '2023-12-01', 3, 3, 100, 4),
    (2, 3, '2023-12-01', 1, 1, 120, 5),
    (3, 1, '2023-12-06', 1, 1, 40, 5),
    (4, 7, '2023-12-07', 3, 3, 20, 4),
    (5, 4, '2023-12-08', 1, 2, 70, 2),
    (6, 6, '2023-12-09', 1, 1, 30, 3),
    (7, 8, '2023-12-11', 3, 4, 10, 4),
    (8, 9, '2023-12-12', 1, 1, 30, 2),
    (9, 2, '2023-12-13', 5, 5, 30, 4),
    (10, 5, '2023-12-14', 1, 2, 230, 5),
    (11, 7, '2023-12-15', 1, 1, 65, 5),
    (12, 8, '2023-12-15', 1, 2, 40, 5);
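To illustrate how the foreign keys above tie the tables together, a query along the following lines (an illustrative sketch added here, not part of the original script; the column aliases are made up) lists every event with its contact, venue and type:

```sql
-- Illustrative only: join rendezveny to its lookup tables via the foreign keys above
SELECT rendezveny.idopont,
       kapcsolat.nev AS kapcsolattarto,
       helyszin.nev AS helyszin_nev,
       tipus.nev AS tipus_nev,
       rendezveny.letszam
FROM rendezveny
INNER JOIN kapcsolat ON rendezveny.kapcsolatId = kapcsolat.id
INNER JOIN helyszin ON rendezveny.helyszinId = helyszin.id
INNER JOIN tipus ON rendezveny.tipusId = tipus.id
ORDER BY rendezveny.idopont;
```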
@@ -0,0 +1,11 @@
Image:
assets/img/online.jpg

Image alt text:
Online

Title:
Online team building

Description:
If the colleagues work too far away from each other, or for some reason the team building cannot be carried out with an in-person meeting, we can offer a number of online options.
Binary file not shown.
@@ -0,0 +1,51 @@
var createError = require('http-errors');
var express = require('express');
var path = require('path');
var cookieParser = require('cookie-parser');
var logger = require('morgan');

var router = require('./routes/api');

var app = express();

// Jade engine for rendering the error page
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'jade');

// Static pages
app.use(express.static(path.join(__dirname, 'web')));

// Extra middleware
app.use(logger('dev'));
app.use(express.json());
app.use(express.urlencoded({ extended: false }));
app.use(cookieParser());

// Enable CORS
app.use(function (req, res, next) {
  res.header('Access-Control-Allow-Origin', '*');
  res.header('Access-Control-Allow-Headers', 'Origin, X-Requested-With, Content-Type, Accept');
  next()
});

// Set up the router
app.use('/api', router);

// Catch and handle 404 errors
app.use(function(req, res, next) {
  next(createError(404));
});

// Error handler
app.use(function(err, req, res, next) {
  // set locals, only providing error in development
  res.locals.message = err.message;
  res.locals.error = req.app.get('env') === 'development' ? err : {};

  // render the error page
  res.status(err.status || 500);
  console.error("Hiba történt! ", err.message)
  res.render('error');
});

module.exports = app;
@@ -0,0 +1,33 @@
const fs = require('fs');
const path = require('path');
let sqlTasks = require('./sqlTasks.json');
const os = require('os');

const getSqlTasks = () => {
    return new Promise((resolve, reject) => {
        fs.readFile(path.join(__dirname, "../lekerdezesek/lekerdezesek.sql"), 'utf8', (err, data) => {
            if (err) {
                reject(err)
            } else {
                let sqlQueriesFromFile = data.split("***").splice(1);
                sqlQueriesFromFile.forEach(sqlQuery => {
                    let lines = sqlQuery.split(os.EOL);
                    const id = lines[1].split(".")[0];
                    lines = lines.splice(2);
                    if (lines.length > 0) {
                        const sqlQueryText = lines.reduce((sql, line) => sql + " " + line).trim();
                        let sqlTask = sqlTasks.filter(task => task.id === Number.parseInt(id))[0];
                        if (sqlTask) {
                            sqlTask.sql = sqlQueryText
                        } else {
                            console.error("Nem található ilyen ID-jű lekérdezés: ", id);
                        }
                    }
                });
                resolve(sqlTasks)
            }
        })
    })
};

module.exports = getSqlTasks;
@@ -0,0 +1,37 @@
[
    {
        "id": 12,
        "description": "Adatbázis létrehozása",
        "adminPage": false
    },
    {
        "id": 14,
        "description": "Legalább 100 fős rendezvények száma",
        "adminPage": true
    },
    {
        "id": 15,
        "description": "Szegeden tartott rendezvények",
        "adminPage": true
    },
    {
        "id": 16,
        "description": "Nagy Bélához tartozó cégnév megadása",
        "adminPage": false
    },
    {
        "id": 17,
        "description": "A 2 legnagyobb átlaglétszámú rendezvénytípus",
        "adminPage": true
    },
    {
        "id": 18,
        "description": "Helyszínek, ahol nem tartottak rendezvényt",
        "adminPage": true
    },
    {
        "id": 19,
        "description": "Tréningek szervezési díja",
        "adminPage": true
    }
]
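As an illustration of the kind of statement these entries describe, task 14 ("Legalább 100 fős rendezvények száma", i.e. the number of events with at least 100 attendees) could be answered against the rendezveny table created earlier with something like the following sketch (not the official solution; the alias is made up):

```sql
-- Illustrative only: count events with at least 100 attendees (task 14)
SELECT COUNT(*) AS rendezvenyek_szama
FROM rendezveny
WHERE letszam >= 100;
```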
@@ -0,0 +1,90 @@
#!/usr/bin/env node

/**
 * Module dependencies.
 */

var app = require('../app');
var http = require('http');

/**
 * Get port from environment and store in Express.
 */

var port = normalizePort(process.env.PORT || '8080');
app.set('port', port);

/**
 * Create HTTP server.
 */

var server = http.createServer(app);

/**
 * Listen on provided port, on all network interfaces.
 */

server.listen(port);
server.on('error', onError);
server.on('listening', onListening);

/**
 * Normalize a port into a number, string, or false.
 */

function normalizePort(val) {
  var port = parseInt(val, 10);

  if (isNaN(port)) {
    // named pipe
    return val;
  }

  if (port >= 0) {
    // port number
    return port;
  }

  return false;
}

/**
 * Event listener for HTTP server "error" event.
 */

function onError(error) {
  if (error.syscall !== 'listen') {
    throw error;
  }

  var bind = typeof port === 'string'
    ? 'Pipe ' + port
    : 'Port ' + port;

  // handle specific listen errors with friendly messages
  switch (error.code) {
    case 'EACCES':
      console.error(bind + ' requires elevated privileges');
      process.exit(1);
      break;
    case 'EADDRINUSE':
      console.error(bind + ' is already in use');
      process.exit(1);
      break;
    default:
      throw error;
  }
}

/**
 * Event listener for HTTP server "listening" event.
 */

function onListening() {
  var addr = server.address();
  var bind = typeof addr === 'string'
    ? 'pipe ' + addr
    : 'port ' + addr.port;
  console.log('A szerver az alábbi címen érhető el: http://localhost:' + addr.port)
  console.log('Az admin felület pedig: http://localhost:' + addr.port + '/admin')
}
@@ -0,0 +1,23 @@
Paste the SQL commands written to solve the tasks after the task number!
***
12. feladat

***
14. feladat

***
15. feladat

***
16. feladat

***
17. feladat

***
18. feladat

***
19. feladat

***
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../acorn/bin/acorn" "$@"
  ret=$?
else
  node "$basedir/../acorn/bin/acorn" "$@"
  ret=$?
fi
exit $ret
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
  "%~dp0\node.exe" "%~dp0\..\acorn\bin\acorn" %*
) ELSE (
  @SETLOCAL
  @SET PATHEXT=%PATHEXT:;.JS;=;%
  node "%~dp0\..\acorn\bin\acorn" %*
)
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../clean-css/bin/cleancss" "$@"
  ret=$?
else
  node "$basedir/../clean-css/bin/cleancss" "$@"
  ret=$?
fi
exit $ret
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
  "%~dp0\node.exe" "%~dp0\..\clean-css\bin\cleancss" %*
) ELSE (
  @SETLOCAL
  @SET PATHEXT=%PATHEXT:;.JS;=;%
  node "%~dp0\..\clean-css\bin\cleancss" %*
)
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../jade/bin/jade.js" "$@"
  ret=$?
else
  node "$basedir/../jade/bin/jade.js" "$@"
  ret=$?
fi
exit $ret
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
  "%~dp0\node.exe" "%~dp0\..\jade\bin\jade.js" %*
) ELSE (
  @SETLOCAL
  @SET PATHEXT=%PATHEXT:;.JS;=;%
  node "%~dp0\..\jade\bin\jade.js" %*
)
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../mime/cli.js" "$@"
  ret=$?
else
  node "$basedir/../mime/cli.js" "$@"
  ret=$?
fi
exit $ret
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
  "%~dp0\node.exe" "%~dp0\..\mime\cli.js" %*
) ELSE (
  @SETLOCAL
  @SET PATHEXT=%PATHEXT:;.JS;=;%
  node "%~dp0\..\mime\cli.js" %*
)
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../mkdirp/bin/cmd.js" "$@"
  ret=$?
else
  node "$basedir/../mkdirp/bin/cmd.js" "$@"
  ret=$?
fi
exit $ret
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
  "%~dp0\node.exe" "%~dp0\..\mkdirp\bin\cmd.js" %*
) ELSE (
  @SETLOCAL
  @SET PATHEXT=%PATHEXT:;.JS;=;%
  node "%~dp0\..\mkdirp\bin\cmd.js" %*
)
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../uglify-js/bin/uglifyjs" "$@"
  ret=$?
else
  node "$basedir/../uglify-js/bin/uglifyjs" "$@"
  ret=$?
fi
exit $ret
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
  "%~dp0\node.exe" "%~dp0\..\uglify-js\bin\uglifyjs" %*
) ELSE (
  @SETLOCAL
  @SET PATHEXT=%PATHEXT:;.JS;=;%
  node "%~dp0\..\uglify-js\bin\uglifyjs" %*
)
@ -0,0 +1,236 @@
|
||||||
|
1.3.7 / 2019-04-29
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: negotiator@0.6.2
|
||||||
|
- Fix sorting charset, encoding, and language with extra parameters
|
||||||
|
|
||||||
|
1.3.6 / 2019-04-28
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.24
|
||||||
|
- deps: mime-db@~1.40.0
|
||||||
|
|
||||||
|
1.3.5 / 2018-02-28
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.18
|
||||||
|
- deps: mime-db@~1.33.0
|
||||||
|
|
||||||
|
1.3.4 / 2017-08-22
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.16
|
||||||
|
- deps: mime-db@~1.29.0
|
||||||
|
|
||||||
|
1.3.3 / 2016-05-02
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.11
|
||||||
|
- deps: mime-db@~1.23.0
|
||||||
|
* deps: negotiator@0.6.1
|
||||||
|
- perf: improve `Accept` parsing speed
|
||||||
|
- perf: improve `Accept-Charset` parsing speed
|
||||||
|
- perf: improve `Accept-Encoding` parsing speed
|
||||||
|
- perf: improve `Accept-Language` parsing speed
|
||||||
|
|
||||||
|
1.3.2 / 2016-03-08
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.10
|
||||||
|
- Fix extension of `application/dash+xml`
|
||||||
|
- Update primary extension for `audio/mp4`
|
||||||
|
- deps: mime-db@~1.22.0
|
||||||
|
|
||||||
|
1.3.1 / 2016-01-19
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.9
|
||||||
|
- deps: mime-db@~1.21.0
|
||||||
|
|
||||||
|
1.3.0 / 2015-09-29
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.7
|
||||||
|
- deps: mime-db@~1.19.0
|
||||||
|
* deps: negotiator@0.6.0
|
||||||
|
- Fix including type extensions in parameters in `Accept` parsing
|
||||||
|
- Fix parsing `Accept` parameters with quoted equals
|
||||||
|
- Fix parsing `Accept` parameters with quoted semicolons
|
||||||
|
- Lazy-load modules from main entry point
|
||||||
|
- perf: delay type concatenation until needed
|
||||||
|
- perf: enable strict mode
|
||||||
|
- perf: hoist regular expressions
|
||||||
|
- perf: remove closures getting spec properties
|
||||||
|
- perf: remove a closure from media type parsing
|
||||||
|
- perf: remove property delete from media type parsing
|
||||||
|
|
||||||
|
1.2.13 / 2015-09-06
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.6
|
||||||
|
- deps: mime-db@~1.18.0
|
||||||
|
|
||||||
|
1.2.12 / 2015-07-30
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.4
|
||||||
|
- deps: mime-db@~1.16.0
|
||||||
|
|
||||||
|
1.2.11 / 2015-07-16
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.3
|
||||||
|
- deps: mime-db@~1.15.0
|
||||||
|
|
||||||
|
1.2.10 / 2015-07-01
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.2
|
||||||
|
- deps: mime-db@~1.14.0
|
||||||
|
|
||||||
|
1.2.9 / 2015-06-08
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.1
|
||||||
|
- perf: fix deopt during mapping
|
||||||
|
|
||||||
|
1.2.8 / 2015-06-07
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.0
|
||||||
|
- deps: mime-db@~1.13.0
|
||||||
|
* perf: avoid argument reassignment & argument slice
|
||||||
|
* perf: avoid negotiator recursive construction
|
||||||
|
* perf: enable strict mode
|
||||||
|
* perf: remove unnecessary bitwise operator
|
||||||
|
|
||||||
|
1.2.7 / 2015-05-10
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: negotiator@0.5.3
|
||||||
|
- Fix media type parameter matching to be case-insensitive
|
||||||
|
|
||||||
|
1.2.6 / 2015-05-07
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.11
|
||||||
|
- deps: mime-db@~1.9.1
|
||||||
|
* deps: negotiator@0.5.2
|
||||||
|
- Fix comparing media types with quoted values
|
||||||
|
- Fix splitting media types with quoted commas
|
||||||
|
|
||||||
|
1.2.5 / 2015-03-13
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.10
|
||||||
|
- deps: mime-db@~1.8.0
|
||||||
|
|
||||||
|
1.2.4 / 2015-02-14
|
||||||
|
==================
|
||||||
|
|
||||||
|
* Support Node.js 0.6
|
||||||
|
* deps: mime-types@~2.0.9
|
||||||
|
- deps: mime-db@~1.7.0
|
||||||
|
* deps: negotiator@0.5.1
|
||||||
|
- Fix preference sorting to be stable for long acceptable lists
|
||||||
|
|
||||||
|
1.2.3 / 2015-01-31
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.8
|
||||||
|
- deps: mime-db@~1.6.0
|
||||||
|
|
||||||
|
1.2.2 / 2014-12-30
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.7
|
||||||
|
- deps: mime-db@~1.5.0
|
||||||
|
|
||||||
|
1.2.1 / 2014-12-30
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.5
|
||||||
|
- deps: mime-db@~1.3.1
|
||||||
|
|
||||||
|
1.2.0 / 2014-12-19
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: negotiator@0.5.0
|
||||||
|
- Fix list return order when large accepted list
|
||||||
|
- Fix missing identity encoding when q=0 exists
|
||||||
|
- Remove dynamic building of Negotiator class
|
||||||
|
|
||||||
|
1.1.4 / 2014-12-10
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.4
|
||||||
|
- deps: mime-db@~1.3.0
|
||||||
|
|
||||||
|
1.1.3 / 2014-11-09
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.3
|
||||||
|
- deps: mime-db@~1.2.0
|
||||||
|
|
||||||
|
1.1.2 / 2014-10-14
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: negotiator@0.4.9
|
||||||
|
- Fix error when media type has invalid parameter
|
||||||
|
|
||||||
|
1.1.1 / 2014-09-28
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.2
|
||||||
|
- deps: mime-db@~1.1.0
|
||||||
|
* deps: negotiator@0.4.8
|
||||||
|
- Fix all negotiations to be case-insensitive
|
||||||
|
- Stable sort preferences of same quality according to client order
|
||||||
|
|
||||||
|
1.1.0 / 2014-09-02
|
||||||
|
==================
|
||||||
|
|
||||||
|
* update `mime-types`
|
||||||
|
|
||||||
|
1.0.7 / 2014-07-04
|
||||||
|
==================
|
||||||
|
|
||||||
|
* Fix wrong type returned from `type` when match after unknown extension
|
||||||
|
|
||||||
|
1.0.6 / 2014-06-24
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: negotiator@0.4.7
|
||||||
|
|
||||||
|
1.0.5 / 2014-06-20
|
||||||
|
==================
|
||||||
|
|
||||||
|
* fix crash when unknown extension given
|
||||||
|
|
||||||
|
1.0.4 / 2014-06-19
|
||||||
|
==================
|
||||||
|
|
||||||
|
* use `mime-types`
|
||||||
|
|
||||||
|
1.0.3 / 2014-06-11
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: negotiator@0.4.6
|
||||||
|
- Order by specificity when quality is the same
|
||||||
|
|
||||||
|
1.0.2 / 2014-05-29
|
||||||
|
==================
|
||||||
|
|
||||||
|
* Fix interpretation when header not in request
|
||||||
|
* deps: pin negotiator@0.4.5
|
||||||
|
|
||||||
|
1.0.1 / 2014-01-18
|
||||||
|
==================
|
||||||
|
|
||||||
|
* Identity encoding isn't always acceptable
|
||||||
|
* deps: negotiator@~0.4.0
|
||||||
|
|
||||||
|
1.0.0 / 2013-12-27
|
||||||
|
==================
|
||||||
|
|
||||||
|
* Genesis
|
|
@ -0,0 +1,23 @@
|
||||||
|
(The MIT License)
|
||||||
|
|
||||||
|
Copyright (c) 2014 Jonathan Ong <me@jongleberry.com>
|
||||||
|
Copyright (c) 2015 Douglas Christopher Wilson <doug@somethingdoug.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining
|
||||||
|
a copy of this software and associated documentation files (the
|
||||||
|
'Software'), to deal in the Software without restriction, including
|
||||||
|
without limitation the rights to use, copy, modify, merge, publish,
|
||||||
|
distribute, sublicense, and/or sell copies of the Software, and to
|
||||||
|
permit persons to whom the Software is furnished to do so, subject to
|
||||||
|
the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be
|
||||||
|
included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
||||||
|
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||||
|
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||||
|
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||||
|
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||||
|
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||||
|
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
@ -0,0 +1,142 @@
|
||||||
|
# accepts
|
||||||
|
|
||||||
|
[![NPM Version][npm-version-image]][npm-url]
|
||||||
|
[![NPM Downloads][npm-downloads-image]][npm-url]
|
||||||
|
[![Node.js Version][node-version-image]][node-version-url]
|
||||||
|
[![Build Status][travis-image]][travis-url]
|
||||||
|
[![Test Coverage][coveralls-image]][coveralls-url]
|
||||||
|
|
||||||
|
Higher level content negotiation based on [negotiator](https://www.npmjs.com/package/negotiator).
|
||||||
|
Extracted from [koa](https://www.npmjs.com/package/koa) for general use.
|
||||||
|
|
||||||
|
In addition to negotiator, it allows:
|
||||||
|
|
||||||
|
- Allows types as an array or arguments list, ie `(['text/html', 'application/json'])`
|
||||||
|
as well as `('text/html', 'application/json')`.
|
||||||
|
- Allows type shorthands such as `json`.
|
||||||
|
- Returns `false` when no types match
|
||||||
|
- Treats non-existent headers as `*`
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
This is a [Node.js](https://nodejs.org/en/) module available through the
|
||||||
|
[npm registry](https://www.npmjs.com/). Installation is done using the
|
||||||
|
[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally):
|
||||||
|
|
||||||
|
```sh
|
||||||
|
$ npm install accepts
|
||||||
|
```
|
||||||
|
|
||||||
|
## API
|
||||||
|
|
||||||
|
<!-- eslint-disable no-unused-vars -->
|
||||||
|
|
||||||
|
```js
|
||||||
|
var accepts = require('accepts')
|
||||||
|
```
|
||||||
|
|
||||||
|
### accepts(req)
|
||||||
|
|
||||||
|
Create a new `Accepts` object for the given `req`.
|
||||||
|
|
||||||
|
#### .charset(charsets)
|
||||||
|
|
||||||
|
Return the first accepted charset. If nothing in `charsets` is accepted,
|
||||||
|
then `false` is returned.
|
||||||
|
|
||||||
|
#### .charsets()
|
||||||
|
|
||||||
|
Return the charsets that the request accepts, in the order of the client's
|
||||||
|
preference (most preferred first).
|
||||||
|
|
||||||
|
#### .encoding(encodings)
|
||||||
|
|
||||||
|
Return the first accepted encoding. If nothing in `encodings` is accepted,
|
||||||
|
then `false` is returned.
|
||||||
|
|
||||||
|
#### .encodings()
|
||||||
|
|
||||||
|
Return the encodings that the request accepts, in the order of the client's
|
||||||
|
preference (most preferred first).
|
||||||
|
|
||||||
|
#### .language(languages)
|
||||||
|
|
||||||
|
Return the first accepted language. If nothing in `languages` is accepted,
|
||||||
|
then `false` is returned.
|
||||||
|
|
||||||
|
#### .languages()
|
||||||
|
|
||||||
|
Return the languages that the request accepts, in the order of the client's
|
||||||
|
preference (most preferred first).
|
||||||
|
|
||||||
|
#### .type(types)
|
||||||
|
|
||||||
|
Return the first accepted type (and it is returned as the same text as what
|
||||||
|
appears in the `types` array). If nothing in `types` is accepted, then `false`
|
||||||
|
is returned.
|
||||||
|
|
||||||
|
The `types` array can contain full MIME types or file extensions. Any value
|
||||||
|
that is not a full MIME types is passed to `require('mime-types').lookup`.
|
||||||
|
|
||||||
|
#### .types()
|
||||||
|
|
||||||
|
Return the types that the request accepts, in the order of the client's
|
||||||
|
preference (most preferred first).
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
### Simple type negotiation
|
||||||
|
|
||||||
|
This simple example shows how to use `accepts` to return a different typed
|
||||||
|
respond body based on what the client wants to accept. The server lists it's
|
||||||
|
preferences in order and will get back the best match between the client and
|
||||||
|
server.
|
||||||
|
|
||||||
|
```js
|
||||||
|
var accepts = require('accepts')
|
||||||
|
var http = require('http')
|
||||||
|
|
||||||
|
function app (req, res) {
|
||||||
|
var accept = accepts(req)
|
||||||
|
|
||||||
|
// the order of this list is significant; should be server preferred order
|
||||||
|
switch (accept.type(['json', 'html'])) {
|
||||||
|
case 'json':
|
||||||
|
res.setHeader('Content-Type', 'application/json')
|
||||||
|
res.write('{"hello":"world!"}')
|
||||||
|
break
|
||||||
|
case 'html':
|
||||||
|
res.setHeader('Content-Type', 'text/html')
|
||||||
|
res.write('<b>hello, world!</b>')
|
||||||
|
break
|
||||||
|
default:
|
||||||
|
// the fallback is text/plain, so no need to specify it above
|
||||||
|
res.setHeader('Content-Type', 'text/plain')
|
||||||
|
res.write('hello, world!')
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
res.end()
|
||||||
|
}
|
||||||
|
|
||||||
|
http.createServer(app).listen(3000)
|
||||||
|
```
|
||||||
|
|
||||||
|
You can test this out with the cURL program:
|
||||||
|
```sh
|
||||||
|
curl -I -H'Accept: text/html' http://localhost:3000/
|
||||||
|
```
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
[MIT](LICENSE)
|
||||||
|
|
||||||
|
[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/accepts/master
|
||||||
|
[coveralls-url]: https://coveralls.io/r/jshttp/accepts?branch=master
|
||||||
|
[node-version-image]: https://badgen.net/npm/node/accepts
|
||||||
|
[node-version-url]: https://nodejs.org/en/download
|
||||||
|
[npm-downloads-image]: https://badgen.net/npm/dm/accepts
|
||||||
|
[npm-url]: https://npmjs.org/package/accepts
|
||||||
|
[npm-version-image]: https://badgen.net/npm/v/accepts
|
||||||
|
[travis-image]: https://badgen.net/travis/jshttp/accepts/master
|
||||||
|
[travis-url]: https://travis-ci.org/jshttp/accepts
|
|
@ -0,0 +1,238 @@
|
||||||
|
/*!
|
||||||
|
* accepts
|
||||||
|
* Copyright(c) 2014 Jonathan Ong
|
||||||
|
* Copyright(c) 2015 Douglas Christopher Wilson
|
||||||
|
* MIT Licensed
|
||||||
|
*/
|
||||||
|
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Module dependencies.
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
|
||||||
|
var Negotiator = require('negotiator')
|
||||||
|
var mime = require('mime-types')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Module exports.
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
module.exports = Accepts
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new Accepts object for the given req.
|
||||||
|
*
|
||||||
|
* @param {object} req
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
function Accepts (req) {
|
||||||
|
if (!(this instanceof Accepts)) {
|
||||||
|
return new Accepts(req)
|
||||||
|
}
|
||||||
|
|
||||||
|
this.headers = req.headers
|
||||||
|
this.negotiator = new Negotiator(req)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the given `type(s)` is acceptable, returning
|
||||||
|
* the best match when true, otherwise `undefined`, in which
|
||||||
|
* case you should respond with 406 "Not Acceptable".
|
||||||
|
*
|
||||||
|
* The `type` value may be a single mime type string
|
||||||
|
* such as "application/json", the extension name
|
||||||
|
* such as "json" or an array `["json", "html", "text/plain"]`. When a list
|
||||||
|
* or array is given the _best_ match, if any is returned.
|
||||||
|
*
|
||||||
|
* Examples:
|
||||||
|
*
|
||||||
|
* // Accept: text/html
|
||||||
|
* this.types('html');
|
||||||
|
* // => "html"
|
||||||
|
*
|
||||||
|
* // Accept: text/*, application/json
|
||||||
|
* this.types('html');
|
||||||
|
* // => "html"
|
||||||
|
* this.types('text/html');
|
||||||
|
* // => "text/html"
|
||||||
|
* this.types('json', 'text');
|
||||||
|
* // => "json"
|
||||||
|
* this.types('application/json');
|
||||||
|
* // => "application/json"
|
||||||
|
*
|
||||||
|
* // Accept: text/*, application/json
|
||||||
|
* this.types('image/png');
|
||||||
|
* this.types('png');
|
||||||
|
* // => undefined
|
||||||
|
*
|
||||||
|
* // Accept: text/*;q=.5, application/json
|
||||||
|
* this.types(['html', 'json']);
|
||||||
|
* this.types('html', 'json');
|
||||||
|
* // => "json"
|
||||||
|
*
|
||||||
|
* @param {String|Array} types...
|
||||||
|
* @return {String|Array|Boolean}
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
Accepts.prototype.type =
|
||||||
|
Accepts.prototype.types = function (types_) {
|
||||||
|
var types = types_
|
||||||
|
|
||||||
|
// support flattened arguments
|
||||||
|
if (types && !Array.isArray(types)) {
|
||||||
|
types = new Array(arguments.length)
|
||||||
|
for (var i = 0; i < types.length; i++) {
|
||||||
|
types[i] = arguments[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// no types, return all requested types
|
||||||
|
if (!types || types.length === 0) {
|
||||||
|
return this.negotiator.mediaTypes()
|
||||||
|
}
|
||||||
|
|
||||||
|
// no accept header, return first given type
|
||||||
|
if (!this.headers.accept) {
|
||||||
|
return types[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
var mimes = types.map(extToMime)
|
||||||
|
var accepts = this.negotiator.mediaTypes(mimes.filter(validMime))
|
||||||
|
var first = accepts[0]
|
||||||
|
|
||||||
|
return first
|
||||||
|
? types[mimes.indexOf(first)]
|
||||||
|
: false
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return accepted encodings or best fit based on `encodings`.
|
||||||
|
*
|
||||||
|
* Given `Accept-Encoding: gzip, deflate`
|
||||||
|
* an array sorted by quality is returned:
|
||||||
|
*
|
||||||
|
* ['gzip', 'deflate']
|
||||||
|
*
|
||||||
|
* @param {String|Array} encodings...
|
||||||
|
* @return {String|Array}
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
Accepts.prototype.encoding =
|
||||||
|
Accepts.prototype.encodings = function (encodings_) {
|
||||||
|
var encodings = encodings_
|
||||||
|
|
||||||
|
// support flattened arguments
|
||||||
|
if (encodings && !Array.isArray(encodings)) {
|
||||||
|
encodings = new Array(arguments.length)
|
||||||
|
for (var i = 0; i < encodings.length; i++) {
|
||||||
|
encodings[i] = arguments[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// no encodings, return all requested encodings
|
||||||
|
if (!encodings || encodings.length === 0) {
|
||||||
|
return this.negotiator.encodings()
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.negotiator.encodings(encodings)[0] || false
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return accepted charsets or best fit based on `charsets`.
|
||||||
|
*
|
||||||
|
* Given `Accept-Charset: utf-8, iso-8859-1;q=0.2, utf-7;q=0.5`
|
||||||
|
* an array sorted by quality is returned:
|
||||||
|
*
|
||||||
|
* ['utf-8', 'utf-7', 'iso-8859-1']
|
||||||
|
*
|
||||||
|
* @param {String|Array} charsets...
|
||||||
|
* @return {String|Array}
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
Accepts.prototype.charset =
|
||||||
|
Accepts.prototype.charsets = function (charsets_) {
|
||||||
|
var charsets = charsets_
|
||||||
|
|
||||||
|
// support flattened arguments
|
||||||
|
if (charsets && !Array.isArray(charsets)) {
|
||||||
|
charsets = new Array(arguments.length)
|
||||||
|
for (var i = 0; i < charsets.length; i++) {
|
||||||
|
charsets[i] = arguments[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// no charsets, return all requested charsets
|
||||||
|
if (!charsets || charsets.length === 0) {
|
||||||
|
return this.negotiator.charsets()
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.negotiator.charsets(charsets)[0] || false
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return accepted languages or best fit based on `langs`.
|
||||||
|
*
|
||||||
|
* Given `Accept-Language: en;q=0.8, es, pt`
|
||||||
|
* an array sorted by quality is returned:
|
||||||
|
*
|
||||||
|
* ['es', 'pt', 'en']
|
||||||
|
*
|
||||||
|
* @param {String|Array} langs...
|
||||||
|
* @return {Array|String}
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
Accepts.prototype.lang =
|
||||||
|
Accepts.prototype.langs =
|
||||||
|
Accepts.prototype.language =
|
||||||
|
Accepts.prototype.languages = function (languages_) {
|
||||||
|
var languages = languages_
|
||||||
|
|
||||||
|
// support flattened arguments
|
||||||
|
if (languages && !Array.isArray(languages)) {
|
||||||
|
languages = new Array(arguments.length)
|
||||||
|
for (var i = 0; i < languages.length; i++) {
|
||||||
|
languages[i] = arguments[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// no languages, return all requested languages
|
||||||
|
if (!languages || languages.length === 0) {
|
||||||
|
return this.negotiator.languages()
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.negotiator.languages(languages)[0] || false
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert extnames to mime.
|
||||||
|
*
|
||||||
|
* @param {String} type
|
||||||
|
* @return {String}
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
|
||||||
|
function extToMime (type) {
|
||||||
|
return type.indexOf('/') === -1
|
||||||
|
? mime.lookup(type)
|
||||||
|
: type
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if mime is valid.
|
||||||
|
*
|
||||||
|
* @param {String} type
|
||||||
|
* @return {String}
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
|
||||||
|
function validMime (type) {
|
||||||
|
return typeof type === 'string'
|
||||||
|
}
|
|
@ -0,0 +1,86 @@
|
||||||
|
{
|
||||||
|
"_from": "accepts@~1.3.7",
|
||||||
|
"_id": "accepts@1.3.7",
|
||||||
|
"_inBundle": false,
|
||||||
|
"_integrity": "sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==",
|
||||||
|
"_location": "/accepts",
|
||||||
|
"_phantomChildren": {},
|
||||||
|
"_requested": {
|
||||||
|
"type": "range",
|
||||||
|
"registry": true,
|
||||||
|
"raw": "accepts@~1.3.7",
|
||||||
|
"name": "accepts",
|
||||||
|
"escapedName": "accepts",
|
||||||
|
"rawSpec": "~1.3.7",
|
||||||
|
"saveSpec": null,
|
||||||
|
"fetchSpec": "~1.3.7"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/express"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz",
|
||||||
|
"_shasum": "531bc726517a3b2b41f850021c6cc15eaab507cd",
|
||||||
|
"_spec": "accepts@~1.3.7",
|
||||||
|
"_where": "C:\\Users\\oracle\\Desktop\\Forest\\kekhegy\\kekhegy\\node_modules\\express",
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/jshttp/accepts/issues"
|
||||||
|
},
|
||||||
|
"bundleDependencies": false,
|
||||||
|
"contributors": [
|
||||||
|
{
|
||||||
|
"name": "Douglas Christopher Wilson",
|
||||||
|
"email": "doug@somethingdoug.com"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Jonathan Ong",
|
||||||
|
"email": "me@jongleberry.com",
|
||||||
|
"url": "http://jongleberry.com"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"dependencies": {
|
||||||
|
"mime-types": "~2.1.24",
|
||||||
|
"negotiator": "0.6.2"
|
||||||
|
},
|
||||||
|
"deprecated": false,
|
||||||
|
"description": "Higher-level content negotiation",
|
||||||
|
"devDependencies": {
|
||||||
|
"deep-equal": "1.0.1",
|
||||||
|
"eslint": "5.16.0",
|
||||||
|
"eslint-config-standard": "12.0.0",
|
||||||
|
"eslint-plugin-import": "2.17.2",
|
||||||
|
"eslint-plugin-markdown": "1.0.0",
|
||||||
|
"eslint-plugin-node": "8.0.1",
|
||||||
|
"eslint-plugin-promise": "4.1.1",
|
||||||
|
"eslint-plugin-standard": "4.0.0",
|
||||||
|
"mocha": "6.1.4",
|
||||||
|
"nyc": "14.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"LICENSE",
|
||||||
|
"HISTORY.md",
|
||||||
|
"index.js"
|
||||||
|
],
|
||||||
|
"homepage": "https://github.com/jshttp/accepts#readme",
|
||||||
|
"keywords": [
|
||||||
|
"content",
|
||||||
|
"negotiation",
|
||||||
|
"accept",
|
||||||
|
"accepts"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"name": "accepts",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/jshttp/accepts.git"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"lint": "eslint --plugin markdown --ext js,md .",
|
||||||
|
"test": "mocha --reporter spec --check-leaks --bail test/",
|
||||||
|
"test-cov": "nyc --reporter=html --reporter=text npm test",
|
||||||
|
"test-travis": "nyc --reporter=text npm test"
|
||||||
|
},
|
||||||
|
"version": "1.3.7"
|
||||||
|
}
|
|
@ -0,0 +1,19 @@
|
||||||
|
Copyright (c) 2014 Forbes Lindesay
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
|
@ -0,0 +1,76 @@
|
||||||
|
# acorn-globals
|
||||||
|
|
||||||
|
Detect global variables in JavaScript using acorn
|
||||||
|
|
||||||
|
[![Build Status](https://img.shields.io/travis/ForbesLindesay/acorn-globals/master.svg)](https://travis-ci.org/ForbesLindesay/acorn-globals)
|
||||||
|
[![Dependency Status](https://img.shields.io/david/ForbesLindesay/acorn-globals.svg)](https://david-dm.org/ForbesLindesay/acorn-globals)
|
||||||
|
[![NPM version](https://img.shields.io/npm/v/acorn-globals.svg)](https://www.npmjs.org/package/acorn-globals)
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
npm install acorn-globals
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
detect.js
|
||||||
|
|
||||||
|
```js
|
||||||
|
var fs = require('fs');
|
||||||
|
var detect = require('acorn-globals');
|
||||||
|
|
||||||
|
var src = fs.readFileSync(__dirname + '/input.js', 'utf8');
|
||||||
|
|
||||||
|
var scope = detect(src);
|
||||||
|
console.dir(scope);
|
||||||
|
```
|
||||||
|
|
||||||
|
input.js
|
||||||
|
|
||||||
|
```js
|
||||||
|
var x = 5;
|
||||||
|
var y = 3, z = 2;
|
||||||
|
|
||||||
|
w.foo();
|
||||||
|
w = 2;
|
||||||
|
|
||||||
|
RAWR=444;
|
||||||
|
RAWR.foo();
|
||||||
|
|
||||||
|
BLARG=3;
|
||||||
|
|
||||||
|
foo(function () {
|
||||||
|
var BAR = 3;
|
||||||
|
process.nextTick(function (ZZZZZZZZZZZZ) {
|
||||||
|
console.log('beep boop');
|
||||||
|
var xyz = 4;
|
||||||
|
x += 10;
|
||||||
|
x.zzzzzz;
|
||||||
|
ZZZ=6;
|
||||||
|
});
|
||||||
|
function doom () {
|
||||||
|
}
|
||||||
|
ZZZ.foo();
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(xyz);
|
||||||
|
```
|
||||||
|
|
||||||
|
output:
|
||||||
|
|
||||||
|
```
|
||||||
|
$ node example/detect.js
|
||||||
|
[ { name: 'BLARG', nodes: [ [Object] ] },
|
||||||
|
{ name: 'RAWR', nodes: [ [Object], [Object] ] },
|
||||||
|
{ name: 'ZZZ', nodes: [ [Object], [Object] ] },
|
||||||
|
{ name: 'console', nodes: [ [Object], [Object] ] },
|
||||||
|
{ name: 'foo', nodes: [ [Object] ] },
|
||||||
|
{ name: 'process', nodes: [ [Object] ] },
|
||||||
|
{ name: 'w', nodes: [ [Object], [Object] ] },
|
||||||
|
{ name: 'xyz', nodes: [ [Object] ] } ]
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT
|
|
@ -0,0 +1,180 @@
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
var acorn = require('acorn');
|
||||||
|
var walk = require('acorn/dist/walk');
|
||||||
|
|
||||||
|
function isScope(node) {
|
||||||
|
return node.type === 'FunctionExpression' || node.type === 'FunctionDeclaration' || node.type === 'ArrowFunctionExpression' || node.type === 'Program';
|
||||||
|
}
|
||||||
|
function isBlockScope(node) {
|
||||||
|
return node.type === 'BlockStatement' || isScope(node);
|
||||||
|
}
|
||||||
|
|
||||||
|
function declaresArguments(node) {
|
||||||
|
return node.type === 'FunctionExpression' || node.type === 'FunctionDeclaration';
|
||||||
|
}
|
||||||
|
|
||||||
|
function declaresThis(node) {
|
||||||
|
return node.type === 'FunctionExpression' || node.type === 'FunctionDeclaration';
|
||||||
|
}
|
||||||
|
|
||||||
|
function reallyParse(source) {
|
||||||
|
try {
|
||||||
|
return acorn.parse(source, {
|
||||||
|
ecmaVersion: 6,
|
||||||
|
allowReturnOutsideFunction: true,
|
||||||
|
allowImportExportEverywhere: true,
|
||||||
|
allowHashBang: true
|
||||||
|
});
|
||||||
|
} catch (ex) {
|
||||||
|
return acorn.parse(source, {
|
||||||
|
ecmaVersion: 5,
|
||||||
|
allowReturnOutsideFunction: true,
|
||||||
|
allowImportExportEverywhere: true,
|
||||||
|
allowHashBang: true
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
module.exports = findGlobals;
|
||||||
|
module.exports.parse = reallyParse;
|
||||||
|
function findGlobals(source) {
|
||||||
|
var globals = [];
|
||||||
|
var ast;
|
||||||
|
// istanbul ignore else
|
||||||
|
if (typeof source === 'string') {
|
||||||
|
ast = reallyParse(source);
|
||||||
|
} else {
|
||||||
|
ast = source;
|
||||||
|
}
|
||||||
|
// istanbul ignore if
|
||||||
|
if (!(ast && typeof ast === 'object' && ast.type === 'Program')) {
|
||||||
|
throw new TypeError('Source must be either a string of JavaScript or an acorn AST');
|
||||||
|
}
|
||||||
|
var declareFunction = function (node) {
|
||||||
|
var fn = node;
|
||||||
|
fn.locals = fn.locals || {};
|
||||||
|
node.params.forEach(function (node) {
|
||||||
|
declarePattern(node, fn);
|
||||||
|
    });
    if (node.id) {
      fn.locals[node.id.name] = true;
    }
  }
  var declarePattern = function (node, parent) {
    switch (node.type) {
      case 'Identifier':
        parent.locals[node.name] = true;
        break;
      case 'ObjectPattern':
        node.properties.forEach(function (node) {
          declarePattern(node.value, parent);
        });
        break;
      case 'ArrayPattern':
        node.elements.forEach(function (node) {
          if (node) declarePattern(node, parent);
        });
        break;
      case 'RestElement':
        declarePattern(node.argument, parent);
        break;
      case 'AssignmentPattern':
        declarePattern(node.left, parent);
        break;
      // istanbul ignore next
      default:
        throw new Error('Unrecognized pattern type: ' + node.type);
    }
  }
  var declareModuleSpecifier = function (node, parents) {
    ast.locals = ast.locals || {};
    ast.locals[node.local.name] = true;
  }
  walk.ancestor(ast, {
    'VariableDeclaration': function (node, parents) {
      var parent = null;
      for (var i = parents.length - 1; i >= 0 && parent === null; i--) {
        if (node.kind === 'var' ? isScope(parents[i]) : isBlockScope(parents[i])) {
          parent = parents[i];
        }
      }
      parent.locals = parent.locals || {};
      node.declarations.forEach(function (declaration) {
        declarePattern(declaration.id, parent);
      });
    },
    'FunctionDeclaration': function (node, parents) {
      var parent = null;
      for (var i = parents.length - 2; i >= 0 && parent === null; i--) {
        if (isScope(parents[i])) {
          parent = parents[i];
        }
      }
      parent.locals = parent.locals || {};
      parent.locals[node.id.name] = true;
      declareFunction(node);
    },
    'Function': declareFunction,
    'ClassDeclaration': function (node, parents) {
      var parent = null;
      for (var i = parents.length - 2; i >= 0 && parent === null; i--) {
        if (isScope(parents[i])) {
          parent = parents[i];
        }
      }
      parent.locals = parent.locals || {};
      parent.locals[node.id.name] = true;
    },
    'TryStatement': function (node) {
      if (node.handler === null) return;
      node.handler.body.locals = node.handler.body.locals || {};
      node.handler.body.locals[node.handler.param.name] = true;
    },
    'ImportDefaultSpecifier': declareModuleSpecifier,
    'ImportSpecifier': declareModuleSpecifier,
    'ImportNamespaceSpecifier': declareModuleSpecifier
  });
  function identifier(node, parents) {
    var name = node.name;
    if (name === 'undefined') return;
    for (var i = 0; i < parents.length; i++) {
      if (name === 'arguments' && declaresArguments(parents[i])) {
        return;
      }
      if (parents[i].locals && name in parents[i].locals) {
        return;
      }
    }
    if (
      parents[parents.length - 2] &&
      parents[parents.length - 2].type === 'TryStatement' &&
      parents[parents.length - 2].handler &&
      node === parents[parents.length - 2].handler.param
    ) {
      return;
    }
    node.parents = parents;
    globals.push(node);
  }
  walk.ancestor(ast, {
    'VariablePattern': identifier,
    'Identifier': identifier,
    'ThisExpression': function (node, parents) {
      for (var i = 0; i < parents.length; i++) {
        if (declaresThis(parents[i])) {
          return;
        }
      }
      node.parents = parents;
      globals.push(node);
    }
  });
  var groupedGlobals = {};
  globals.forEach(function (node) {
    groupedGlobals[node.name] = (groupedGlobals[node.name] || []);
    groupedGlobals[node.name].push(node);
  });
  return Object.keys(groupedGlobals).sort().map(function (name) {
    return {name: name, nodes: groupedGlobals[name]};
  });
}
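For context, the file above is the core of the vendored `acorn-globals` module: it walks the AST, records every declared local in `locals` maps, and returns the identifiers that were never declared. A minimal usage sketch follows; it is illustrative only and not part of the committed files.

```javascript
// Illustrative sketch: detect implicit globals in a source string
// using the vendored acorn-globals module shown above.
var findGlobals = require('acorn-globals');

var src = 'var a = 1; b = a + c;';
var globals = findGlobals(src); // parses with acorn, then walks the AST

// Each entry is {name, nodes}; here `b` and `c` were never declared.
console.log(globals.map(function (g) { return g.name; })); // => ['b', 'c']
```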
@ -0,0 +1,65 @@
|
||||||
|
{
|
||||||
|
"_from": "acorn-globals@^1.0.3",
|
||||||
|
"_id": "acorn-globals@1.0.9",
|
||||||
|
"_inBundle": false,
|
||||||
|
"_integrity": "sha1-VbtemGkVB7dFedBRNBMhfDgMVM8=",
|
||||||
|
"_location": "/acorn-globals",
|
||||||
|
"_phantomChildren": {},
|
||||||
|
"_requested": {
|
||||||
|
"type": "range",
|
||||||
|
"registry": true,
|
||||||
|
"raw": "acorn-globals@^1.0.3",
|
||||||
|
"name": "acorn-globals",
|
||||||
|
"escapedName": "acorn-globals",
|
||||||
|
"rawSpec": "^1.0.3",
|
||||||
|
"saveSpec": null,
|
||||||
|
"fetchSpec": "^1.0.3"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/with"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-1.0.9.tgz",
|
||||||
|
"_shasum": "55bb5e98691507b74579d0513413217c380c54cf",
|
||||||
|
"_spec": "acorn-globals@^1.0.3",
|
||||||
|
"_where": "C:\\Users\\oracle\\Desktop\\Forest\\kekhegy\\kekhegy\\node_modules\\with",
|
||||||
|
"author": {
|
||||||
|
"name": "ForbesLindesay"
|
||||||
|
},
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/ForbesLindesay/acorn-globals/issues"
|
||||||
|
},
|
||||||
|
"bundleDependencies": false,
|
||||||
|
"dependencies": {
|
||||||
|
"acorn": "^2.1.0"
|
||||||
|
},
|
||||||
|
"deprecated": false,
|
||||||
|
"description": "Detect global variables in JavaScript using acorn",
|
||||||
|
"devDependencies": {
|
||||||
|
"testit": "^2.0.2"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"index.js",
|
||||||
|
"LICENSE"
|
||||||
|
],
|
||||||
|
"homepage": "https://github.com/ForbesLindesay/acorn-globals#readme",
|
||||||
|
"keywords": [
|
||||||
|
"ast",
|
||||||
|
"variable",
|
||||||
|
"name",
|
||||||
|
"lexical",
|
||||||
|
"scope",
|
||||||
|
"local",
|
||||||
|
"global",
|
||||||
|
"implicit"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"name": "acorn-globals",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/ForbesLindesay/acorn-globals.git"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "node test"
|
||||||
|
},
|
||||||
|
"version": "1.0.9"
|
||||||
|
}
|
|
@ -0,0 +1,7 @@
root = true

[*]
indent_style = space
indent_size = 2
end_of_line = lf
insert_final_newline = true
@ -0,0 +1 @@
* text eol=lf
@ -0,0 +1,3 @@
/.tern-port
/test
/local
@ -0,0 +1,6 @@
{
  "plugins": {
    "node": true,
    "es_modules": true
  }
}
@ -0,0 +1,6 @@
language: node_js
sudo: false
node_js:
- '0.10'
- '0.12'
- '4'
@ -0,0 +1,43 @@
List of Acorn contributors. Updated before every release.

Adrian Rakovsky
Alistair Braidwood
Andres Suarez
Aparajita Fishman
Arian Stolwijk
Artem Govorov
Brandon Mills
Charles Hughes
Conrad Irwin
David Bonnet
ForbesLindesay
Forbes Lindesay
Gilad Peleg
impinball
Ingvar Stepanyan
Jesse McCarthy
Jiaxing Wang
Joel Kemp
Johannes Herr
Jürg Lehni
keeyipchan
Kevin Kwok
krator
Marijn Haverbeke
Martin Carlberg
Mathias Bynens
Mathieu 'p01' Henri
Max Schaefer
Max Zerzouri
Mihai Bazon
Mike Rennie
Nick Fitzgerald
Oskar Schöldström
Paul Harper
Peter Rust
PlNG
r-e-d
Rich Harris
Sebastian McKenzie
Timothy Gu
zsjforcn
@ -0,0 +1,19 @@
Copyright (C) 2012-2014 by various contributors (see AUTHORS)

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@ -0,0 +1,396 @@
# Acorn

[![Build Status](https://travis-ci.org/ternjs/acorn.svg?branch=master)](https://travis-ci.org/ternjs/acorn)
[![NPM version](https://img.shields.io/npm/v/acorn.svg)](https://www.npmjs.com/package/acorn)
[Author funding status: ![maintainer happiness](https://marijnhaverbeke.nl/fund/status_s.png?force)](https://marijnhaverbeke.nl/fund/)

A tiny, fast JavaScript parser, written completely in JavaScript.

## Community

Acorn is open source software released under an
[MIT license](https://github.com/ternjs/acorn/blob/master/LICENSE).

You are welcome to
[report bugs](https://github.com/ternjs/acorn/issues) or create pull
requests on [github](https://github.com/ternjs/acorn). For questions
and discussion, please use the
[Tern discussion forum](https://discuss.ternjs.net).

## Installation

The easiest way to install acorn is with [`npm`][npm].

[npm]: https://www.npmjs.com/

```sh
npm install acorn
```

Alternately, download the source.

```sh
git clone https://github.com/ternjs/acorn.git
```

## Components

When run in a CommonJS (node.js) or AMD environment, exported values
appear in the interfaces exposed by the individual files, as usual.
When loaded in the browser (Acorn works in any JS-enabled browser more
recent than IE5) without any kind of module management, a single
global object `acorn` will be defined, and all the exported properties
will be added to that.

### Main parser

This is implemented in `dist/acorn.js`, and is what you get when you
`require("acorn")` in node.js.

**parse**`(input, options)` is used to parse a JavaScript program.
The `input` parameter is a string, `options` can be undefined or an
object setting some of the options listed below. The return value will
be an abstract syntax tree object as specified by the
[ESTree spec][estree].

When encountering a syntax error, the parser will raise a
`SyntaxError` object with a meaningful message. The error object will
have a `pos` property that indicates the character offset at which the
error occurred, and a `loc` object that contains a `{line, column}`
object referring to that same position.

[estree]: https://github.com/estree/estree

- **ecmaVersion**: Indicates the ECMAScript version to parse. Must be
|
||||||
|
either 3, 5, or 6. This influences support for strict mode, the set
|
||||||
|
of reserved words, and support for new syntax features. Default is 5.
|
||||||
|
|
||||||
|
- **sourceType**: Indicate the mode the code should be parsed in. Can be
|
||||||
|
either `"script"` or `"module"`.
|
||||||
|
|
||||||
|
- **onInsertedSemicolon**: If given a callback, that callback will be
|
||||||
|
called whenever a missing semicolon is inserted by the parser. The
|
||||||
|
callback will be given the character offset of the point where the
|
||||||
|
semicolon is inserted as argument, and if `locations` is on, also a
|
||||||
|
`{line, column}` object representing this position.
|
||||||
|
|
||||||
|
- **onTrailingComma**: Like `onInsertedSemicolon`, but for trailing
|
||||||
|
commas.
|
||||||
|
|
||||||
|
- **allowReserved**: If `false`, using a reserved word will generate
|
||||||
|
an error. Defaults to `true` for `ecmaVersion` 3, `false` for higher
|
||||||
|
versions. When given the value `"never"`, reserved words and
|
||||||
|
keywords can also not be used as property names (as in Internet
|
||||||
|
Explorer's old parser).
|
||||||
|
|
||||||
|
- **allowReturnOutsideFunction**: By default, a return statement at
|
||||||
|
the top level raises an error. Set this to `true` to accept such
|
||||||
|
code.
|
||||||
|
|
||||||
|
- **allowImportExportEverywhere**: By default, `import` and `export`
|
||||||
|
declarations can only appear at a program's top level. Setting this
|
||||||
|
option to `true` allows them anywhere where a statement is allowed.
|
||||||
|
|
||||||
|
- **allowHashBang**: When this is enabled (off by default), if the
|
||||||
|
code starts with the characters `#!` (as in a shellscript), the
|
||||||
|
first line will be treated as a comment.
|
||||||
|
|
||||||
|
- **locations**: When `true`, each node has a `loc` object attached
|
||||||
|
with `start` and `end` subobjects, each of which contains the
|
||||||
|
one-based line and zero-based column numbers in `{line, column}`
|
||||||
|
form. Default is `false`.
|
||||||
|
|
||||||
|
- **onToken**: If a function is passed for this option, each found
|
||||||
|
token will be passed in same format as tokens returned from
|
||||||
|
`tokenizer().getToken()`.
|
||||||
|
|
||||||
|
If array is passed, each found token is pushed to it.
|
||||||
|
|
||||||
|
Note that you are not allowed to call the parser from the
|
||||||
|
callback—that will corrupt its internal state.
|
||||||
|
|
||||||
|
- **onComment**: If a function is passed for this option, whenever a
|
||||||
|
comment is encountered the function will be called with the
|
||||||
|
following parameters:
|
||||||
|
|
||||||
|
- `block`: `true` if the comment is a block comment, false if it
|
||||||
|
is a line comment.
|
||||||
|
- `text`: The content of the comment.
|
||||||
|
- `start`: Character offset of the start of the comment.
|
||||||
|
- `end`: Character offset of the end of the comment.
|
||||||
|
|
||||||
|
When the `locations` options is on, the `{line, column}` locations
|
||||||
|
of the comment’s start and end are passed as two additional
|
||||||
|
parameters.
|
||||||
|
|
||||||
|
If array is passed for this option, each found comment is pushed
|
||||||
|
to it as object in Esprima format:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
{
|
||||||
|
"type": "Line" | "Block",
|
||||||
|
"value": "comment text",
|
||||||
|
"start": Number,
|
||||||
|
"end": Number,
|
||||||
|
// If `locations` option is on:
|
||||||
|
"loc": {
|
||||||
|
"start": {line: Number, column: Number}
|
||||||
|
"end": {line: Number, column: Number}
|
||||||
|
},
|
||||||
|
// If `ranges` option is on:
|
||||||
|
"range": [Number, Number]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Note that you are not allowed to call the parser from the
|
||||||
|
callback—that will corrupt its internal state.
|
||||||
|
|
||||||
|
- **ranges**: Nodes have their start and end characters offsets
|
||||||
|
recorded in `start` and `end` properties (directly on the node,
|
||||||
|
rather than the `loc` object, which holds line/column data. To also
|
||||||
|
add a [semi-standardized][range] `range` property holding a
|
||||||
|
`[start, end]` array with the same numbers, set the `ranges` option
|
||||||
|
to `true`.
|
||||||
|
|
||||||
|
- **program**: It is possible to parse multiple files into a single
|
||||||
|
AST by passing the tree produced by parsing the first file as the
|
||||||
|
`program` option in subsequent parses. This will add the toplevel
|
||||||
|
forms of the parsed file to the "Program" (top) node of an existing
|
||||||
|
parse tree.
|
||||||
|
|
||||||
|
- **sourceFile**: When the `locations` option is `true`, you can pass
|
||||||
|
this option to add a `source` attribute in every node’s `loc`
|
||||||
|
object. Note that the contents of this option are not examined or
|
||||||
|
processed in any way; you are free to use whatever format you
|
||||||
|
choose.
|
||||||
|
|
||||||
|
- **directSourceFile**: Like `sourceFile`, but a `sourceFile` property
|
||||||
|
will be added directly to the nodes, rather than the `loc` object.
|
||||||
|
|
||||||
|
- **preserveParens**: If this option is `true`, parenthesized expressions
|
||||||
|
are represented by (non-standard) `ParenthesizedExpression` nodes
|
||||||
|
that have a single `expression` property containing the expression
|
||||||
|
inside parentheses.
|
||||||
|
|
||||||
|
[range]: https://bugzilla.mozilla.org/show_bug.cgi?id=745678
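To make the option descriptions above concrete, here is a small illustrative sketch (not part of the vendored README) that parses a snippet with a few of these options set and handles a syntax error via `pos`, `loc`, and `getLineInfo`:

```javascript
// Illustrative sketch only: combines a few of the options documented above.
var acorn = require("acorn");

var code = "let xs = [1, 2, 3];\nxs.map(x => x * 2);";
try {
  var ast = acorn.parse(code, {
    ecmaVersion: 6,        // enable ES6 syntax (let, arrow functions)
    sourceType: "module",  // parse in module mode
    locations: true,       // attach {line, column} info to every node
    ranges: true           // also attach [start, end] range arrays
  });
  console.log(ast.body.length); // => 2 top-level statements
} catch (err) {
  // Syntax errors carry the character offset and, with locations on, a position.
  console.error(err.message, err.pos, err.loc, acorn.getLineInfo(code, err.pos));
}
```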

**parseExpressionAt**`(input, offset, options)` will parse a single
expression in a string, and return its AST. It will not complain if
there is more of the string left after the expression.

**getLineInfo**`(input, offset)` can be used to get a `{line,
column}` object for a given program string and character offset.

**tokenizer**`(input, options)` returns an object with a `getToken`
method that can be called repeatedly to get the next token, a `{start,
end, type, value}` object (with added `loc` property when the
`locations` option is enabled and `range` property when the `ranges`
option is enabled). When the token's type is `tokTypes.eof`, you
should stop calling the method, since it will keep returning that same
token forever.

In ES6 environment, returned result can be used as any other
protocol-compliant iterable:

```javascript
for (let token of acorn.tokenizer(str)) {
  // iterate over the tokens
}

// transform code to array of tokens:
var tokens = [...acorn.tokenizer(str)];
```

**tokTypes** holds an object mapping names to the token type objects
that end up in the `type` properties of tokens.

#### Note on using with [Escodegen][escodegen]

Escodegen supports generating comments from AST, attached in
Esprima-specific format. In order to simulate same format in
Acorn, consider following example:

```javascript
var comments = [], tokens = [];

var ast = acorn.parse('var x = 42; // answer', {
  // collect ranges for each node
  ranges: true,
  // collect comments in Esprima's format
  onComment: comments,
  // collect token ranges
  onToken: tokens
});

// attach comments using collected information
escodegen.attachComments(ast, comments, tokens);

// generate code
console.log(escodegen.generate(ast, {comment: true}));
// > 'var x = 42; // answer'
```

[escodegen]: https://github.com/estools/escodegen

### dist/acorn_loose.js ###

This file implements an error-tolerant parser. It exposes a single
function. The loose parser is accessible in node.js via `require("acorn/dist/acorn_loose")`.

**parse_dammit**`(input, options)` takes the same arguments and
returns the same syntax tree as the `parse` function in `acorn.js`,
but never raises an error, and will do its best to parse syntactically
invalid code in as meaningful a way as it can. It'll insert identifier
nodes with name `"✖"` as placeholders in places where it can't make
sense of the input. Depends on `acorn.js`, because it uses the same
tokenizer.

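A brief illustrative sketch (not part of the vendored README) of the loose parser on broken input:

```javascript
// Illustrative sketch: the loose parser never throws, even on invalid code.
var looseAcorn = require("acorn/dist/acorn_loose");

var ast = looseAcorn.parse_dammit("if (x ===", {ecmaVersion: 6});
console.log(ast.type); // => "Program" (placeholders fill the unparseable parts)
```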
### dist/walk.js ###

Implements an abstract syntax tree walker. Will store its interface in
`acorn.walk` when loaded without a module system.

**simple**`(node, visitors, base, state)` does a 'simple' walk over
a tree. `node` should be the AST node to walk, and `visitors` an
object with properties whose names correspond to node types in the
[ESTree spec][estree]. The properties should contain functions
that will be called with the node object and, if applicable the state
at that point. The last two arguments are optional. `base` is a walker
algorithm, and `state` is a start state. The default walker will
simply visit all statements and expressions and not produce a
meaningful state. (An example of a use of state is to track scope at
each point in the tree.)

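An illustrative sketch (not in the vendored README) of a simple walk that counts call expressions:

```javascript
// Illustrative sketch: count CallExpression nodes with walk.simple.
var acorn = require("acorn");
var walk = require("acorn/dist/walk");

var ast = acorn.parse("foo(); bar(baz());");
var state = {calls: 0};
walk.simple(ast, {
  CallExpression: function (node, st) { st.calls++; }
}, null, state);
console.log(state.calls); // => 3
```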
**ancestor**`(node, visitors, base, state)` does a 'simple' walk over
a tree, building up an array of ancestor nodes (including the current node)
and passing the array to callbacks in the `state` parameter.

**recursive**`(node, state, functions, base)` does a 'recursive'
walk, where the walker functions are responsible for continuing the
walk on the child nodes of their target node. `state` is the start
state, and `functions` should contain an object that maps node types
to walker functions. Such functions are called with `(node, state, c)`
arguments, and can cause the walk to continue on a sub-node by calling
the `c` argument on it with `(node, state)` arguments. The optional
`base` argument provides the fallback walker functions for node types
that aren't handled in the `functions` object. If not given, the
default walkers will be used.

**make**`(functions, base)` builds a new walker object by using the
walker functions in `functions` and filling in the missing ones by
taking defaults from `base`.

**findNodeAt**`(node, start, end, test, base, state)` tries to
locate a node in a tree at the given start and/or end offsets, which
satisfies the predicate `test`. `start` and `end` can be either `null`
(as wildcard) or a number. `test` may be a string (indicating a node
type) or a function that takes `(nodeType, node)` arguments and
returns a boolean indicating whether this node is interesting. `base`
and `state` are optional, and can be used to specify a custom walker.
Nodes are tested from inner to outer, so if two nodes match the
boundaries, the inner one will be preferred.

**findNodeAround**`(node, pos, test, base, state)` is a lot like
`findNodeAt`, but will match any node that exists 'around' (spanning)
the given position.

**findNodeAfter**`(node, pos, test, base, state)` is similar to
`findNodeAround`, but will match all nodes *after* the given position
(testing outer nodes before inner nodes).

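As an illustrative sketch (not part of the vendored README), locating the node that spans a given position with `findNodeAround`:

```javascript
// Illustrative sketch: find the innermost BinaryExpression spanning offset 6.
var acorn = require("acorn");
var walk = require("acorn/dist/walk");

var code = "foo(1 + 2);";
var ast = acorn.parse(code, {ecmaVersion: 5});
var hit = walk.findNodeAround(ast, 6, "BinaryExpression");
if (hit) {
  // hit.node is the `1 + 2` BinaryExpression, hit.state the walk state.
  console.log(code.slice(hit.node.start, hit.node.end)); // => "1 + 2"
}
```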
## Command line interface
|
||||||
|
|
||||||
|
The `bin/acorn` utility can be used to parse a file from the command
|
||||||
|
line. It accepts as arguments its input file and the following
|
||||||
|
options:
|
||||||
|
|
||||||
|
- `--ecma3|--ecma5|--ecma6`: Sets the ECMAScript version to parse. Default is
|
||||||
|
version 5.
|
||||||
|
|
||||||
|
- `--module`: Sets the parsing mode to `"module"`. Is set to `"script"` otherwise.
|
||||||
|
|
||||||
|
- `--locations`: Attaches a "loc" object to each node with "start" and
|
||||||
|
"end" subobjects, each of which contains the one-based line and
|
||||||
|
zero-based column numbers in `{line, column}` form.
|
||||||
|
|
||||||
|
- `--allow-hash-bang`: If the code starts with the characters #! (as in a shellscript), the first line will be treated as a comment.
|
||||||
|
|
||||||
|
- `--compact`: No whitespace is used in the AST output.
|
||||||
|
|
||||||
|
- `--silent`: Do not output the AST, just return the exit status.
|
||||||
|
|
||||||
|
- `--help`: Print the usage information and quit.
|
||||||
|
|
||||||
|
The utility spits out the syntax tree as JSON data.
|
||||||
|
|
||||||
|
## Build system
|
||||||
|
|
||||||
|
Acorn is written in ECMAScript 6, as a set of small modules, in the
|
||||||
|
project's `src` directory, and compiled down to bigger ECMAScript 3
|
||||||
|
files in `dist` using [Browserify](http://browserify.org) and
|
||||||
|
[Babel](http://babeljs.io/). If you are already using Babel, you can
|
||||||
|
consider including the modules directly.
|
||||||
|
|
||||||
|
The command-line test runner (`npm test`) uses the ES6 modules. The
|
||||||
|
browser-based test page (`test/index.html`) uses the compiled modules.
|
||||||
|
The `bin/build-acorn.js` script builds the latter from the former.
|
||||||
|
|
||||||
|
If you are working on Acorn, you'll probably want to try the code out
|
||||||
|
directly, without an intermediate build step. In your scripts, you can
|
||||||
|
register the Babel require shim like this:
|
||||||
|
|
||||||
|
require("babel-core/register")
|
||||||
|
|
||||||
|
That will allow you to directly `require` the ES6 modules.
|
||||||
|
|
||||||
|
## Plugins
|
||||||
|
|
||||||
|
Acorn is designed support allow plugins which, within reasonable
|
||||||
|
bounds, redefine the way the parser works. Plugins can add new token
|
||||||
|
types and new tokenizer contexts (if necessary), and extend methods in
|
||||||
|
the parser object. This is not a clean, elegant API—using it requires
|
||||||
|
an understanding of Acorn's internals, and plugins are likely to break
|
||||||
|
whenever those internals are significantly changed. But still, it is
|
||||||
|
_possible_, in this way, to create parsers for JavaScript dialects
|
||||||
|
without forking all of Acorn. And in principle it is even possible to
|
||||||
|
combine such plugins, so that if you have, for example, a plugin for
|
||||||
|
parsing types and a plugin for parsing JSX-style XML literals, you
|
||||||
|
could load them both and parse code with both JSX tags and types.
|
||||||
|
|
||||||
|
A plugin should register itself by adding a property to
|
||||||
|
`acorn.plugins`, which holds a function. Calling `acorn.parse`, a
|
||||||
|
`plugins` option can be passed, holding an object mapping plugin names
|
||||||
|
to configuration values (or just `true` for plugins that don't take
|
||||||
|
options). After the parser object has been created, the initialization
|
||||||
|
functions for the chosen plugins are called with `(parser,
|
||||||
|
configValue)` arguments. They are expected to use the `parser.extend`
|
||||||
|
method to extend parser methods. For example, the `readToken` method
|
||||||
|
could be extended like this:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
parser.extend("readToken", function(nextMethod) {
|
||||||
|
return function(code) {
|
||||||
|
console.log("Reading a token!")
|
||||||
|
return nextMethod.call(this, code)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
The `nextMethod` argument passed to `extend`'s second argument is the
|
||||||
|
previous value of this method, and should usually be called through to
|
||||||
|
whenever the extended method does not handle the call itself.
|
||||||
|
|
||||||
|
Similarly, the loose parser allows plugins to register themselves via
|
||||||
|
`acorn.pluginsLoose`. The extension mechanism is the same as for the
|
||||||
|
normal parser:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
looseParser.extend("readToken", function(nextMethod) {
|
||||||
|
return function() {
|
||||||
|
console.log("Reading a token in the loose parser!")
|
||||||
|
return nextMethod.call(this)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
There is a proof-of-concept JSX plugin in the [`acorn-jsx`](https://github.com/RReverser/acorn-jsx) project.
|
|
@ -0,0 +1,71 @@
|
||||||
|
#!/usr/bin/env node
|
||||||
|
"use strict";
|
||||||
|
|
||||||
|
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj["default"] = obj; return newObj; } }
|
||||||
|
|
||||||
|
var _path = require("path");
|
||||||
|
|
||||||
|
var _fs = require("fs");
|
||||||
|
|
||||||
|
var _distAcornJs = require("../dist/acorn.js");
|
||||||
|
|
||||||
|
var acorn = _interopRequireWildcard(_distAcornJs);
|
||||||
|
|
||||||
|
var infile = undefined,
|
||||||
|
forceFile = undefined,
|
||||||
|
silent = false,
|
||||||
|
compact = false,
|
||||||
|
tokenize = false;
|
||||||
|
var options = {};
|
||||||
|
|
||||||
|
function help(status) {
|
||||||
|
var print = status == 0 ? console.log : console.error;
|
||||||
|
print("usage: " + (0, _path.basename)(process.argv[1]) + " [--ecma3|--ecma5|--ecma6]");
|
||||||
|
print(" [--tokenize] [--locations] [---allow-hash-bang] [--compact] [--silent] [--module] [--help] [--] [infile]");
|
||||||
|
process.exit(status);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (var i = 2; i < process.argv.length; ++i) {
|
||||||
|
var arg = process.argv[i];
|
||||||
|
if ((arg == "-" || arg[0] != "-") && !infile) infile = arg;else if (arg == "--" && !infile && i + 2 == process.argv.length) forceFile = infile = process.argv[++i];else if (arg == "--ecma3") options.ecmaVersion = 3;else if (arg == "--ecma5") options.ecmaVersion = 5;else if (arg == "--ecma6") options.ecmaVersion = 6;else if (arg == "--locations") options.locations = true;else if (arg == "--allow-hash-bang") options.allowHashBang = true;else if (arg == "--silent") silent = true;else if (arg == "--compact") compact = true;else if (arg == "--help") help(0);else if (arg == "--tokenize") tokenize = true;else if (arg == "--module") options.sourceType = 'module';else help(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
function run(code) {
|
||||||
|
var result = undefined;
|
||||||
|
if (!tokenize) {
|
||||||
|
try {
|
||||||
|
result = acorn.parse(code, options);
|
||||||
|
} catch (e) {
|
||||||
|
console.error(e.message);process.exit(1);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
result = [];
|
||||||
|
var tokenizer = acorn.tokenizer(code, options),
|
||||||
|
token = undefined;
|
||||||
|
while (true) {
|
||||||
|
try {
|
||||||
|
token = tokenizer.getToken();
|
||||||
|
} catch (e) {
|
||||||
|
console.error(e.message);process.exit(1);
|
||||||
|
}
|
||||||
|
result.push(token);
|
||||||
|
if (token.type == acorn.tokTypes.eof) break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (!silent) console.log(JSON.stringify(result, null, compact ? null : 2));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (forceFile || infile && infile != "-") {
|
||||||
|
run((0, _fs.readFileSync)(infile, "utf8"));
|
||||||
|
} else {
|
||||||
|
(function () {
|
||||||
|
var code = "";
|
||||||
|
process.stdin.resume();
|
||||||
|
process.stdin.on("data", function (chunk) {
|
||||||
|
return code += chunk;
|
||||||
|
});
|
||||||
|
process.stdin.on("end", function () {
|
||||||
|
return run(code);
|
||||||
|
});
|
||||||
|
})();
|
||||||
|
}
|
|
@ -0,0 +1,82 @@
|
||||||
|
var fs = require("fs"), path = require("path")
|
||||||
|
var stream = require("stream")
|
||||||
|
|
||||||
|
var browserify = require("browserify")
|
||||||
|
var babel = require('babel-core')
|
||||||
|
var babelify = require("babelify").configure({loose: "all"})
|
||||||
|
|
||||||
|
process.chdir(path.resolve(__dirname, ".."))
|
||||||
|
|
||||||
|
browserify({standalone: "acorn"})
|
||||||
|
.plugin(require('browserify-derequire'))
|
||||||
|
.transform(babelify)
|
||||||
|
.require("./src/index.js", {entry: true})
|
||||||
|
.bundle()
|
||||||
|
.on("error", function (err) { console.log("Error: " + err.message) })
|
||||||
|
.pipe(fs.createWriteStream("dist/acorn.js"))
|
||||||
|
|
||||||
|
var ACORN_PLACEHOLDER = "this_function_call_should_be_replaced_with_a_call_to_load_acorn()";
|
||||||
|
function acornShimPrepare(file) {
|
||||||
|
var tr = new stream.Transform
|
||||||
|
if (file == path.resolve(__dirname, "../src/index.js")) {
|
||||||
|
var sent = false
|
||||||
|
tr._transform = function(chunk, _, callback) {
|
||||||
|
if (!sent) {
|
||||||
|
sent = true
|
||||||
|
callback(null, ACORN_PLACEHOLDER);
|
||||||
|
} else {
|
||||||
|
callback()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
tr._transform = function(chunk, _, callback) { callback(null, chunk) }
|
||||||
|
}
|
||||||
|
return tr
|
||||||
|
}
|
||||||
|
function acornShimComplete() {
|
||||||
|
var tr = new stream.Transform
|
||||||
|
var buffer = "";
|
||||||
|
tr._transform = function(chunk, _, callback) {
|
||||||
|
buffer += chunk.toString("utf8");
|
||||||
|
callback();
|
||||||
|
};
|
||||||
|
tr._flush = function (callback) {
|
||||||
|
tr.push(buffer.replace(ACORN_PLACEHOLDER, "module.exports = typeof acorn != 'undefined' ? acorn : require(\"./acorn\")"));
|
||||||
|
callback(null);
|
||||||
|
};
|
||||||
|
return tr;
|
||||||
|
}
|
||||||
|
|
||||||
|
browserify({standalone: "acorn.loose"})
|
||||||
|
.plugin(require('browserify-derequire'))
|
||||||
|
.transform(acornShimPrepare)
|
||||||
|
.transform(babelify)
|
||||||
|
.require("./src/loose/index.js", {entry: true})
|
||||||
|
.bundle()
|
||||||
|
.on("error", function (err) { console.log("Error: " + err.message) })
|
||||||
|
.pipe(acornShimComplete())
|
||||||
|
.pipe(fs.createWriteStream("dist/acorn_loose.js"))
|
||||||
|
|
||||||
|
browserify({standalone: "acorn.walk"})
|
||||||
|
.plugin(require('browserify-derequire'))
|
||||||
|
.transform(acornShimPrepare)
|
||||||
|
.transform(babelify)
|
||||||
|
.require("./src/walk/index.js", {entry: true})
|
||||||
|
.bundle()
|
||||||
|
.on("error", function (err) { console.log("Error: " + err.message) })
|
||||||
|
.pipe(acornShimComplete())
|
||||||
|
.pipe(fs.createWriteStream("dist/walk.js"))
|
||||||
|
|
||||||
|
babel.transformFile("./src/bin/acorn.js", function (err, result) {
|
||||||
|
if (err) return console.log("Error: " + err.message)
|
||||||
|
fs.writeFile("bin/acorn", result.code, function (err) {
|
||||||
|
if (err) return console.log("Error: " + err.message)
|
||||||
|
|
||||||
|
// Make bin/acorn executable
|
||||||
|
if (process.platform === 'win32')
|
||||||
|
return
|
||||||
|
var stat = fs.statSync("bin/acorn")
|
||||||
|
var newPerm = stat.mode | parseInt('111', 8)
|
||||||
|
fs.chmodSync("bin/acorn", newPerm)
|
||||||
|
})
|
||||||
|
})
|
|
@ -0,0 +1,47 @@
|
||||||
|
// Note: run `npm install unicode-7.0.0` first.
|
||||||
|
|
||||||
|
// Which Unicode version should be used?
|
||||||
|
var version = '7.0.0';
|
||||||
|
|
||||||
|
var start = require('unicode-' + version + '/properties/ID_Start/code-points')
|
||||||
|
.filter(function(ch) { return ch > 127; });
|
||||||
|
var cont = [0x200c, 0x200d].concat(require('unicode-' + version + '/properties/ID_Continue/code-points')
|
||||||
|
.filter(function(ch) { return ch > 127 && start.indexOf(ch) == -1; }));
|
||||||
|
|
||||||
|
function pad(str, width) {
|
||||||
|
while (str.length < width) str = "0" + str;
|
||||||
|
return str;
|
||||||
|
}
|
||||||
|
|
||||||
|
function esc(code) {
|
||||||
|
var hex = code.toString(16);
|
||||||
|
if (hex.length <= 2) return "\\x" + pad(hex, 2);
|
||||||
|
else return "\\u" + pad(hex, 4);
|
||||||
|
}
|
||||||
|
|
||||||
|
function generate(chars) {
|
||||||
|
var astral = [], re = "";
|
||||||
|
for (var i = 0, at = 0x10000; i < chars.length; i++) {
|
||||||
|
var from = chars[i], to = from;
|
||||||
|
while (i < chars.length - 1 && chars[i + 1] == to + 1) {
|
||||||
|
i++;
|
||||||
|
to++;
|
||||||
|
}
|
||||||
|
if (to <= 0xffff) {
|
||||||
|
if (from == to) re += esc(from);
|
||||||
|
else if (from + 1 == to) re += esc(from) + esc(to);
|
||||||
|
else re += esc(from) + "-" + esc(to);
|
||||||
|
} else {
|
||||||
|
astral.push(from - at, to - from);
|
||||||
|
at = to;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return {nonASCII: re, astral: astral};
|
||||||
|
}
|
||||||
|
|
||||||
|
var startData = generate(start), contData = generate(cont);
|
||||||
|
|
||||||
|
console.log(" var nonASCIIidentifierStartChars = \"" + startData.nonASCII + "\";");
|
||||||
|
console.log(" var nonASCIIidentifierChars = \"" + contData.nonASCII + "\";");
|
||||||
|
console.log(" var astralIdentifierStartCodes = " + JSON.stringify(startData.astral) + ";");
|
||||||
|
console.log(" var astralIdentifierCodes = " + JSON.stringify(contData.astral) + ";");
|
|
@ -0,0 +1,6 @@
|
||||||
|
# Combine existing list of authors with everyone known in git, sort, add header.
|
||||||
|
tail --lines=+3 AUTHORS > AUTHORS.tmp
|
||||||
|
git log --format='%aN' | grep -v abraidwood >> AUTHORS.tmp
|
||||||
|
echo -e "List of Acorn contributors. Updated before every release.\n" > AUTHORS
|
||||||
|
sort -u AUTHORS.tmp >> AUTHORS
|
||||||
|
rm -f AUTHORS.tmp
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,377 @@
|
||||||
|
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}(g.acorn || (g.acorn = {})).walk = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(_dereq_,module,exports){
|
||||||
|
// AST walker module for Mozilla Parser API compatible trees
|
||||||
|
|
||||||
|
// A simple walk is one where you simply specify callbacks to be
|
||||||
|
// called on specific nodes. The last two arguments are optional. A
|
||||||
|
// simple use would be
|
||||||
|
//
|
||||||
|
// walk.simple(myTree, {
|
||||||
|
// Expression: function(node) { ... }
|
||||||
|
// });
|
||||||
|
//
|
||||||
|
// to do something with all expressions. All Parser API node types
|
||||||
|
// can be used to identify node types, as well as Expression,
|
||||||
|
// Statement, and ScopeBody, which denote categories of nodes.
|
||||||
|
//
|
||||||
|
// The base argument can be used to pass a custom (recursive)
|
||||||
|
// walker, and state can be used to give this walked an initial
|
||||||
|
// state.
|
||||||
|
|
||||||
|
"use strict";
|
||||||
|
|
||||||
|
exports.__esModule = true;
|
||||||
|
exports.simple = simple;
|
||||||
|
exports.ancestor = ancestor;
|
||||||
|
exports.recursive = recursive;
|
||||||
|
exports.findNodeAt = findNodeAt;
|
||||||
|
exports.findNodeAround = findNodeAround;
|
||||||
|
exports.findNodeAfter = findNodeAfter;
|
||||||
|
exports.findNodeBefore = findNodeBefore;
|
||||||
|
exports.make = make;
|
||||||
|
|
||||||
|
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
|
||||||
|
|
||||||
|
function simple(node, visitors, base, state, override) {
|
||||||
|
if (!base) base = exports.base;(function c(node, st, override) {
|
||||||
|
var type = override || node.type,
|
||||||
|
found = visitors[type];
|
||||||
|
base[type](node, st, c);
|
||||||
|
if (found) found(node, st);
|
||||||
|
})(node, state, override);
|
||||||
|
}
|
||||||
|
|
||||||
|
// An ancestor walk builds up an array of ancestor nodes (including
|
||||||
|
// the current node) and passes them to the callback as the state parameter.
|
||||||
|
|
||||||
|
function ancestor(node, visitors, base, state) {
|
||||||
|
if (!base) base = exports.base;
|
||||||
|
if (!state) state = [];(function c(node, st, override) {
|
||||||
|
var type = override || node.type,
|
||||||
|
found = visitors[type];
|
||||||
|
if (node != st[st.length - 1]) {
|
||||||
|
st = st.slice();
|
||||||
|
st.push(node);
|
||||||
|
}
|
||||||
|
base[type](node, st, c);
|
||||||
|
if (found) found(node, st);
|
||||||
|
})(node, state);
|
||||||
|
}
|
||||||
|
|
||||||
|
// A recursive walk is one where your functions override the default
|
||||||
|
// walkers. They can modify and replace the state parameter that's
|
||||||
|
// threaded through the walk, and can opt how and whether to walk
|
||||||
|
// their child nodes (by calling their third argument on these
|
||||||
|
// nodes).
|
||||||
|
|
||||||
|
function recursive(node, state, funcs, base, override) {
|
||||||
|
var visitor = funcs ? exports.make(funcs, base) : base;(function c(node, st, override) {
|
||||||
|
visitor[override || node.type](node, st, c);
|
||||||
|
})(node, state, override);
|
||||||
|
}
|
||||||
|
|
||||||
|
function makeTest(test) {
|
||||||
|
if (typeof test == "string") return function (type) {
|
||||||
|
return type == test;
|
||||||
|
};else if (!test) return function () {
|
||||||
|
return true;
|
||||||
|
};else return test;
|
||||||
|
}
|
||||||
|
|
||||||
|
var Found = function Found(node, state) {
|
||||||
|
_classCallCheck(this, Found);
|
||||||
|
|
||||||
|
this.node = node;this.state = state;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find a node with a given start, end, and type (all are optional,
|
||||||
|
// null can be used as wildcard). Returns a {node, state} object, or
|
||||||
|
// undefined when it doesn't find a matching node.
|
||||||
|
;
|
||||||
|
|
||||||
|
function findNodeAt(node, start, end, test, base, state) {
|
||||||
|
test = makeTest(test);
|
||||||
|
if (!base) base = exports.base;
|
||||||
|
try {
|
||||||
|
;(function c(node, st, override) {
|
||||||
|
var type = override || node.type;
|
||||||
|
if ((start == null || node.start <= start) && (end == null || node.end >= end)) base[type](node, st, c);
|
||||||
|
if ((start == null || node.start == start) && (end == null || node.end == end) && test(type, node)) throw new Found(node, st);
|
||||||
|
})(node, state);
|
||||||
|
} catch (e) {
|
||||||
|
if (e instanceof Found) return e;
|
||||||
|
throw e;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the innermost node of a given type that contains the given
|
||||||
|
// position. Interface similar to findNodeAt.
|
||||||
|
|
||||||
|
function findNodeAround(node, pos, test, base, state) {
|
||||||
|
test = makeTest(test);
|
||||||
|
if (!base) base = exports.base;
|
||||||
|
try {
|
||||||
|
;(function c(node, st, override) {
|
||||||
|
var type = override || node.type;
|
||||||
|
if (node.start > pos || node.end < pos) return;
|
||||||
|
base[type](node, st, c);
|
||||||
|
if (test(type, node)) throw new Found(node, st);
|
||||||
|
})(node, state);
|
||||||
|
} catch (e) {
|
||||||
|
if (e instanceof Found) return e;
|
||||||
|
throw e;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the outermost matching node after a given position.
|
||||||
|
|
||||||
|
function findNodeAfter(node, pos, test, base, state) {
|
||||||
|
test = makeTest(test);
|
||||||
|
if (!base) base = exports.base;
|
||||||
|
try {
|
||||||
|
;(function c(node, st, override) {
|
||||||
|
if (node.end < pos) return;
|
||||||
|
var type = override || node.type;
|
||||||
|
if (node.start >= pos && test(type, node)) throw new Found(node, st);
|
||||||
|
base[type](node, st, c);
|
||||||
|
})(node, state);
|
||||||
|
} catch (e) {
|
||||||
|
if (e instanceof Found) return e;
|
||||||
|
throw e;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the outermost matching node before a given position.
|
||||||
|
|
||||||
|
function findNodeBefore(node, pos, test, base, state) {
|
||||||
|
test = makeTest(test);
|
||||||
|
if (!base) base = exports.base;
|
||||||
|
var max = undefined;(function c(node, st, override) {
|
||||||
|
if (node.start > pos) return;
|
||||||
|
var type = override || node.type;
|
||||||
|
if (node.end <= pos && (!max || max.node.end < node.end) && test(type, node)) max = new Found(node, st);
|
||||||
|
base[type](node, st, c);
|
||||||
|
})(node, state);
|
||||||
|
return max;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Used to create a custom walker. Will fill in all missing node
|
||||||
|
// type properties with the defaults.
|
||||||
|
|
||||||
|
function make(funcs, base) {
|
||||||
|
if (!base) base = exports.base;
|
||||||
|
var visitor = {};
|
||||||
|
for (var type in base) visitor[type] = base[type];
|
||||||
|
for (var type in funcs) visitor[type] = funcs[type];
|
||||||
|
return visitor;
|
||||||
|
}
|
||||||
|
|
||||||
|
function skipThrough(node, st, c) {
|
||||||
|
c(node, st);
|
||||||
|
}
|
||||||
|
function ignore(_node, _st, _c) {}
|
||||||
|
|
||||||
|
// Node walkers.
|
||||||
|
|
||||||
|
var base = {};
|
||||||
|
|
||||||
|
exports.base = base;
|
||||||
|
base.Program = base.BlockStatement = function (node, st, c) {
|
||||||
|
for (var i = 0; i < node.body.length; ++i) {
|
||||||
|
c(node.body[i], st, "Statement");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.Statement = skipThrough;
|
||||||
|
base.EmptyStatement = ignore;
|
||||||
|
base.ExpressionStatement = base.ParenthesizedExpression = function (node, st, c) {
|
||||||
|
return c(node.expression, st, "Expression");
|
||||||
|
};
|
||||||
|
base.IfStatement = function (node, st, c) {
|
||||||
|
c(node.test, st, "Expression");
|
||||||
|
c(node.consequent, st, "Statement");
|
||||||
|
if (node.alternate) c(node.alternate, st, "Statement");
|
||||||
|
};
|
||||||
|
base.LabeledStatement = function (node, st, c) {
|
||||||
|
return c(node.body, st, "Statement");
|
||||||
|
};
|
||||||
|
base.BreakStatement = base.ContinueStatement = ignore;
|
||||||
|
base.WithStatement = function (node, st, c) {
|
||||||
|
c(node.object, st, "Expression");
|
||||||
|
c(node.body, st, "Statement");
|
||||||
|
};
|
||||||
|
base.SwitchStatement = function (node, st, c) {
|
||||||
|
c(node.discriminant, st, "Expression");
|
||||||
|
for (var i = 0; i < node.cases.length; ++i) {
|
||||||
|
var cs = node.cases[i];
|
||||||
|
if (cs.test) c(cs.test, st, "Expression");
|
||||||
|
for (var j = 0; j < cs.consequent.length; ++j) {
|
||||||
|
c(cs.consequent[j], st, "Statement");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.ReturnStatement = base.YieldExpression = function (node, st, c) {
|
||||||
|
if (node.argument) c(node.argument, st, "Expression");
|
||||||
|
};
|
||||||
|
base.ThrowStatement = base.SpreadElement = function (node, st, c) {
|
||||||
|
return c(node.argument, st, "Expression");
|
||||||
|
};
|
||||||
|
base.TryStatement = function (node, st, c) {
|
||||||
|
c(node.block, st, "Statement");
|
||||||
|
if (node.handler) {
|
||||||
|
c(node.handler.param, st, "Pattern");
|
||||||
|
c(node.handler.body, st, "ScopeBody");
|
||||||
|
}
|
||||||
|
if (node.finalizer) c(node.finalizer, st, "Statement");
|
||||||
|
};
|
||||||
|
base.WhileStatement = base.DoWhileStatement = function (node, st, c) {
|
||||||
|
c(node.test, st, "Expression");
|
||||||
|
c(node.body, st, "Statement");
|
||||||
|
};
|
||||||
|
base.ForStatement = function (node, st, c) {
|
||||||
|
if (node.init) c(node.init, st, "ForInit");
|
||||||
|
if (node.test) c(node.test, st, "Expression");
|
||||||
|
if (node.update) c(node.update, st, "Expression");
|
||||||
|
c(node.body, st, "Statement");
|
||||||
|
};
|
||||||
|
base.ForInStatement = base.ForOfStatement = function (node, st, c) {
|
||||||
|
c(node.left, st, "ForInit");
|
||||||
|
c(node.right, st, "Expression");
|
||||||
|
c(node.body, st, "Statement");
|
||||||
|
};
|
||||||
|
base.ForInit = function (node, st, c) {
|
||||||
|
if (node.type == "VariableDeclaration") c(node, st);else c(node, st, "Expression");
|
||||||
|
};
|
||||||
|
base.DebuggerStatement = ignore;
|
||||||
|
|
||||||
|
base.FunctionDeclaration = function (node, st, c) {
|
||||||
|
return c(node, st, "Function");
|
||||||
|
};
|
||||||
|
base.VariableDeclaration = function (node, st, c) {
|
||||||
|
for (var i = 0; i < node.declarations.length; ++i) {
|
||||||
|
c(node.declarations[i], st);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.VariableDeclarator = function (node, st, c) {
|
||||||
|
c(node.id, st, "Pattern");
|
||||||
|
if (node.init) c(node.init, st, "Expression");
|
||||||
|
};
|
||||||
|
|
||||||
|
base.Function = function (node, st, c) {
|
||||||
|
if (node.id) c(node.id, st, "Pattern");
|
||||||
|
for (var i = 0; i < node.params.length; i++) {
|
||||||
|
c(node.params[i], st, "Pattern");
|
||||||
|
}c(node.body, st, node.expression ? "ScopeExpression" : "ScopeBody");
|
||||||
|
};
|
||||||
|
// FIXME drop these node types in next major version
|
||||||
|
// (They are awkward, and in ES6 every block can be a scope.)
|
||||||
|
base.ScopeBody = function (node, st, c) {
|
||||||
|
return c(node, st, "Statement");
|
||||||
|
};
|
||||||
|
base.ScopeExpression = function (node, st, c) {
|
||||||
|
return c(node, st, "Expression");
|
||||||
|
};
|
||||||
|
|
||||||
|
base.Pattern = function (node, st, c) {
|
||||||
|
if (node.type == "Identifier") c(node, st, "VariablePattern");else if (node.type == "MemberExpression") c(node, st, "MemberPattern");else c(node, st);
|
||||||
|
};
|
||||||
|
base.VariablePattern = ignore;
|
||||||
|
base.MemberPattern = skipThrough;
|
||||||
|
base.RestElement = function (node, st, c) {
|
||||||
|
return c(node.argument, st, "Pattern");
|
||||||
|
};
|
||||||
|
base.ArrayPattern = function (node, st, c) {
|
||||||
|
for (var i = 0; i < node.elements.length; ++i) {
|
||||||
|
var elt = node.elements[i];
|
||||||
|
if (elt) c(elt, st, "Pattern");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.ObjectPattern = function (node, st, c) {
|
||||||
|
for (var i = 0; i < node.properties.length; ++i) {
|
||||||
|
c(node.properties[i].value, st, "Pattern");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
base.Expression = skipThrough;
|
||||||
|
base.ThisExpression = base.Super = base.MetaProperty = ignore;
|
||||||
|
base.ArrayExpression = function (node, st, c) {
|
||||||
|
for (var i = 0; i < node.elements.length; ++i) {
|
||||||
|
var elt = node.elements[i];
|
||||||
|
if (elt) c(elt, st, "Expression");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.ObjectExpression = function (node, st, c) {
|
||||||
|
for (var i = 0; i < node.properties.length; ++i) {
|
||||||
|
c(node.properties[i], st);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.FunctionExpression = base.ArrowFunctionExpression = base.FunctionDeclaration;
|
||||||
|
base.SequenceExpression = base.TemplateLiteral = function (node, st, c) {
|
||||||
|
for (var i = 0; i < node.expressions.length; ++i) {
|
||||||
|
c(node.expressions[i], st, "Expression");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.UnaryExpression = base.UpdateExpression = function (node, st, c) {
|
||||||
|
c(node.argument, st, "Expression");
|
||||||
|
};
|
||||||
|
base.BinaryExpression = base.LogicalExpression = function (node, st, c) {
|
||||||
|
c(node.left, st, "Expression");
|
||||||
|
c(node.right, st, "Expression");
|
||||||
|
};
|
||||||
|
base.AssignmentExpression = base.AssignmentPattern = function (node, st, c) {
|
||||||
|
c(node.left, st, "Pattern");
|
||||||
|
c(node.right, st, "Expression");
|
||||||
|
};
|
||||||
|
base.ConditionalExpression = function (node, st, c) {
|
||||||
|
c(node.test, st, "Expression");
|
||||||
|
c(node.consequent, st, "Expression");
|
||||||
|
c(node.alternate, st, "Expression");
|
||||||
|
};
|
||||||
|
base.NewExpression = base.CallExpression = function (node, st, c) {
|
||||||
|
c(node.callee, st, "Expression");
|
||||||
|
if (node.arguments) for (var i = 0; i < node.arguments.length; ++i) {
|
||||||
|
c(node.arguments[i], st, "Expression");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.MemberExpression = function (node, st, c) {
|
||||||
|
c(node.object, st, "Expression");
|
||||||
|
if (node.computed) c(node.property, st, "Expression");
|
||||||
|
};
|
||||||
|
base.ExportNamedDeclaration = base.ExportDefaultDeclaration = function (node, st, c) {
|
||||||
|
if (node.declaration) c(node.declaration, st, node.type == "ExportNamedDeclaration" || node.declaration.id ? "Statement" : "Expression");
|
||||||
|
if (node.source) c(node.source, st, "Expression");
|
||||||
|
};
|
||||||
|
base.ExportAllDeclaration = function (node, st, c) {
|
||||||
|
c(node.source, st, "Expression");
|
||||||
|
};
|
||||||
|
base.ImportDeclaration = function (node, st, c) {
|
||||||
|
for (var i = 0; i < node.specifiers.length; i++) {
|
||||||
|
c(node.specifiers[i], st);
|
||||||
|
}c(node.source, st, "Expression");
|
||||||
|
};
|
||||||
|
base.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.Literal = ignore;
|
||||||
|
|
||||||
|
base.TaggedTemplateExpression = function (node, st, c) {
|
||||||
|
c(node.tag, st, "Expression");
|
||||||
|
c(node.quasi, st);
|
||||||
|
};
|
||||||
|
base.ClassDeclaration = base.ClassExpression = function (node, st, c) {
|
||||||
|
return c(node, st, "Class");
|
||||||
|
};
|
||||||
|
base.Class = function (node, st, c) {
|
||||||
|
if (node.id) c(node.id, st, "Pattern");
|
||||||
|
if (node.superClass) c(node.superClass, st, "Expression");
|
||||||
|
for (var i = 0; i < node.body.body.length; i++) {
|
||||||
|
c(node.body.body[i], st);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.MethodDefinition = base.Property = function (node, st, c) {
|
||||||
|
if (node.computed) c(node.key, st, "Expression");
|
||||||
|
c(node.value, st, "Expression");
|
||||||
|
};
|
||||||
|
base.ComprehensionExpression = function (node, st, c) {
|
||||||
|
for (var i = 0; i < node.blocks.length; i++) {
|
||||||
|
c(node.blocks[i].right, st, "Expression");
|
||||||
|
}c(node.body, st, "Expression");
|
||||||
|
};
|
||||||
|
|
||||||
|
},{}]},{},[1])(1)
|
||||||
|
});
|
|
@ -0,0 +1,198 @@
|
||||||
|
{
|
||||||
|
"_from": "acorn@^2.1.0",
|
||||||
|
"_id": "acorn@2.7.0",
|
||||||
|
"_inBundle": false,
|
||||||
|
"_integrity": "sha1-q259nYhqrKiwhbwzEreaGYQz8Oc=",
|
||||||
|
"_location": "/acorn",
|
||||||
|
"_phantomChildren": {},
|
||||||
|
"_requested": {
|
||||||
|
"type": "range",
|
||||||
|
"registry": true,
|
||||||
|
"raw": "acorn@^2.1.0",
|
||||||
|
"name": "acorn",
|
||||||
|
"escapedName": "acorn",
|
||||||
|
"rawSpec": "^2.1.0",
|
||||||
|
"saveSpec": null,
|
||||||
|
"fetchSpec": "^2.1.0"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/acorn-globals",
|
||||||
|
"/constantinople"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/acorn/-/acorn-2.7.0.tgz",
|
||||||
|
"_shasum": "ab6e7d9d886aaca8b085bc3312b79a198433f0e7",
|
||||||
|
"_spec": "acorn@^2.1.0",
|
||||||
|
"_where": "C:\\Users\\oracle\\Desktop\\Forest\\kekhegy\\kekhegy\\node_modules\\constantinople",
|
||||||
|
"bin": {
|
||||||
|
"acorn": "./bin/acorn"
|
||||||
|
},
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/ternjs/acorn/issues"
|
||||||
|
},
|
||||||
|
"bundleDependencies": false,
|
||||||
|
"contributors": [
|
||||||
|
{
|
||||||
|
"name": "List of Acorn contributors. Updated before every release."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Adrian Rakovsky"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Alistair Braidwood"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Andres Suarez"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Aparajita Fishman"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Arian Stolwijk"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Artem Govorov"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Brandon Mills"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Charles Hughes"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Conrad Irwin"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "David Bonnet"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "ForbesLindesay"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Forbes Lindesay"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Gilad Peleg"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "impinball"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Ingvar Stepanyan"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Jesse McCarthy"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Jiaxing Wang"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Joel Kemp"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Johannes Herr"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Jürg Lehni"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "keeyipchan"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Kevin Kwok"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "krator"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Marijn Haverbeke"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Martin Carlberg"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Mathias Bynens"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Mathieu 'p01' Henri"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Max Schaefer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Max Zerzouri"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Mihai Bazon"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Mike Rennie"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Nick Fitzgerald"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Oskar Schöldström"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Paul Harper"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Peter Rust"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "PlNG"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "r-e-d"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Rich Harris"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Sebastian McKenzie"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Timothy Gu"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "zsjforcn"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"deprecated": false,
|
||||||
|
"description": "ECMAScript parser",
|
||||||
|
"devDependencies": {
|
||||||
|
"babel-core": "^5.6.15",
|
||||||
|
"babelify": "^6.1.2",
|
||||||
|
"browserify": "^10.2.4",
|
||||||
|
"browserify-derequire": "^0.9.4",
|
||||||
|
"unicode-7.0.0": "~0.1.5"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.4.0"
|
||||||
|
},
|
||||||
|
"homepage": "https://github.com/ternjs/acorn",
|
||||||
|
"license": "MIT",
|
||||||
|
"main": "dist/acorn.js",
|
||||||
|
"maintainers": [
|
||||||
|
{
|
||||||
|
"name": "Marijn Haverbeke",
|
||||||
|
"email": "marijnh@gmail.com",
|
||||||
|
"url": "http://marijnhaverbeke.nl"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Ingvar Stepanyan",
|
||||||
|
"email": "me@rreverser.com",
|
||||||
|
"url": "http://rreverser.com/"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"name": "acorn",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/ternjs/acorn.git"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"prepublish": "node bin/build-acorn.js",
|
||||||
|
"test": "node test/run.js"
|
||||||
|
},
|
||||||
|
"version": "2.7.0"
|
||||||
|
}
|
|
@ -0,0 +1,59 @@
#!/usr/bin/env node

import {basename} from "path"
import {readFileSync as readFile} from "fs"
import * as acorn from "../dist/acorn.js"

let infile, forceFile, silent = false, compact = false, tokenize = false
const options = {}

function help(status) {
  const print = (status == 0) ? console.log : console.error
  print("usage: " + basename(process.argv[1]) + " [--ecma3|--ecma5|--ecma6]")
  print("        [--tokenize] [--locations] [--allow-hash-bang] [--compact] [--silent] [--module] [--help] [--] [infile]")
  process.exit(status)
}

for (let i = 2; i < process.argv.length; ++i) {
  const arg = process.argv[i]
  if ((arg == "-" || arg[0] != "-") && !infile) infile = arg
  else if (arg == "--" && !infile && i + 2 == process.argv.length) forceFile = infile = process.argv[++i]
  else if (arg == "--ecma3") options.ecmaVersion = 3
  else if (arg == "--ecma5") options.ecmaVersion = 5
  else if (arg == "--ecma6") options.ecmaVersion = 6
  else if (arg == "--locations") options.locations = true
  else if (arg == "--allow-hash-bang") options.allowHashBang = true
  else if (arg == "--silent") silent = true
  else if (arg == "--compact") compact = true
  else if (arg == "--help") help(0)
  else if (arg == "--tokenize") tokenize = true
  else if (arg == "--module") options.sourceType = 'module'
  else help(1)
}

function run(code) {
  let result
  if (!tokenize) {
    try { result = acorn.parse(code, options) }
    catch(e) { console.error(e.message); process.exit(1) }
  } else {
    result = []
    let tokenizer = acorn.tokenizer(code, options), token
    while (true) {
      try { token = tokenizer.getToken() }
      catch(e) { console.error(e.message); process.exit(1) }
      result.push(token)
      if (token.type == acorn.tokTypes.eof) break
    }
  }
  if (!silent) console.log(JSON.stringify(result, null, compact ? null : 2))
}

if (forceFile || infile && infile != "-") {
  run(readFile(infile, "utf8"))
} else {
  let code = ""
  process.stdin.resume()
  process.stdin.on("data", chunk => code += chunk)
  process.stdin.on("end", () => run(code))
}

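A quick way to exercise the CLI above, sketched under the assumption that the package has been built so the bin script and dist/acorn.js exist; the "bin/acorn" path and the sample snippet are hypothetical, not part of the diff:

// Hypothetical driver: feeds a snippet to the CLI on stdin ("-") and reads the AST back.
const {execFileSync} = require("child_process")
const out = execFileSync("node", ["bin/acorn", "--ecma6", "--locations", "--compact", "-"],
                         {input: "let x = 1 + 2"})
console.log(JSON.parse(out.toString()).type)  // => "Program"
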
@ -0,0 +1,707 @@
// A recursive descent parser operates by defining functions for all
// syntactic elements, and recursively calling those, each function
// advancing the input stream and returning an AST node. Precedence
// of constructs (for example, the fact that `!x[1]` means `!(x[1])`
// instead of `(!x)[1]`) is handled by the fact that the parser
// function that parses unary prefix operators is called first, and
// in turn calls the function that parses `[]` subscripts — that
// way, it'll receive the node for `x[1]` already parsed, and wraps
// *that* in the unary operator node.
//
// Acorn uses an [operator precedence parser][opp] to handle binary
// operator precedence, because it is much more compact than using
// the technique outlined above, which uses different, nesting
// functions to specify precedence, for all of the ten binary
// precedence levels that JavaScript defines.
//
// [opp]: http://en.wikipedia.org/wiki/Operator-precedence_parser

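// --- Illustrative aside (not part of acorn): a minimal operator-precedence
// ("precedence climbing") parser over an assumed token stream, to make the
// idea behind parseExprOp below concrete. `tokens` is a hypothetical array
// like [1, "+", 2, "*", 3]; numbers are operands, strings are binary operators.
function tinyExprParser(tokens) {
  const binop = {"+": 1, "-": 1, "*": 2, "/": 2}    // precedence table
  let pos = 0
  function parseAtom() { return {type: "Literal", value: tokens[pos++]} }
  function parseExprOp(left, minPrec) {
    for (;;) {
      const op = tokens[pos], prec = binop[op]
      if (prec == null || prec <= minPrec) return left
      pos++                                          // consume the operator
      // operands of higher-precedence operators are grouped first
      const right = parseExprOp(parseAtom(), prec)
      left = {type: "BinaryExpression", operator: op, left, right}
    }
  }
  return parseExprOp(parseAtom(), -1)
}
// tinyExprParser([1, "+", 2, "*", 3]) groups the result as 1 + (2 * 3).
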
import {types as tt} from "./tokentype"
|
||||||
|
import {Parser} from "./state"
|
||||||
|
|
||||||
|
const pp = Parser.prototype
|
||||||
|
|
||||||
|
// Check if property name clashes with already added.
|
||||||
|
// Object/class getters and setters are not allowed to clash —
|
||||||
|
// either with each other or with an init property — and in
|
||||||
|
// strict mode, init properties are also not allowed to be repeated.
|
||||||
|
|
||||||
|
pp.checkPropClash = function(prop, propHash) {
|
||||||
|
if (this.options.ecmaVersion >= 6 && (prop.computed || prop.method || prop.shorthand))
|
||||||
|
return
|
||||||
|
let {key} = prop, name
|
||||||
|
switch (key.type) {
|
||||||
|
case "Identifier": name = key.name; break
|
||||||
|
case "Literal": name = String(key.value); break
|
||||||
|
default: return
|
||||||
|
}
|
||||||
|
let {kind} = prop
|
||||||
|
if (this.options.ecmaVersion >= 6) {
|
||||||
|
if (name === "__proto__" && kind === "init") {
|
||||||
|
if (propHash.proto) this.raise(key.start, "Redefinition of __proto__ property");
|
||||||
|
propHash.proto = true
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
name = "$" + name
|
||||||
|
let other = propHash[name]
|
||||||
|
if (other) {
|
||||||
|
let isGetSet = kind !== "init"
|
||||||
|
if ((this.strict || isGetSet) && other[kind] || !(isGetSet ^ other.init))
|
||||||
|
this.raise(key.start, "Redefinition of property")
|
||||||
|
} else {
|
||||||
|
other = propHash[name] = {
|
||||||
|
init: false,
|
||||||
|
get: false,
|
||||||
|
set: false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
other[kind] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// ### Expression parsing
|
||||||
|
|
||||||
|
// These nest, from the most general expression type at the top to
|
||||||
|
// 'atomic', nondivisible expression types at the bottom. Most of
|
||||||
|
// the functions will simply let the function(s) below them parse,
|
||||||
|
// and, *if* the syntactic construct they handle is present, wrap
|
||||||
|
// the AST node that the inner parser gave them in another node.
|
||||||
|
|
||||||
|
// Parse a full expression. The optional arguments are used to
|
||||||
|
// forbid the `in` operator (in for loops initalization expressions)
|
||||||
|
// and provide reference for storing '=' operator inside shorthand
|
||||||
|
// property assignment in contexts where both object expression
|
||||||
|
// and object pattern might appear (so it's possible to raise
|
||||||
|
// delayed syntax error at correct position).
|
||||||
|
|
||||||
|
pp.parseExpression = function(noIn, refDestructuringErrors) {
|
||||||
|
let startPos = this.start, startLoc = this.startLoc
|
||||||
|
let expr = this.parseMaybeAssign(noIn, refDestructuringErrors)
|
||||||
|
if (this.type === tt.comma) {
|
||||||
|
let node = this.startNodeAt(startPos, startLoc)
|
||||||
|
node.expressions = [expr]
|
||||||
|
while (this.eat(tt.comma)) node.expressions.push(this.parseMaybeAssign(noIn, refDestructuringErrors))
|
||||||
|
return this.finishNode(node, "SequenceExpression")
|
||||||
|
}
|
||||||
|
return expr
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse an assignment expression. This includes applications of
|
||||||
|
// operators like `+=`.
|
||||||
|
|
||||||
|
pp.parseMaybeAssign = function(noIn, refDestructuringErrors, afterLeftParse) {
|
||||||
|
if (this.type == tt._yield && this.inGenerator) return this.parseYield()
|
||||||
|
|
||||||
|
let validateDestructuring = false
|
||||||
|
if (!refDestructuringErrors) {
|
||||||
|
refDestructuringErrors = {shorthandAssign: 0, trailingComma: 0}
|
||||||
|
validateDestructuring = true
|
||||||
|
}
|
||||||
|
let startPos = this.start, startLoc = this.startLoc
|
||||||
|
if (this.type == tt.parenL || this.type == tt.name)
|
||||||
|
this.potentialArrowAt = this.start
|
||||||
|
let left = this.parseMaybeConditional(noIn, refDestructuringErrors)
|
||||||
|
if (afterLeftParse) left = afterLeftParse.call(this, left, startPos, startLoc)
|
||||||
|
if (this.type.isAssign) {
|
||||||
|
if (validateDestructuring) this.checkPatternErrors(refDestructuringErrors, true)
|
||||||
|
let node = this.startNodeAt(startPos, startLoc)
|
||||||
|
node.operator = this.value
|
||||||
|
node.left = this.type === tt.eq ? this.toAssignable(left) : left
|
||||||
|
refDestructuringErrors.shorthandAssign = 0 // reset because shorthand default was used correctly
|
||||||
|
this.checkLVal(left)
|
||||||
|
this.next()
|
||||||
|
node.right = this.parseMaybeAssign(noIn)
|
||||||
|
return this.finishNode(node, "AssignmentExpression")
|
||||||
|
} else {
|
||||||
|
if (validateDestructuring) this.checkExpressionErrors(refDestructuringErrors, true)
|
||||||
|
}
|
||||||
|
return left
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse a ternary conditional (`?:`) operator.
|
||||||
|
|
||||||
|
pp.parseMaybeConditional = function(noIn, refDestructuringErrors) {
|
||||||
|
let startPos = this.start, startLoc = this.startLoc
|
||||||
|
let expr = this.parseExprOps(noIn, refDestructuringErrors)
|
||||||
|
if (this.checkExpressionErrors(refDestructuringErrors)) return expr
|
||||||
|
if (this.eat(tt.question)) {
|
||||||
|
let node = this.startNodeAt(startPos, startLoc)
|
||||||
|
node.test = expr
|
||||||
|
node.consequent = this.parseMaybeAssign()
|
||||||
|
this.expect(tt.colon)
|
||||||
|
node.alternate = this.parseMaybeAssign(noIn)
|
||||||
|
return this.finishNode(node, "ConditionalExpression")
|
||||||
|
}
|
||||||
|
return expr
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start the precedence parser.
|
||||||
|
|
||||||
|
pp.parseExprOps = function(noIn, refDestructuringErrors) {
|
||||||
|
let startPos = this.start, startLoc = this.startLoc
|
||||||
|
let expr = this.parseMaybeUnary(refDestructuringErrors)
|
||||||
|
if (this.checkExpressionErrors(refDestructuringErrors)) return expr
|
||||||
|
return this.parseExprOp(expr, startPos, startLoc, -1, noIn)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse binary operators with the operator precedence parsing
|
||||||
|
// algorithm. `left` is the left-hand side of the operator.
|
||||||
|
// `minPrec` provides context that allows the function to stop and
|
||||||
|
// defer further parser to one of its callers when it encounters an
|
||||||
|
// operator that has a lower precedence than the set it is parsing.
|
||||||
|
|
||||||
|
pp.parseExprOp = function(left, leftStartPos, leftStartLoc, minPrec, noIn) {
|
||||||
|
let prec = this.type.binop
|
||||||
|
if (prec != null && (!noIn || this.type !== tt._in)) {
|
||||||
|
if (prec > minPrec) {
|
||||||
|
let node = this.startNodeAt(leftStartPos, leftStartLoc)
|
||||||
|
node.left = left
|
||||||
|
node.operator = this.value
|
||||||
|
let op = this.type
|
||||||
|
this.next()
|
||||||
|
let startPos = this.start, startLoc = this.startLoc
|
||||||
|
node.right = this.parseExprOp(this.parseMaybeUnary(), startPos, startLoc, prec, noIn)
|
||||||
|
this.finishNode(node, (op === tt.logicalOR || op === tt.logicalAND) ? "LogicalExpression" : "BinaryExpression")
|
||||||
|
return this.parseExprOp(node, leftStartPos, leftStartLoc, minPrec, noIn)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return left
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse unary operators, both prefix and postfix.
|
||||||
|
|
||||||
|
pp.parseMaybeUnary = function(refDestructuringErrors) {
|
||||||
|
if (this.type.prefix) {
|
||||||
|
let node = this.startNode(), update = this.type === tt.incDec
|
||||||
|
node.operator = this.value
|
||||||
|
node.prefix = true
|
||||||
|
this.next()
|
||||||
|
node.argument = this.parseMaybeUnary()
|
||||||
|
this.checkExpressionErrors(refDestructuringErrors, true)
|
||||||
|
if (update) this.checkLVal(node.argument)
|
||||||
|
else if (this.strict && node.operator === "delete" &&
|
||||||
|
node.argument.type === "Identifier")
|
||||||
|
this.raise(node.start, "Deleting local variable in strict mode")
|
||||||
|
return this.finishNode(node, update ? "UpdateExpression" : "UnaryExpression")
|
||||||
|
}
|
||||||
|
let startPos = this.start, startLoc = this.startLoc
|
||||||
|
let expr = this.parseExprSubscripts(refDestructuringErrors)
|
||||||
|
if (this.checkExpressionErrors(refDestructuringErrors)) return expr
|
||||||
|
while (this.type.postfix && !this.canInsertSemicolon()) {
|
||||||
|
let node = this.startNodeAt(startPos, startLoc)
|
||||||
|
node.operator = this.value
|
||||||
|
node.prefix = false
|
||||||
|
node.argument = expr
|
||||||
|
this.checkLVal(expr)
|
||||||
|
this.next()
|
||||||
|
expr = this.finishNode(node, "UpdateExpression")
|
||||||
|
}
|
||||||
|
return expr
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse call, dot, and `[]`-subscript expressions.
|
||||||
|
|
||||||
|
pp.parseExprSubscripts = function(refDestructuringErrors) {
|
||||||
|
let startPos = this.start, startLoc = this.startLoc
|
||||||
|
let expr = this.parseExprAtom(refDestructuringErrors)
|
||||||
|
let skipArrowSubscripts = expr.type === "ArrowFunctionExpression" && this.input.slice(this.lastTokStart, this.lastTokEnd) !== ")";
|
||||||
|
if (this.checkExpressionErrors(refDestructuringErrors) || skipArrowSubscripts) return expr
|
||||||
|
return this.parseSubscripts(expr, startPos, startLoc)
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseSubscripts = function(base, startPos, startLoc, noCalls) {
|
||||||
|
for (;;) {
|
||||||
|
if (this.eat(tt.dot)) {
|
||||||
|
let node = this.startNodeAt(startPos, startLoc)
|
||||||
|
node.object = base
|
||||||
|
node.property = this.parseIdent(true)
|
||||||
|
node.computed = false
|
||||||
|
base = this.finishNode(node, "MemberExpression")
|
||||||
|
} else if (this.eat(tt.bracketL)) {
|
||||||
|
let node = this.startNodeAt(startPos, startLoc)
|
||||||
|
node.object = base
|
||||||
|
node.property = this.parseExpression()
|
||||||
|
node.computed = true
|
||||||
|
this.expect(tt.bracketR)
|
||||||
|
base = this.finishNode(node, "MemberExpression")
|
||||||
|
} else if (!noCalls && this.eat(tt.parenL)) {
|
||||||
|
let node = this.startNodeAt(startPos, startLoc)
|
||||||
|
node.callee = base
|
||||||
|
node.arguments = this.parseExprList(tt.parenR, false)
|
||||||
|
base = this.finishNode(node, "CallExpression")
|
||||||
|
} else if (this.type === tt.backQuote) {
|
||||||
|
let node = this.startNodeAt(startPos, startLoc)
|
||||||
|
node.tag = base
|
||||||
|
node.quasi = this.parseTemplate()
|
||||||
|
base = this.finishNode(node, "TaggedTemplateExpression")
|
||||||
|
} else {
|
||||||
|
return base
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse an atomic expression — either a single token that is an
|
||||||
|
// expression, an expression started by a keyword like `function` or
|
||||||
|
// `new`, or an expression wrapped in punctuation like `()`, `[]`,
|
||||||
|
// or `{}`.
|
||||||
|
|
||||||
|
pp.parseExprAtom = function(refDestructuringErrors) {
|
||||||
|
let node, canBeArrow = this.potentialArrowAt == this.start
|
||||||
|
switch (this.type) {
|
||||||
|
case tt._super:
|
||||||
|
if (!this.inFunction)
|
||||||
|
this.raise(this.start, "'super' outside of function or class")
|
||||||
|
case tt._this:
|
||||||
|
let type = this.type === tt._this ? "ThisExpression" : "Super"
|
||||||
|
node = this.startNode()
|
||||||
|
this.next()
|
||||||
|
return this.finishNode(node, type)
|
||||||
|
|
||||||
|
case tt._yield:
|
||||||
|
if (this.inGenerator) this.unexpected()
|
||||||
|
|
||||||
|
case tt.name:
|
||||||
|
let startPos = this.start, startLoc = this.startLoc
|
||||||
|
let id = this.parseIdent(this.type !== tt.name)
|
||||||
|
if (canBeArrow && !this.canInsertSemicolon() && this.eat(tt.arrow))
|
||||||
|
return this.parseArrowExpression(this.startNodeAt(startPos, startLoc), [id])
|
||||||
|
return id
|
||||||
|
|
||||||
|
case tt.regexp:
|
||||||
|
let value = this.value
|
||||||
|
node = this.parseLiteral(value.value)
|
||||||
|
node.regex = {pattern: value.pattern, flags: value.flags}
|
||||||
|
return node
|
||||||
|
|
||||||
|
case tt.num: case tt.string:
|
||||||
|
return this.parseLiteral(this.value)
|
||||||
|
|
||||||
|
case tt._null: case tt._true: case tt._false:
|
||||||
|
node = this.startNode()
|
||||||
|
node.value = this.type === tt._null ? null : this.type === tt._true
|
||||||
|
node.raw = this.type.keyword
|
||||||
|
this.next()
|
||||||
|
return this.finishNode(node, "Literal")
|
||||||
|
|
||||||
|
case tt.parenL:
|
||||||
|
return this.parseParenAndDistinguishExpression(canBeArrow)
|
||||||
|
|
||||||
|
case tt.bracketL:
|
||||||
|
node = this.startNode()
|
||||||
|
this.next()
|
||||||
|
// check whether this is array comprehension or regular array
|
||||||
|
if (this.options.ecmaVersion >= 7 && this.type === tt._for) {
|
||||||
|
return this.parseComprehension(node, false)
|
||||||
|
}
|
||||||
|
node.elements = this.parseExprList(tt.bracketR, true, true, refDestructuringErrors)
|
||||||
|
return this.finishNode(node, "ArrayExpression")
|
||||||
|
|
||||||
|
case tt.braceL:
|
||||||
|
return this.parseObj(false, refDestructuringErrors)
|
||||||
|
|
||||||
|
case tt._function:
|
||||||
|
node = this.startNode()
|
||||||
|
this.next()
|
||||||
|
return this.parseFunction(node, false)
|
||||||
|
|
||||||
|
case tt._class:
|
||||||
|
return this.parseClass(this.startNode(), false)
|
||||||
|
|
||||||
|
case tt._new:
|
||||||
|
return this.parseNew()
|
||||||
|
|
||||||
|
case tt.backQuote:
|
||||||
|
return this.parseTemplate()
|
||||||
|
|
||||||
|
default:
|
||||||
|
this.unexpected()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseLiteral = function(value) {
|
||||||
|
let node = this.startNode()
|
||||||
|
node.value = value
|
||||||
|
node.raw = this.input.slice(this.start, this.end)
|
||||||
|
this.next()
|
||||||
|
return this.finishNode(node, "Literal")
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseParenExpression = function() {
|
||||||
|
this.expect(tt.parenL)
|
||||||
|
let val = this.parseExpression()
|
||||||
|
this.expect(tt.parenR)
|
||||||
|
return val
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseParenAndDistinguishExpression = function(canBeArrow) {
|
||||||
|
let startPos = this.start, startLoc = this.startLoc, val
|
||||||
|
if (this.options.ecmaVersion >= 6) {
|
||||||
|
this.next()
|
||||||
|
|
||||||
|
if (this.options.ecmaVersion >= 7 && this.type === tt._for) {
|
||||||
|
return this.parseComprehension(this.startNodeAt(startPos, startLoc), true)
|
||||||
|
}
|
||||||
|
|
||||||
|
let innerStartPos = this.start, innerStartLoc = this.startLoc
|
||||||
|
let exprList = [], first = true
|
||||||
|
let refDestructuringErrors = {shorthandAssign: 0, trailingComma: 0}, spreadStart, innerParenStart
|
||||||
|
while (this.type !== tt.parenR) {
|
||||||
|
first ? first = false : this.expect(tt.comma)
|
||||||
|
if (this.type === tt.ellipsis) {
|
||||||
|
spreadStart = this.start
|
||||||
|
exprList.push(this.parseParenItem(this.parseRest()))
|
||||||
|
break
|
||||||
|
} else {
|
||||||
|
if (this.type === tt.parenL && !innerParenStart) {
|
||||||
|
innerParenStart = this.start
|
||||||
|
}
|
||||||
|
exprList.push(this.parseMaybeAssign(false, refDestructuringErrors, this.parseParenItem))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let innerEndPos = this.start, innerEndLoc = this.startLoc
|
||||||
|
this.expect(tt.parenR)
|
||||||
|
|
||||||
|
if (canBeArrow && !this.canInsertSemicolon() && this.eat(tt.arrow)) {
|
||||||
|
this.checkPatternErrors(refDestructuringErrors, true)
|
||||||
|
if (innerParenStart) this.unexpected(innerParenStart)
|
||||||
|
return this.parseParenArrowList(startPos, startLoc, exprList)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!exprList.length) this.unexpected(this.lastTokStart)
|
||||||
|
if (spreadStart) this.unexpected(spreadStart)
|
||||||
|
this.checkExpressionErrors(refDestructuringErrors, true)
|
||||||
|
|
||||||
|
if (exprList.length > 1) {
|
||||||
|
val = this.startNodeAt(innerStartPos, innerStartLoc)
|
||||||
|
val.expressions = exprList
|
||||||
|
this.finishNodeAt(val, "SequenceExpression", innerEndPos, innerEndLoc)
|
||||||
|
} else {
|
||||||
|
val = exprList[0]
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
val = this.parseParenExpression()
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.options.preserveParens) {
|
||||||
|
let par = this.startNodeAt(startPos, startLoc)
|
||||||
|
par.expression = val
|
||||||
|
return this.finishNode(par, "ParenthesizedExpression")
|
||||||
|
} else {
|
||||||
|
return val
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseParenItem = function(item) {
|
||||||
|
return item
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseParenArrowList = function(startPos, startLoc, exprList) {
|
||||||
|
return this.parseArrowExpression(this.startNodeAt(startPos, startLoc), exprList)
|
||||||
|
}
|
||||||
|
|
||||||
|
// New's precedence is slightly tricky. It must allow its argument to
|
||||||
|
// be a `[]` or dot subscript expression, but not a call — at least,
|
||||||
|
// not without wrapping it in parentheses. Thus, it uses the noCalls
|
||||||
|
// argument to parseSubscripts to prevent it from consuming the
|
||||||
|
// argument list.
|
||||||
|
|
||||||
|
const empty = []
|
||||||
|
|
||||||
|
pp.parseNew = function() {
|
||||||
|
let node = this.startNode()
|
||||||
|
let meta = this.parseIdent(true)
|
||||||
|
if (this.options.ecmaVersion >= 6 && this.eat(tt.dot)) {
|
||||||
|
node.meta = meta
|
||||||
|
node.property = this.parseIdent(true)
|
||||||
|
if (node.property.name !== "target")
|
||||||
|
this.raise(node.property.start, "The only valid meta property for new is new.target")
|
||||||
|
if (!this.inFunction)
|
||||||
|
this.raise(node.start, "new.target can only be used in functions")
|
||||||
|
return this.finishNode(node, "MetaProperty")
|
||||||
|
}
|
||||||
|
let startPos = this.start, startLoc = this.startLoc
|
||||||
|
node.callee = this.parseSubscripts(this.parseExprAtom(), startPos, startLoc, true)
|
||||||
|
if (this.eat(tt.parenL)) node.arguments = this.parseExprList(tt.parenR, false)
|
||||||
|
else node.arguments = empty
|
||||||
|
return this.finishNode(node, "NewExpression")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse template expression.
|
||||||
|
|
||||||
|
pp.parseTemplateElement = function() {
|
||||||
|
let elem = this.startNode()
|
||||||
|
elem.value = {
|
||||||
|
raw: this.input.slice(this.start, this.end).replace(/\r\n?/g, '\n'),
|
||||||
|
cooked: this.value
|
||||||
|
}
|
||||||
|
this.next()
|
||||||
|
elem.tail = this.type === tt.backQuote
|
||||||
|
return this.finishNode(elem, "TemplateElement")
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseTemplate = function() {
|
||||||
|
let node = this.startNode()
|
||||||
|
this.next()
|
||||||
|
node.expressions = []
|
||||||
|
let curElt = this.parseTemplateElement()
|
||||||
|
node.quasis = [curElt]
|
||||||
|
while (!curElt.tail) {
|
||||||
|
this.expect(tt.dollarBraceL)
|
||||||
|
node.expressions.push(this.parseExpression())
|
||||||
|
this.expect(tt.braceR)
|
||||||
|
node.quasis.push(curElt = this.parseTemplateElement())
|
||||||
|
}
|
||||||
|
this.next()
|
||||||
|
return this.finishNode(node, "TemplateLiteral")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse an object literal or binding pattern.
|
||||||
|
|
||||||
|
pp.parseObj = function(isPattern, refDestructuringErrors) {
|
||||||
|
let node = this.startNode(), first = true, propHash = {}
|
||||||
|
node.properties = []
|
||||||
|
this.next()
|
||||||
|
while (!this.eat(tt.braceR)) {
|
||||||
|
if (!first) {
|
||||||
|
this.expect(tt.comma)
|
||||||
|
if (this.afterTrailingComma(tt.braceR)) break
|
||||||
|
} else first = false
|
||||||
|
|
||||||
|
let prop = this.startNode(), isGenerator, startPos, startLoc
|
||||||
|
if (this.options.ecmaVersion >= 6) {
|
||||||
|
prop.method = false
|
||||||
|
prop.shorthand = false
|
||||||
|
if (isPattern || refDestructuringErrors) {
|
||||||
|
startPos = this.start
|
||||||
|
startLoc = this.startLoc
|
||||||
|
}
|
||||||
|
if (!isPattern)
|
||||||
|
isGenerator = this.eat(tt.star)
|
||||||
|
}
|
||||||
|
this.parsePropertyName(prop)
|
||||||
|
this.parsePropertyValue(prop, isPattern, isGenerator, startPos, startLoc, refDestructuringErrors)
|
||||||
|
this.checkPropClash(prop, propHash)
|
||||||
|
node.properties.push(this.finishNode(prop, "Property"))
|
||||||
|
}
|
||||||
|
return this.finishNode(node, isPattern ? "ObjectPattern" : "ObjectExpression")
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parsePropertyValue = function(prop, isPattern, isGenerator, startPos, startLoc, refDestructuringErrors) {
|
||||||
|
if (this.eat(tt.colon)) {
|
||||||
|
prop.value = isPattern ? this.parseMaybeDefault(this.start, this.startLoc) : this.parseMaybeAssign(false, refDestructuringErrors)
|
||||||
|
prop.kind = "init"
|
||||||
|
} else if (this.options.ecmaVersion >= 6 && this.type === tt.parenL) {
|
||||||
|
if (isPattern) this.unexpected()
|
||||||
|
prop.kind = "init"
|
||||||
|
prop.method = true
|
||||||
|
prop.value = this.parseMethod(isGenerator)
|
||||||
|
} else if (this.options.ecmaVersion >= 5 && !prop.computed && prop.key.type === "Identifier" &&
|
||||||
|
(prop.key.name === "get" || prop.key.name === "set") &&
|
||||||
|
(this.type != tt.comma && this.type != tt.braceR)) {
|
||||||
|
if (isGenerator || isPattern) this.unexpected()
|
||||||
|
prop.kind = prop.key.name
|
||||||
|
this.parsePropertyName(prop)
|
||||||
|
prop.value = this.parseMethod(false)
|
||||||
|
let paramCount = prop.kind === "get" ? 0 : 1
|
||||||
|
if (prop.value.params.length !== paramCount) {
|
||||||
|
let start = prop.value.start
|
||||||
|
if (prop.kind === "get")
|
||||||
|
this.raise(start, "getter should have no params");
|
||||||
|
else
|
||||||
|
this.raise(start, "setter should have exactly one param")
|
||||||
|
}
|
||||||
|
if (prop.kind === "set" && prop.value.params[0].type === "RestElement")
|
||||||
|
this.raise(prop.value.params[0].start, "Setter cannot use rest params")
|
||||||
|
} else if (this.options.ecmaVersion >= 6 && !prop.computed && prop.key.type === "Identifier") {
|
||||||
|
prop.kind = "init"
|
||||||
|
if (isPattern) {
|
||||||
|
if (this.keywords.test(prop.key.name) ||
|
||||||
|
(this.strict ? this.reservedWordsStrictBind : this.reservedWords).test(prop.key.name))
|
||||||
|
this.raise(prop.key.start, "Binding " + prop.key.name)
|
||||||
|
prop.value = this.parseMaybeDefault(startPos, startLoc, prop.key)
|
||||||
|
} else if (this.type === tt.eq && refDestructuringErrors) {
|
||||||
|
if (!refDestructuringErrors.shorthandAssign)
|
||||||
|
refDestructuringErrors.shorthandAssign = this.start
|
||||||
|
prop.value = this.parseMaybeDefault(startPos, startLoc, prop.key)
|
||||||
|
} else {
|
||||||
|
prop.value = prop.key
|
||||||
|
}
|
||||||
|
prop.shorthand = true
|
||||||
|
} else this.unexpected()
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parsePropertyName = function(prop) {
|
||||||
|
if (this.options.ecmaVersion >= 6) {
|
||||||
|
if (this.eat(tt.bracketL)) {
|
||||||
|
prop.computed = true
|
||||||
|
prop.key = this.parseMaybeAssign()
|
||||||
|
this.expect(tt.bracketR)
|
||||||
|
return prop.key
|
||||||
|
} else {
|
||||||
|
prop.computed = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return prop.key = (this.type === tt.num || this.type === tt.string) ? this.parseExprAtom() : this.parseIdent(true)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize empty function node.
|
||||||
|
|
||||||
|
pp.initFunction = function(node) {
|
||||||
|
node.id = null
|
||||||
|
if (this.options.ecmaVersion >= 6) {
|
||||||
|
node.generator = false
|
||||||
|
node.expression = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse object or class method.
|
||||||
|
|
||||||
|
pp.parseMethod = function(isGenerator) {
|
||||||
|
let node = this.startNode()
|
||||||
|
this.initFunction(node)
|
||||||
|
this.expect(tt.parenL)
|
||||||
|
node.params = this.parseBindingList(tt.parenR, false, false)
|
||||||
|
if (this.options.ecmaVersion >= 6)
|
||||||
|
node.generator = isGenerator
|
||||||
|
this.parseFunctionBody(node, false)
|
||||||
|
return this.finishNode(node, "FunctionExpression")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse arrow function expression with given parameters.
|
||||||
|
|
||||||
|
pp.parseArrowExpression = function(node, params) {
|
||||||
|
this.initFunction(node)
|
||||||
|
node.params = this.toAssignableList(params, true)
|
||||||
|
this.parseFunctionBody(node, true)
|
||||||
|
return this.finishNode(node, "ArrowFunctionExpression")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse function body and check parameters.
|
||||||
|
|
||||||
|
pp.parseFunctionBody = function(node, isArrowFunction) {
|
||||||
|
let isExpression = isArrowFunction && this.type !== tt.braceL
|
||||||
|
|
||||||
|
if (isExpression) {
|
||||||
|
node.body = this.parseMaybeAssign()
|
||||||
|
node.expression = true
|
||||||
|
} else {
|
||||||
|
// Start a new scope with regard to labels and the `inFunction`
|
||||||
|
// flag (restore them to their old value afterwards).
|
||||||
|
let oldInFunc = this.inFunction, oldInGen = this.inGenerator, oldLabels = this.labels
|
||||||
|
this.inFunction = true; this.inGenerator = node.generator; this.labels = []
|
||||||
|
node.body = this.parseBlock(true)
|
||||||
|
node.expression = false
|
||||||
|
this.inFunction = oldInFunc; this.inGenerator = oldInGen; this.labels = oldLabels
|
||||||
|
}
|
||||||
|
|
||||||
|
// If this is a strict mode function, verify that argument names
|
||||||
|
// are not repeated, and it does not try to bind the words `eval`
|
||||||
|
// or `arguments`.
|
||||||
|
if (this.strict || !isExpression && node.body.body.length && this.isUseStrict(node.body.body[0])) {
|
||||||
|
let oldStrict = this.strict
|
||||||
|
this.strict = true
|
||||||
|
if (node.id)
|
||||||
|
this.checkLVal(node.id, true)
|
||||||
|
this.checkParams(node);
|
||||||
|
this.strict = oldStrict
|
||||||
|
} else if (isArrowFunction) {
|
||||||
|
this.checkParams(node);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Checks function params for various disallowed patterns such as using "eval"
|
||||||
|
// or "arguments" and duplicate parameters.
|
||||||
|
|
||||||
|
pp.checkParams = function(node) {
|
||||||
|
let nameHash = {};
|
||||||
|
for (let i = 0; i < node.params.length; i++)
|
||||||
|
this.checkLVal(node.params[i], true, nameHash)
|
||||||
|
};
|
||||||
|
|
||||||
|
// Parses a comma-separated list of expressions, and returns them as
|
||||||
|
// an array. `close` is the token type that ends the list, and
|
||||||
|
// `allowEmpty` can be turned on to allow subsequent commas with
|
||||||
|
// nothing in between them to be parsed as `null` (which is needed
|
||||||
|
// for array literals).
|
||||||
|
|
||||||
|
pp.parseExprList = function(close, allowTrailingComma, allowEmpty, refDestructuringErrors) {
|
||||||
|
let elts = [], first = true
|
||||||
|
while (!this.eat(close)) {
|
||||||
|
if (!first) {
|
||||||
|
this.expect(tt.comma)
|
||||||
|
if (this.type === close && refDestructuringErrors && !refDestructuringErrors.trailingComma) {
|
||||||
|
refDestructuringErrors.trailingComma = this.lastTokStart
|
||||||
|
}
|
||||||
|
if (allowTrailingComma && this.afterTrailingComma(close)) break
|
||||||
|
} else first = false
|
||||||
|
|
||||||
|
let elt
|
||||||
|
if (allowEmpty && this.type === tt.comma)
|
||||||
|
elt = null
|
||||||
|
else if (this.type === tt.ellipsis)
|
||||||
|
elt = this.parseSpread(refDestructuringErrors)
|
||||||
|
else
|
||||||
|
elt = this.parseMaybeAssign(false, refDestructuringErrors)
|
||||||
|
elts.push(elt)
|
||||||
|
}
|
||||||
|
return elts
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse the next token as an identifier. If `liberal` is true (used
|
||||||
|
// when parsing properties), it will also convert keywords into
|
||||||
|
// identifiers.
|
||||||
|
|
||||||
|
pp.parseIdent = function(liberal) {
|
||||||
|
let node = this.startNode()
|
||||||
|
if (liberal && this.options.allowReserved == "never") liberal = false
|
||||||
|
if (this.type === tt.name) {
|
||||||
|
if (!liberal && (this.strict ? this.reservedWordsStrict : this.reservedWords).test(this.value) &&
|
||||||
|
(this.options.ecmaVersion >= 6 ||
|
||||||
|
this.input.slice(this.start, this.end).indexOf("\\") == -1))
|
||||||
|
this.raise(this.start, "The keyword '" + this.value + "' is reserved")
|
||||||
|
node.name = this.value
|
||||||
|
} else if (liberal && this.type.keyword) {
|
||||||
|
node.name = this.type.keyword
|
||||||
|
} else {
|
||||||
|
this.unexpected()
|
||||||
|
}
|
||||||
|
this.next()
|
||||||
|
return this.finishNode(node, "Identifier")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parses yield expression inside generator.
|
||||||
|
|
||||||
|
pp.parseYield = function() {
|
||||||
|
let node = this.startNode()
|
||||||
|
this.next()
|
||||||
|
if (this.type == tt.semi || this.canInsertSemicolon() || (this.type != tt.star && !this.type.startsExpr)) {
|
||||||
|
node.delegate = false
|
||||||
|
node.argument = null
|
||||||
|
} else {
|
||||||
|
node.delegate = this.eat(tt.star)
|
||||||
|
node.argument = this.parseMaybeAssign()
|
||||||
|
}
|
||||||
|
return this.finishNode(node, "YieldExpression")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parses array and generator comprehensions.
|
||||||
|
|
||||||
|
pp.parseComprehension = function(node, isGenerator) {
|
||||||
|
node.blocks = []
|
||||||
|
while (this.type === tt._for) {
|
||||||
|
let block = this.startNode()
|
||||||
|
this.next()
|
||||||
|
this.expect(tt.parenL)
|
||||||
|
block.left = this.parseBindingAtom()
|
||||||
|
this.checkLVal(block.left, true)
|
||||||
|
this.expectContextual("of")
|
||||||
|
block.right = this.parseExpression()
|
||||||
|
this.expect(tt.parenR)
|
||||||
|
node.blocks.push(this.finishNode(block, "ComprehensionBlock"))
|
||||||
|
}
|
||||||
|
node.filter = this.eat(tt._if) ? this.parseParenExpression() : null
|
||||||
|
node.body = this.parseExpression()
|
||||||
|
this.expect(isGenerator ? tt.parenR : tt.bracketR)
|
||||||
|
node.generator = isGenerator
|
||||||
|
return this.finishNode(node, "ComprehensionExpression")
|
||||||
|
}
|
|
@ -0,0 +1,90 @@
// This is a trick taken from Esprima. It turns out that, on
// non-Chrome browsers, to check whether a string is in a set, a
// predicate containing a big ugly `switch` statement is faster than
// a regular expression, and on Chrome the two are about on par.
// This function uses `eval` (non-lexical) to produce such a
// predicate from a space-separated string of words.
//
// It starts by sorting the words by length.

// Reserved word lists for various dialects of the language

export const reservedWords = {
  3: "abstract boolean byte char class double enum export extends final float goto implements import int interface long native package private protected public short static super synchronized throws transient volatile",
  5: "class enum extends super const export import",
  6: "enum",
  strict: "implements interface let package private protected public static yield",
  strictBind: "eval arguments"
}

// And the keywords

var ecma5AndLessKeywords = "break case catch continue debugger default do else finally for function if return switch throw try var while with null true false instanceof typeof void delete new in this"

export const keywords = {
  5: ecma5AndLessKeywords,
  6: ecma5AndLessKeywords + " let const class extends export import yield super"
}

// ## Character categories

// Big ugly regular expressions that match characters in the
// whitespace, identifier, and identifier-start categories. These
// are only applied when a character is found to actually have a
// code point above 128.
// Generated by `bin/generate-identifier-regex.js`.

let nonASCIIidentifierStartChars = "\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376\u0377\u037a-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0561-\u0587\u05d0-\u05ea\u05f0-\u05f2\u0620-\u064a\u066e\u066f\u0671-\u06d3\u06d5\u06e5\u06e6\u06ee\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u08a0-\u08b2\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc\u09dd\u09df-\u09e1\u09f0\u09f1\u0a05-\u0a0a\u0a0f\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\u0b05-\u0b0c\u0b0f\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32\u0b33\u0b35-\u0b39\u0b3d\u0b5c\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58\u0c59\u0c60\u0c61\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0\u0ce1\u0cf1\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d60\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e33\u0e40-\u0e46\u0e81\u0e82\u0e84\u0e87\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa\u0eab\u0ead-\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f4\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1877\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19c1-\u19c7\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309b-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312d\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fcc\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua78e\ua790-\ua7ad\ua7b0\ua7b1\ua7f7-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua90a
-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab5f\uab64\uab65\uabc0-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc"
let nonASCIIidentifierChars = "\u200c\u200d\xb7\u0300-\u036f\u0387\u0483-\u0487\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u0669\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7\u06e8\u06ea-\u06ed\u06f0-\u06f9\u0711\u0730-\u074a\u07a6-\u07b0\u07c0-\u07c9\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08e4-\u0903\u093a-\u093c\u093e-\u094f\u0951-\u0957\u0962\u0963\u0966-\u096f\u0981-\u0983\u09bc\u09be-\u09c4\u09c7\u09c8\u09cb-\u09cd\u09d7\u09e2\u09e3\u09e6-\u09ef\u0a01-\u0a03\u0a3c\u0a3e-\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a66-\u0a71\u0a75\u0a81-\u0a83\u0abc\u0abe-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ae2\u0ae3\u0ae6-\u0aef\u0b01-\u0b03\u0b3c\u0b3e-\u0b44\u0b47\u0b48\u0b4b-\u0b4d\u0b56\u0b57\u0b62\u0b63\u0b66-\u0b6f\u0b82\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd7\u0be6-\u0bef\u0c00-\u0c03\u0c3e-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0c66-\u0c6f\u0c81-\u0c83\u0cbc\u0cbe-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0ce6-\u0cef\u0d01-\u0d03\u0d3e-\u0d44\u0d46-\u0d48\u0d4a-\u0d4d\u0d57\u0d62\u0d63\u0d66-\u0d6f\u0d82\u0d83\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2\u0df3\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0e50-\u0e59\u0eb1\u0eb4-\u0eb9\u0ebb\u0ebc\u0ec8-\u0ecd\u0ed0-\u0ed9\u0f18\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e\u0f3f\u0f71-\u0f84\u0f86\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102b-\u103e\u1040-\u1049\u1056-\u1059\u105e-\u1060\u1062-\u1064\u1067-\u106d\u1071-\u1074\u1082-\u108d\u108f-\u109d\u135d-\u135f\u1369-\u1371\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17b4-\u17d3\u17dd\u17e0-\u17e9\u180b-\u180d\u1810-\u1819\u18a9\u1920-\u192b\u1930-\u193b\u1946-\u194f\u19b0-\u19c0\u19c8\u19c9\u19d0-\u19da\u1a17-\u1a1b\u1a55-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1ab0-\u1abd\u1b00-\u1b04\u1b34-\u1b44\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1b82\u1ba1-\u1bad\u1bb0-\u1bb9\u1be6-\u1bf3\u1c24-\u1c37\u1c40-\u1c49\u1c50-\u1c59\u1cd0-\u1cd2\u1cd4-\u1ce8\u1ced\u1cf2-\u1cf4\u1cf8\u1cf9\u1dc0-\u1df5\u1dfc-\u1dff\u203f\u2040\u2054\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua620-\ua629\ua66f\ua674-\ua67d\ua69f\ua6f0\ua6f1\ua802\ua806\ua80b\ua823-\ua827\ua880\ua881\ua8b4-\ua8c4\ua8d0-\ua8d9\ua8e0-\ua8f1\ua900-\ua909\ua926-\ua92d\ua947-\ua953\ua980-\ua983\ua9b3-\ua9c0\ua9d0-\ua9d9\ua9e5\ua9f0-\ua9f9\uaa29-\uaa36\uaa43\uaa4c\uaa4d\uaa50-\uaa59\uaa7b-\uaa7d\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uaaeb-\uaaef\uaaf5\uaaf6\uabe3-\uabea\uabec\uabed\uabf0-\uabf9\ufb1e\ufe00-\ufe0f\ufe20-\ufe2d\ufe33\ufe34\ufe4d-\ufe4f\uff10-\uff19\uff3f"
|
||||||
|
const nonASCIIidentifierStart = new RegExp("[" + nonASCIIidentifierStartChars + "]")
|
||||||
|
const nonASCIIidentifier = new RegExp("[" + nonASCIIidentifierStartChars + nonASCIIidentifierChars + "]")
|
||||||
|
|
||||||
|
nonASCIIidentifierStartChars = nonASCIIidentifierChars = null
|
||||||
|
|
||||||
|
// These are a run-length and offset encoded representation of the
|
||||||
|
// >0xffff code points that are a valid part of identifiers. The
|
||||||
|
// offset starts at 0x10000, and each pair of numbers represents an
|
||||||
|
// offset to the next range, and then a size of the range. They were
|
||||||
|
// generated by tools/generate-identifier-regex.js
|
||||||
|
var astralIdentifierStartCodes = [0,11,2,25,2,18,2,1,2,14,3,13,35,122,70,52,268,28,4,48,48,31,17,26,6,37,11,29,3,35,5,7,2,4,43,157,99,39,9,51,157,310,10,21,11,7,153,5,3,0,2,43,2,1,4,0,3,22,11,22,10,30,98,21,11,25,71,55,7,1,65,0,16,3,2,2,2,26,45,28,4,28,36,7,2,27,28,53,11,21,11,18,14,17,111,72,955,52,76,44,33,24,27,35,42,34,4,0,13,47,15,3,22,0,38,17,2,24,133,46,39,7,3,1,3,21,2,6,2,1,2,4,4,0,32,4,287,47,21,1,2,0,185,46,82,47,21,0,60,42,502,63,32,0,449,56,1288,920,104,110,2962,1070,13266,568,8,30,114,29,19,47,17,3,32,20,6,18,881,68,12,0,67,12,16481,1,3071,106,6,12,4,8,8,9,5991,84,2,70,2,1,3,0,3,1,3,3,2,11,2,0,2,6,2,64,2,3,3,7,2,6,2,27,2,3,2,4,2,0,4,6,2,339,3,24,2,24,2,30,2,24,2,30,2,24,2,30,2,24,2,30,2,24,2,7,4149,196,1340,3,2,26,2,1,2,0,3,0,2,9,2,3,2,0,2,0,7,0,5,0,2,0,2,0,2,2,2,1,2,0,3,0,2,0,2,0,2,0,2,0,2,1,2,0,3,3,2,6,2,3,2,3,2,0,2,9,2,16,6,2,2,4,2,16,4421,42710,42,4148,12,221,16355,541]
|
||||||
|
var astralIdentifierCodes = [509,0,227,0,150,4,294,9,1368,2,2,1,6,3,41,2,5,0,166,1,1306,2,54,14,32,9,16,3,46,10,54,9,7,2,37,13,2,9,52,0,13,2,49,13,16,9,83,11,168,11,6,9,8,2,57,0,2,6,3,1,3,2,10,0,11,1,3,6,4,4,316,19,13,9,214,6,3,8,112,16,16,9,82,12,9,9,535,9,20855,9,135,4,60,6,26,9,1016,45,17,3,19723,1,5319,4,4,5,9,7,3,6,31,3,149,2,1418,49,4305,6,792618,239]

// This has a complexity linear to the value of the code. The
// assumption is that looking up astral identifier characters is
// rare.
function isInAstralSet(code, set) {
  let pos = 0x10000
  for (let i = 0; i < set.length; i += 2) {
    pos += set[i]
    if (pos > code) return false
    pos += set[i + 1]
    if (pos >= code) return true
  }
}

// Test whether a given character code starts an identifier.

export function isIdentifierStart(code, astral) {
  if (code < 65) return code === 36
  if (code < 91) return true
  if (code < 97) return code === 95
  if (code < 123) return true
  if (code <= 0xffff) return code >= 0xaa && nonASCIIidentifierStart.test(String.fromCharCode(code))
  if (astral === false) return false
  return isInAstralSet(code, astralIdentifierStartCodes)
}

// Test whether a given character is part of an identifier.

export function isIdentifierChar(code, astral) {
  if (code < 48) return code === 36
  if (code < 58) return true
  if (code < 65) return false
  if (code < 91) return true
  if (code < 97) return code === 95
  if (code < 123) return true
  if (code <= 0xffff) return code >= 0xaa && nonASCIIidentifier.test(String.fromCharCode(code))
  if (astral === false) return false
  return isInAstralSet(code, astralIdentifierStartCodes) || isInAstralSet(code, astralIdentifierCodes)
}

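To make the run-length/offset encoding described above concrete, here is a small sketch (not part of acorn) that decodes such a pair list into absolute ranges; the sample input reuses the first two pairs of the astralIdentifierStartCodes table shown earlier:

// Each pair is (gap to the start of the next range, length of that range),
// with the running position starting at 0x10000, matching isInAstralSet above.
function decodeAstralRanges(codes) {
  const ranges = []
  let pos = 0x10000
  for (let i = 0; i < codes.length; i += 2) {
    pos += codes[i]                         // skip the gap
    ranges.push([pos, pos + codes[i + 1]])  // inclusive start and end
    pos += codes[i + 1]
  }
  return ranges
}
// decodeAstralRanges([0, 11, 2, 25]) => [[0x10000, 0x1000b], [0x1000d, 0x10026]]
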
@ -0,0 +1,67 @@
// Acorn is a tiny, fast JavaScript parser written in JavaScript.
//
// Acorn was written by Marijn Haverbeke, Ingvar Stepanyan, and
// various contributors and released under an MIT license.
//
// Git repositories for Acorn are available at
//
// http://marijnhaverbeke.nl/git/acorn
// https://github.com/ternjs/acorn.git
//
// Please use the [github bug tracker][ghbt] to report issues.
//
// [ghbt]: https://github.com/ternjs/acorn/issues
//
// This file defines the main parser interface. The library also comes
// with an [error-tolerant parser][dammit] and an
// [abstract syntax tree walker][walk], defined in other files.
//
// [dammit]: acorn_loose.js
// [walk]: util/walk.js

import {Parser} from "./state"
import "./parseutil"
import "./statement"
import "./lval"
import "./expression"
import "./location"

export {Parser, plugins} from "./state"
export {defaultOptions} from "./options"
export {Position, SourceLocation, getLineInfo} from "./locutil"
export {Node} from "./node"
export {TokenType, types as tokTypes} from "./tokentype"
export {TokContext, types as tokContexts} from "./tokencontext"
export {isIdentifierChar, isIdentifierStart} from "./identifier"
export {Token} from "./tokenize"
export {isNewLine, lineBreak, lineBreakG} from "./whitespace"

export const version = "2.7.0"

// The main exported interface (under `self.acorn` when in the
// browser) is a `parse` function that takes a code string and
// returns an abstract syntax tree as specified by [Mozilla parser
// API][api].
//
// [api]: https://developer.mozilla.org/en-US/docs/SpiderMonkey/Parser_API

export function parse(input, options) {
  return new Parser(options, input).parse()
}

// This function tries to parse a single expression at a given
// offset in a string. Useful for parsing mixed-language formats
// that embed JavaScript expressions.

export function parseExpressionAt(input, pos, options) {
  let p = new Parser(options, input, pos)
  p.nextToken()
  return p.parseExpression()
}

// Acorn is organized as a tokenizer and a recursive-descent parser.
// The `tokenizer` export provides an interface to the tokenizer.

export function tokenizer(input, options) {
  return new Parser(options, input)
}

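A short usage sketch for the three entry points above, assuming the built package is importable as `acorn`; the sample source strings are made up:

import * as acorn from "acorn"

// Whole-program parse: returns an ESTree Program node.
const ast = acorn.parse("let answer = 6 * 7", {ecmaVersion: 6, locations: true})
console.log(ast.body[0].type)            // "VariableDeclaration"

// Parse a single expression starting at a given character offset.
const expr = acorn.parseExpressionAt("pad: 1 + 2", 5, {ecmaVersion: 6})
console.log(expr.type)                   // "BinaryExpression"

// Pull tokens one at a time until the end-of-file token.
for (let t = acorn.tokenizer("a + b"), tok = t.getToken();
     tok.type !== acorn.tokTypes.eof;
     tok = t.getToken())
  console.log(tok.type.label, tok.value)
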
@ -0,0 +1,24 @@
import {Parser} from "./state"
import {Position, getLineInfo} from "./locutil"

const pp = Parser.prototype

// This function is used to raise exceptions on parse errors. It
// takes an offset integer (into the current `input`) to indicate
// the location of the error, attaches the position to the end
// of the error message, and then raises a `SyntaxError` with that
// message.

pp.raise = function(pos, message) {
  let loc = getLineInfo(this.input, pos)
  message += " (" + loc.line + ":" + loc.column + ")"
  let err = new SyntaxError(message)
  err.pos = pos; err.loc = loc; err.raisedAt = this.pos
  throw err
}

pp.curPosition = function() {
  if (this.options.locations) {
    return new Position(this.curLine, this.pos - this.lineStart)
  }
}

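The errors raised above surface to callers of `acorn.parse`; a hedged sketch of catching one (the input string and the exact message wording are illustrative):

import * as acorn from "acorn"

try {
  acorn.parse("let = 1", {ecmaVersion: 6})
} catch (err) {
  // err is a SyntaxError decorated by pp.raise with pos, loc, and raisedAt.
  console.log(err.message)                         // e.g. "Unexpected token (1:4)"
  console.log(err.pos, err.loc.line, err.loc.column)
}
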
@ -0,0 +1,42 @@
import {lineBreakG} from "./whitespace"

// These are used when `options.locations` is on, for the
// `startLoc` and `endLoc` properties.

export class Position {
  constructor(line, col) {
    this.line = line
    this.column = col
  }

  offset(n) {
    return new Position(this.line, this.column + n)
  }
}

export class SourceLocation {
  constructor(p, start, end) {
    this.start = start
    this.end = end
    if (p.sourceFile !== null) this.source = p.sourceFile
  }
}

// The `getLineInfo` function is mostly useful when the
// `locations` option is off (for performance reasons) and you
// want to find the line/column position for a given character
// offset. `input` should be the code string that the offset refers
// into.

export function getLineInfo(input, offset) {
  for (let line = 1, cur = 0;;) {
    lineBreakG.lastIndex = cur
    let match = lineBreakG.exec(input)
    if (match && match.index < offset) {
      ++line
      cur = match.index + match[0].length
    } else {
      return new Position(line, offset - cur)
    }
  }
}

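A quick check of how `getLineInfo` maps a flat offset to a line/column pair (it is re-exported from the package entry point above; the sample string is made up):

import {getLineInfo} from "acorn"

const src = "first\nsecond line"
const pos = getLineInfo(src, src.indexOf("line"))
console.log(pos.line, pos.column)   // 2 7 -- columns are 0-based within the line
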
@ -0,0 +1,501 @@
|
||||||
|
import {LooseParser} from "./state"
|
||||||
|
import {isDummy} from "./parseutil"
|
||||||
|
import {tokTypes as tt} from ".."
|
||||||
|
|
||||||
|
const lp = LooseParser.prototype
|
||||||
|
|
||||||
|
lp.checkLVal = function(expr) {
|
||||||
|
if (!expr) return expr
|
||||||
|
switch (expr.type) {
|
||||||
|
case "Identifier":
|
||||||
|
case "MemberExpression":
|
||||||
|
return expr
|
||||||
|
|
||||||
|
case "ParenthesizedExpression":
|
||||||
|
expr.expression = this.checkLVal(expr.expression)
|
||||||
|
return expr
|
||||||
|
|
||||||
|
default:
|
||||||
|
return this.dummyIdent()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
lp.parseExpression = function(noIn) {
|
||||||
|
let start = this.storeCurrentPos()
|
||||||
|
let expr = this.parseMaybeAssign(noIn)
|
||||||
|
if (this.tok.type === tt.comma) {
|
||||||
|
let node = this.startNodeAt(start)
|
||||||
|
node.expressions = [expr]
|
||||||
|
while (this.eat(tt.comma)) node.expressions.push(this.parseMaybeAssign(noIn))
|
||||||
|
return this.finishNode(node, "SequenceExpression")
|
||||||
|
}
|
||||||
|
return expr
|
||||||
|
}
|
||||||
|
|
||||||
|
lp.parseParenExpression = function() {
|
||||||
|
this.pushCx()
|
||||||
|
this.expect(tt.parenL)
|
||||||
|
let val = this.parseExpression()
|
||||||
|
this.popCx()
|
||||||
|
this.expect(tt.parenR)
|
||||||
|
return val
|
||||||
|
}
|
||||||
|
|
||||||
|
lp.parseMaybeAssign = function(noIn) {
|
||||||
|
let start = this.storeCurrentPos()
|
||||||
|
let left = this.parseMaybeConditional(noIn)
|
||||||
|
if (this.tok.type.isAssign) {
|
||||||
|
let node = this.startNodeAt(start)
|
||||||
|
node.operator = this.tok.value
|
||||||
    node.left = this.tok.type === tt.eq ? this.toAssignable(left) : this.checkLVal(left)
    this.next()
    node.right = this.parseMaybeAssign(noIn)
    return this.finishNode(node, "AssignmentExpression")
  }
  return left
}

lp.parseMaybeConditional = function(noIn) {
  let start = this.storeCurrentPos()
  let expr = this.parseExprOps(noIn)
  if (this.eat(tt.question)) {
    let node = this.startNodeAt(start)
    node.test = expr
    node.consequent = this.parseMaybeAssign()
    node.alternate = this.expect(tt.colon) ? this.parseMaybeAssign(noIn) : this.dummyIdent()
    return this.finishNode(node, "ConditionalExpression")
  }
  return expr
}

lp.parseExprOps = function(noIn) {
  let start = this.storeCurrentPos()
  let indent = this.curIndent, line = this.curLineStart
  return this.parseExprOp(this.parseMaybeUnary(noIn), start, -1, noIn, indent, line)
}

lp.parseExprOp = function(left, start, minPrec, noIn, indent, line) {
  if (this.curLineStart != line && this.curIndent < indent && this.tokenStartsLine()) return left
  let prec = this.tok.type.binop
  if (prec != null && (!noIn || this.tok.type !== tt._in)) {
    if (prec > minPrec) {
      let node = this.startNodeAt(start)
      node.left = left
      node.operator = this.tok.value
      this.next()
      if (this.curLineStart != line && this.curIndent < indent && this.tokenStartsLine()) {
        node.right = this.dummyIdent()
      } else {
        let rightStart = this.storeCurrentPos()
        node.right = this.parseExprOp(this.parseMaybeUnary(noIn), rightStart, prec, noIn, indent, line)
      }
      this.finishNode(node, /&&|\|\|/.test(node.operator) ? "LogicalExpression" : "BinaryExpression")
      return this.parseExprOp(node, start, minPrec, noIn, indent, line)
    }
  }
  return left
}

lp.parseMaybeUnary = function(noIn) {
  if (this.tok.type.prefix) {
    let node = this.startNode(), update = this.tok.type === tt.incDec
    node.operator = this.tok.value
    node.prefix = true
    this.next()
    node.argument = this.parseMaybeUnary(noIn)
    if (update) node.argument = this.checkLVal(node.argument)
    return this.finishNode(node, update ? "UpdateExpression" : "UnaryExpression")
  } else if (this.tok.type === tt.ellipsis) {
    let node = this.startNode()
    this.next()
    node.argument = this.parseMaybeUnary(noIn)
    return this.finishNode(node, "SpreadElement")
  }
  let start = this.storeCurrentPos()
  let expr = this.parseExprSubscripts()
  while (this.tok.type.postfix && !this.canInsertSemicolon()) {
    let node = this.startNodeAt(start)
    node.operator = this.tok.value
    node.prefix = false
    node.argument = this.checkLVal(expr)
    this.next()
    expr = this.finishNode(node, "UpdateExpression")
  }
  return expr
}

lp.parseExprSubscripts = function() {
  let start = this.storeCurrentPos()
  return this.parseSubscripts(this.parseExprAtom(), start, false, this.curIndent, this.curLineStart)
}

lp.parseSubscripts = function(base, start, noCalls, startIndent, line) {
  for (;;) {
    if (this.curLineStart != line && this.curIndent <= startIndent && this.tokenStartsLine()) {
      if (this.tok.type == tt.dot && this.curIndent == startIndent)
        --startIndent
      else
        return base
    }

    if (this.eat(tt.dot)) {
      let node = this.startNodeAt(start)
      node.object = base
      if (this.curLineStart != line && this.curIndent <= startIndent && this.tokenStartsLine())
        node.property = this.dummyIdent()
      else
        node.property = this.parsePropertyAccessor() || this.dummyIdent()
      node.computed = false
      base = this.finishNode(node, "MemberExpression")
    } else if (this.tok.type == tt.bracketL) {
      this.pushCx()
      this.next()
      let node = this.startNodeAt(start)
      node.object = base
      node.property = this.parseExpression()
      node.computed = true
      this.popCx()
      this.expect(tt.bracketR)
      base = this.finishNode(node, "MemberExpression")
    } else if (!noCalls && this.tok.type == tt.parenL) {
      let node = this.startNodeAt(start)
      node.callee = base
      node.arguments = this.parseExprList(tt.parenR)
      base = this.finishNode(node, "CallExpression")
    } else if (this.tok.type == tt.backQuote) {
      let node = this.startNodeAt(start)
      node.tag = base
      node.quasi = this.parseTemplate()
      base = this.finishNode(node, "TaggedTemplateExpression")
    } else {
      return base
    }
  }
}

lp.parseExprAtom = function() {
  let node
  switch (this.tok.type) {
  case tt._this:
  case tt._super:
    let type = this.tok.type === tt._this ? "ThisExpression" : "Super"
    node = this.startNode()
    this.next()
    return this.finishNode(node, type)

  case tt.name:
    let start = this.storeCurrentPos()
    let id = this.parseIdent()
    return this.eat(tt.arrow) ? this.parseArrowExpression(this.startNodeAt(start), [id]) : id

  case tt.regexp:
    node = this.startNode()
    let val = this.tok.value
    node.regex = {pattern: val.pattern, flags: val.flags}
    node.value = val.value
    node.raw = this.input.slice(this.tok.start, this.tok.end)
    this.next()
    return this.finishNode(node, "Literal")

  case tt.num: case tt.string:
    node = this.startNode()
    node.value = this.tok.value
    node.raw = this.input.slice(this.tok.start, this.tok.end)
    this.next()
    return this.finishNode(node, "Literal")

  case tt._null: case tt._true: case tt._false:
    node = this.startNode()
    node.value = this.tok.type === tt._null ? null : this.tok.type === tt._true
    node.raw = this.tok.type.keyword
    this.next()
    return this.finishNode(node, "Literal")

  case tt.parenL:
    let parenStart = this.storeCurrentPos()
    this.next()
    let inner = this.parseExpression()
    this.expect(tt.parenR)
    if (this.eat(tt.arrow)) {
      return this.parseArrowExpression(this.startNodeAt(parenStart), inner.expressions || (isDummy(inner) ? [] : [inner]))
    }
    if (this.options.preserveParens) {
      let par = this.startNodeAt(parenStart)
      par.expression = inner
      inner = this.finishNode(par, "ParenthesizedExpression")
    }
    return inner

  case tt.bracketL:
    node = this.startNode()
    node.elements = this.parseExprList(tt.bracketR, true)
    return this.finishNode(node, "ArrayExpression")

  case tt.braceL:
    return this.parseObj()

  case tt._class:
    return this.parseClass()

  case tt._function:
    node = this.startNode()
    this.next()
    return this.parseFunction(node, false)

  case tt._new:
    return this.parseNew()

  case tt._yield:
    node = this.startNode()
    this.next()
    if (this.semicolon() || this.canInsertSemicolon() || (this.tok.type != tt.star && !this.tok.type.startsExpr)) {
      node.delegate = false
      node.argument = null
    } else {
      node.delegate = this.eat(tt.star)
      node.argument = this.parseMaybeAssign()
    }
    return this.finishNode(node, "YieldExpression")

  case tt.backQuote:
    return this.parseTemplate()

  default:
    return this.dummyIdent()
  }
}

lp.parseNew = function() {
  let node = this.startNode(), startIndent = this.curIndent, line = this.curLineStart
  let meta = this.parseIdent(true)
  if (this.options.ecmaVersion >= 6 && this.eat(tt.dot)) {
    node.meta = meta
    node.property = this.parseIdent(true)
    return this.finishNode(node, "MetaProperty")
  }
  let start = this.storeCurrentPos()
  node.callee = this.parseSubscripts(this.parseExprAtom(), start, true, startIndent, line)
  if (this.tok.type == tt.parenL) {
    node.arguments = this.parseExprList(tt.parenR)
  } else {
    node.arguments = []
  }
  return this.finishNode(node, "NewExpression")
}

lp.parseTemplateElement = function() {
  let elem = this.startNode()
  elem.value = {
    raw: this.input.slice(this.tok.start, this.tok.end).replace(/\r\n?/g, '\n'),
    cooked: this.tok.value
  }
  this.next()
  elem.tail = this.tok.type === tt.backQuote
  return this.finishNode(elem, "TemplateElement")
}

lp.parseTemplate = function() {
  let node = this.startNode()
  this.next()
  node.expressions = []
  let curElt = this.parseTemplateElement()
  node.quasis = [curElt]
  while (!curElt.tail) {
    this.next()
    node.expressions.push(this.parseExpression())
    if (this.expect(tt.braceR)) {
      curElt = this.parseTemplateElement()
    } else {
      curElt = this.startNode()
      curElt.value = {cooked: '', raw: ''}
      curElt.tail = true
    }
    node.quasis.push(curElt)
  }
  this.expect(tt.backQuote)
  return this.finishNode(node, "TemplateLiteral")
}

lp.parseObj = function() {
  let node = this.startNode()
  node.properties = []
  this.pushCx()
  let indent = this.curIndent + 1, line = this.curLineStart
  this.eat(tt.braceL)
  if (this.curIndent + 1 < indent) { indent = this.curIndent; line = this.curLineStart }
  while (!this.closes(tt.braceR, indent, line)) {
    let prop = this.startNode(), isGenerator, start
    if (this.options.ecmaVersion >= 6) {
      start = this.storeCurrentPos()
      prop.method = false
      prop.shorthand = false
      isGenerator = this.eat(tt.star)
    }
    this.parsePropertyName(prop)
    if (isDummy(prop.key)) { if (isDummy(this.parseMaybeAssign())) this.next(); this.eat(tt.comma); continue }
    if (this.eat(tt.colon)) {
      prop.kind = "init"
      prop.value = this.parseMaybeAssign()
    } else if (this.options.ecmaVersion >= 6 && (this.tok.type === tt.parenL || this.tok.type === tt.braceL)) {
      prop.kind = "init"
      prop.method = true
      prop.value = this.parseMethod(isGenerator)
    } else if (this.options.ecmaVersion >= 5 && prop.key.type === "Identifier" &&
               !prop.computed && (prop.key.name === "get" || prop.key.name === "set") &&
               (this.tok.type != tt.comma && this.tok.type != tt.braceR)) {
      prop.kind = prop.key.name
      this.parsePropertyName(prop)
      prop.value = this.parseMethod(false)
    } else {
      prop.kind = "init"
      if (this.options.ecmaVersion >= 6) {
        if (this.eat(tt.eq)) {
          let assign = this.startNodeAt(start)
          assign.operator = "="
          assign.left = prop.key
          assign.right = this.parseMaybeAssign()
          prop.value = this.finishNode(assign, "AssignmentExpression")
        } else {
          prop.value = prop.key
        }
      } else {
        prop.value = this.dummyIdent()
      }
      prop.shorthand = true
    }
    node.properties.push(this.finishNode(prop, "Property"))
    this.eat(tt.comma)
  }
  this.popCx()
  if (!this.eat(tt.braceR)) {
    // If there is no closing brace, make the node span to the start
    // of the next token (this is useful for Tern)
    this.last.end = this.tok.start
    if (this.options.locations) this.last.loc.end = this.tok.loc.start
  }
  return this.finishNode(node, "ObjectExpression")
}

lp.parsePropertyName = function(prop) {
  if (this.options.ecmaVersion >= 6) {
    if (this.eat(tt.bracketL)) {
      prop.computed = true
      prop.key = this.parseExpression()
      this.expect(tt.bracketR)
      return
    } else {
      prop.computed = false
    }
  }
  let key = (this.tok.type === tt.num || this.tok.type === tt.string) ? this.parseExprAtom() : this.parseIdent()
  prop.key = key || this.dummyIdent()
}

lp.parsePropertyAccessor = function() {
  if (this.tok.type === tt.name || this.tok.type.keyword) return this.parseIdent()
}

lp.parseIdent = function() {
  let name = this.tok.type === tt.name ? this.tok.value : this.tok.type.keyword
  if (!name) return this.dummyIdent()
  let node = this.startNode()
  this.next()
  node.name = name
  return this.finishNode(node, "Identifier")
}

lp.initFunction = function(node) {
  node.id = null
  node.params = []
  if (this.options.ecmaVersion >= 6) {
    node.generator = false
    node.expression = false
  }
}

// Convert existing expression atom to assignable pattern
// if possible.

lp.toAssignable = function(node, binding) {
  if (!node || node.type == "Identifier" || (node.type == "MemberExpression" && !binding)) {
    // Okay
  } else if (node.type == "ParenthesizedExpression") {
    node.expression = this.toAssignable(node.expression, binding)
  } else if (this.options.ecmaVersion < 6) {
    return this.dummyIdent()
  } else if (node.type == "ObjectExpression") {
    node.type = "ObjectPattern"
    let props = node.properties
    for (let i = 0; i < props.length; i++)
      props[i].value = this.toAssignable(props[i].value, binding)
  } else if (node.type == "ArrayExpression") {
    node.type = "ArrayPattern"
    this.toAssignableList(node.elements, binding)
  } else if (node.type == "SpreadElement") {
    node.type = "RestElement"
    node.argument = this.toAssignable(node.argument, binding)
  } else if (node.type == "AssignmentExpression") {
    node.type = "AssignmentPattern"
    delete node.operator
  } else {
    return this.dummyIdent()
  }
  return node
}

lp.toAssignableList = function(exprList, binding) {
  for (let i = 0; i < exprList.length; i++)
    exprList[i] = this.toAssignable(exprList[i], binding)
  return exprList
}

lp.parseFunctionParams = function(params) {
  params = this.parseExprList(tt.parenR)
  return this.toAssignableList(params, true)
}

lp.parseMethod = function(isGenerator) {
  let node = this.startNode()
  this.initFunction(node)
  node.params = this.parseFunctionParams()
  node.generator = isGenerator || false
  node.expression = this.options.ecmaVersion >= 6 && this.tok.type !== tt.braceL
  node.body = node.expression ? this.parseMaybeAssign() : this.parseBlock()
  return this.finishNode(node, "FunctionExpression")
}

lp.parseArrowExpression = function(node, params) {
  this.initFunction(node)
  node.params = this.toAssignableList(params, true)
  node.expression = this.tok.type !== tt.braceL
  node.body = node.expression ? this.parseMaybeAssign() : this.parseBlock()
  return this.finishNode(node, "ArrowFunctionExpression")
}

lp.parseExprList = function(close, allowEmpty) {
  this.pushCx()
  let indent = this.curIndent, line = this.curLineStart, elts = []
  this.next(); // Opening bracket
  while (!this.closes(close, indent + 1, line)) {
    if (this.eat(tt.comma)) {
      elts.push(allowEmpty ? null : this.dummyIdent())
      continue
    }
    let elt = this.parseMaybeAssign()
    if (isDummy(elt)) {
      if (this.closes(close, indent, line)) break
      this.next()
    } else {
      elts.push(elt)
    }
    this.eat(tt.comma)
  }
  this.popCx()
  if (!this.eat(close)) {
    // If there is no closing brace, make the node span to the start
    // of the next token (this is useful for Tern)
    this.last.end = this.tok.start
    if (this.options.locations) this.last.loc.end = this.tok.loc.start
  }
  return elts
}
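// Sketch of the "no closing brace" recovery used by parseObj and parseExprList
// above: the node is still produced, and its end is stretched to the start of
// the next token instead of a missing `}` (input and offsets are illustrative).
//
//   parse_dammit("var o = {a: 1\nnext()")
//   // The ObjectExpression built for `{a: 1` ends where the `next` token
//   // starts, rather than at a closing brace that is not there.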
@ -0,0 +1,50 @@
// Acorn: Loose parser
//
// This module provides an alternative parser (`parse_dammit`) that
// exposes the same interface as `parse`, but will try to parse
// anything as JavaScript, repairing syntax errors as best it can.
// There are circumstances in which it will raise an error and give
// up, but they are very rare. The resulting AST will be a mostly
// valid JavaScript AST (as per the [Mozilla parser API][api]), except
// that:
//
// - Return outside functions is allowed
//
// - Label consistency (no conflicts, break only to existing labels)
//   is not enforced.
//
// - Bogus Identifier nodes with a name of `"✖"` are inserted whenever
//   the parser got too confused to return anything meaningful.
//
// [api]: https://developer.mozilla.org/en-US/docs/SpiderMonkey/Parser_API
//
// The expected use for this is to *first* try `acorn.parse`, and only
// if that fails switch to `parse_dammit`. The loose parser might
// parse badly indented code incorrectly, so **don't** use it as
// your default parser.
//
// Quite a lot of acorn.js is duplicated here. The alternative was to
// add a *lot* of extra cruft to that file, making it less readable
// and slower. Copying and editing the code allowed me to make
// invasive changes and simplifications without creating a complicated
// tangle.

import * as acorn from ".."
import {LooseParser, pluginsLoose} from "./state"
import "./tokenize"
import "./statement"
import "./expression"

export {LooseParser, pluginsLoose} from "./state"

acorn.defaultOptions.tabSize = 4

export function parse_dammit(input, options) {
  let p = new LooseParser(input, options)
  p.next()
  return p.parseTopLevel()
}

acorn.parse_dammit = parse_dammit
acorn.LooseParser = LooseParser
acorn.pluginsLoose = pluginsLoose
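// Usage sketch for the fallback pattern described in the header comment above,
// assuming the main parser and this module are both loaded so that `acorn.parse`
// and `acorn.parse_dammit` are available:
//
//   let ast
//   try {
//     ast = acorn.parse(source)            // strict parse first
//   } catch (e) {
//     if (!(e instanceof SyntaxError)) throw e
//     ast = acorn.parse_dammit(source)     // error-tolerant fallback
//   }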
@ -0,0 +1 @@
export function isDummy(node) { return node.name == "✖" }
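// Sketch of what isDummy detects: the placeholder Identifier that dummyIdent in
// ./state produces where the loose parser could not recover anything meaningful
// (positions below are illustrative).
//
//   isDummy({type: "Identifier", name: "✖", start: 12, end: 12})  // true
//   isDummy({type: "Identifier", name: "x", start: 0, end: 1})    // false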
@ -0,0 +1,160 @@
import {tokenizer, SourceLocation, tokTypes as tt, Node, lineBreak, isNewLine} from ".."

// Registered plugins
export const pluginsLoose = {}

export class LooseParser {
  constructor(input, options) {
    this.toks = tokenizer(input, options)
    this.options = this.toks.options
    this.input = this.toks.input
    this.tok = this.last = {type: tt.eof, start: 0, end: 0}
    if (this.options.locations) {
      let here = this.toks.curPosition()
      this.tok.loc = new SourceLocation(this.toks, here, here)
    }
    this.ahead = []; // Tokens ahead
    this.context = []; // Indentation contexts
    this.curIndent = 0
    this.curLineStart = 0
    this.nextLineStart = this.lineEnd(this.curLineStart) + 1
    // Load plugins
    this.options.pluginsLoose = options.pluginsLoose || {}
    this.loadPlugins(this.options.pluginsLoose)
  }

  startNode() {
    return new Node(this.toks, this.tok.start, this.options.locations ? this.tok.loc.start : null)
  }

  storeCurrentPos() {
    return this.options.locations ? [this.tok.start, this.tok.loc.start] : this.tok.start
  }

  startNodeAt(pos) {
    if (this.options.locations) {
      return new Node(this.toks, pos[0], pos[1])
    } else {
      return new Node(this.toks, pos)
    }
  }

  finishNode(node, type) {
    node.type = type
    node.end = this.last.end
    if (this.options.locations)
      node.loc.end = this.last.loc.end
    if (this.options.ranges)
      node.range[1] = this.last.end
    return node
  }

  dummyNode(type) {
    let dummy = this.startNode()
    dummy.type = type
    dummy.end = dummy.start
    if (this.options.locations)
      dummy.loc.end = dummy.loc.start
    if (this.options.ranges)
      dummy.range[1] = dummy.start
    this.last = {type: tt.name, start: dummy.start, end: dummy.start, loc: dummy.loc}
    return dummy
  }

  dummyIdent() {
    let dummy = this.dummyNode("Identifier")
    dummy.name = "✖"
    return dummy
  }

  dummyString() {
    let dummy = this.dummyNode("Literal")
    dummy.value = dummy.raw = "✖"
    return dummy
  }

  eat(type) {
    if (this.tok.type === type) {
      this.next()
      return true
    } else {
      return false
    }
  }

  isContextual(name) {
    return this.tok.type === tt.name && this.tok.value === name
  }

  eatContextual(name) {
    return this.tok.value === name && this.eat(tt.name)
  }

  canInsertSemicolon() {
    return this.tok.type === tt.eof || this.tok.type === tt.braceR ||
      lineBreak.test(this.input.slice(this.last.end, this.tok.start))
  }

  semicolon() {
    return this.eat(tt.semi)
  }

  expect(type) {
    if (this.eat(type)) return true
    for (let i = 1; i <= 2; i++) {
      if (this.lookAhead(i).type == type) {
        for (let j = 0; j < i; j++) this.next()
        return true
      }
    }
  }

  pushCx() {
    this.context.push(this.curIndent)
  }

  popCx() {
    this.curIndent = this.context.pop()
  }

  lineEnd(pos) {
    while (pos < this.input.length && !isNewLine(this.input.charCodeAt(pos))) ++pos
    return pos
  }

  indentationAfter(pos) {
    for (let count = 0;; ++pos) {
      let ch = this.input.charCodeAt(pos)
      if (ch === 32) ++count
      else if (ch === 9) count += this.options.tabSize
      else return count
    }
  }

  closes(closeTok, indent, line, blockHeuristic) {
    if (this.tok.type === closeTok || this.tok.type === tt.eof) return true
    return line != this.curLineStart && this.curIndent < indent && this.tokenStartsLine() &&
      (!blockHeuristic || this.nextLineStart >= this.input.length ||
       this.indentationAfter(this.nextLineStart) < indent)
  }

  tokenStartsLine() {
    for (let p = this.tok.start - 1; p >= this.curLineStart; --p) {
      let ch = this.input.charCodeAt(p)
      if (ch !== 9 && ch !== 32) return false
    }
    return true
  }

  extend(name, f) {
    this[name] = f(this[name])
  }

  loadPlugins(pluginConfigs) {
    for (let name in pluginConfigs) {
      let plugin = pluginsLoose[name]
      if (!plugin) throw new Error("Plugin '" + name + "' not found")
      plugin(this, pluginConfigs[name])
    }
  }
}
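// Sketch of the indentation bookkeeping above, assuming the default tabSize of 4
// set in ./index: indentationAfter counts a leading "\t  " as 4 + 2 = 6, and
// closes() treats a token that starts a less-indented line as an implicit end of
// the current bracketed context.
//
//   p.indentationAfter(startOfLine)   // "\t  foo" -> 6, " foo" -> 1  (p is a LooseParser)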
@ -0,0 +1,420 @@
import {LooseParser} from "./state"
import {isDummy} from "./parseutil"
import {getLineInfo, tokTypes as tt} from ".."

const lp = LooseParser.prototype

lp.parseTopLevel = function() {
  let node = this.startNodeAt(this.options.locations ? [0, getLineInfo(this.input, 0)] : 0)
  node.body = []
  while (this.tok.type !== tt.eof) node.body.push(this.parseStatement())
  this.last = this.tok
  if (this.options.ecmaVersion >= 6) {
    node.sourceType = this.options.sourceType
  }
  return this.finishNode(node, "Program")
}

lp.parseStatement = function() {
  let starttype = this.tok.type, node = this.startNode()

  switch (starttype) {
  case tt._break: case tt._continue:
    this.next()
    let isBreak = starttype === tt._break
    if (this.semicolon() || this.canInsertSemicolon()) {
      node.label = null
    } else {
      node.label = this.tok.type === tt.name ? this.parseIdent() : null
      this.semicolon()
    }
    return this.finishNode(node, isBreak ? "BreakStatement" : "ContinueStatement")

  case tt._debugger:
    this.next()
    this.semicolon()
    return this.finishNode(node, "DebuggerStatement")

  case tt._do:
    this.next()
    node.body = this.parseStatement()
    node.test = this.eat(tt._while) ? this.parseParenExpression() : this.dummyIdent()
    this.semicolon()
    return this.finishNode(node, "DoWhileStatement")

  case tt._for:
    this.next()
    this.pushCx()
    this.expect(tt.parenL)
    if (this.tok.type === tt.semi) return this.parseFor(node, null)
    if (this.tok.type === tt._var || this.tok.type === tt._let || this.tok.type === tt._const) {
      let init = this.parseVar(true)
      if (init.declarations.length === 1 && (this.tok.type === tt._in || this.isContextual("of"))) {
        return this.parseForIn(node, init)
      }
      return this.parseFor(node, init)
    }
    let init = this.parseExpression(true)
    if (this.tok.type === tt._in || this.isContextual("of"))
      return this.parseForIn(node, this.toAssignable(init))
    return this.parseFor(node, init)

  case tt._function:
    this.next()
    return this.parseFunction(node, true)

  case tt._if:
    this.next()
    node.test = this.parseParenExpression()
    node.consequent = this.parseStatement()
    node.alternate = this.eat(tt._else) ? this.parseStatement() : null
    return this.finishNode(node, "IfStatement")

  case tt._return:
    this.next()
    if (this.eat(tt.semi) || this.canInsertSemicolon()) node.argument = null
    else { node.argument = this.parseExpression(); this.semicolon() }
    return this.finishNode(node, "ReturnStatement")

  case tt._switch:
    let blockIndent = this.curIndent, line = this.curLineStart
    this.next()
    node.discriminant = this.parseParenExpression()
    node.cases = []
    this.pushCx()
    this.expect(tt.braceL)

    let cur
    while (!this.closes(tt.braceR, blockIndent, line, true)) {
      if (this.tok.type === tt._case || this.tok.type === tt._default) {
        let isCase = this.tok.type === tt._case
        if (cur) this.finishNode(cur, "SwitchCase")
        node.cases.push(cur = this.startNode())
        cur.consequent = []
        this.next()
        if (isCase) cur.test = this.parseExpression()
        else cur.test = null
        this.expect(tt.colon)
      } else {
        if (!cur) {
          node.cases.push(cur = this.startNode())
          cur.consequent = []
          cur.test = null
        }
        cur.consequent.push(this.parseStatement())
      }
    }
    if (cur) this.finishNode(cur, "SwitchCase")
    this.popCx()
    this.eat(tt.braceR)
    return this.finishNode(node, "SwitchStatement")

  case tt._throw:
    this.next()
    node.argument = this.parseExpression()
    this.semicolon()
    return this.finishNode(node, "ThrowStatement")

  case tt._try:
    this.next()
    node.block = this.parseBlock()
    node.handler = null
    if (this.tok.type === tt._catch) {
      let clause = this.startNode()
      this.next()
      this.expect(tt.parenL)
      clause.param = this.toAssignable(this.parseExprAtom(), true)
      this.expect(tt.parenR)
      clause.body = this.parseBlock()
      node.handler = this.finishNode(clause, "CatchClause")
    }
    node.finalizer = this.eat(tt._finally) ? this.parseBlock() : null
    if (!node.handler && !node.finalizer) return node.block
    return this.finishNode(node, "TryStatement")

  case tt._var:
  case tt._let:
  case tt._const:
    return this.parseVar()

  case tt._while:
    this.next()
    node.test = this.parseParenExpression()
    node.body = this.parseStatement()
    return this.finishNode(node, "WhileStatement")

  case tt._with:
    this.next()
    node.object = this.parseParenExpression()
    node.body = this.parseStatement()
    return this.finishNode(node, "WithStatement")

  case tt.braceL:
    return this.parseBlock()

  case tt.semi:
    this.next()
    return this.finishNode(node, "EmptyStatement")

  case tt._class:
    return this.parseClass(true)

  case tt._import:
    return this.parseImport()

  case tt._export:
    return this.parseExport()

  default:
    let expr = this.parseExpression()
    if (isDummy(expr)) {
      this.next()
      if (this.tok.type === tt.eof) return this.finishNode(node, "EmptyStatement")
      return this.parseStatement()
    } else if (starttype === tt.name && expr.type === "Identifier" && this.eat(tt.colon)) {
      node.body = this.parseStatement()
      node.label = expr
      return this.finishNode(node, "LabeledStatement")
    } else {
      node.expression = expr
      this.semicolon()
      return this.finishNode(node, "ExpressionStatement")
    }
  }
}

lp.parseBlock = function() {
  let node = this.startNode()
  this.pushCx()
  this.expect(tt.braceL)
  let blockIndent = this.curIndent, line = this.curLineStart
  node.body = []
  while (!this.closes(tt.braceR, blockIndent, line, true))
    node.body.push(this.parseStatement())
  this.popCx()
  this.eat(tt.braceR)
  return this.finishNode(node, "BlockStatement")
}

lp.parseFor = function(node, init) {
  node.init = init
  node.test = node.update = null
  if (this.eat(tt.semi) && this.tok.type !== tt.semi) node.test = this.parseExpression()
  if (this.eat(tt.semi) && this.tok.type !== tt.parenR) node.update = this.parseExpression()
  this.popCx()
  this.expect(tt.parenR)
  node.body = this.parseStatement()
  return this.finishNode(node, "ForStatement")
}

lp.parseForIn = function(node, init) {
  let type = this.tok.type === tt._in ? "ForInStatement" : "ForOfStatement"
  this.next()
  node.left = init
  node.right = this.parseExpression()
  this.popCx()
  this.expect(tt.parenR)
  node.body = this.parseStatement()
  return this.finishNode(node, type)
}

lp.parseVar = function(noIn) {
  let node = this.startNode()
  node.kind = this.tok.type.keyword
  this.next()
  node.declarations = []
  do {
    let decl = this.startNode()
    decl.id = this.options.ecmaVersion >= 6 ? this.toAssignable(this.parseExprAtom(), true) : this.parseIdent()
    decl.init = this.eat(tt.eq) ? this.parseMaybeAssign(noIn) : null
    node.declarations.push(this.finishNode(decl, "VariableDeclarator"))
  } while (this.eat(tt.comma))
  if (!node.declarations.length) {
    let decl = this.startNode()
    decl.id = this.dummyIdent()
    node.declarations.push(this.finishNode(decl, "VariableDeclarator"))
  }
  if (!noIn) this.semicolon()
  return this.finishNode(node, "VariableDeclaration")
}

lp.parseClass = function(isStatement) {
  let node = this.startNode()
  this.next()
  if (this.tok.type === tt.name) node.id = this.parseIdent()
  else if (isStatement) node.id = this.dummyIdent()
  else node.id = null
  node.superClass = this.eat(tt._extends) ? this.parseExpression() : null
  node.body = this.startNode()
  node.body.body = []
  this.pushCx()
  let indent = this.curIndent + 1, line = this.curLineStart
  this.eat(tt.braceL)
  if (this.curIndent + 1 < indent) { indent = this.curIndent; line = this.curLineStart }
  while (!this.closes(tt.braceR, indent, line)) {
    if (this.semicolon()) continue
    let method = this.startNode(), isGenerator
    if (this.options.ecmaVersion >= 6) {
      method.static = false
      isGenerator = this.eat(tt.star)
    }
    this.parsePropertyName(method)
    if (isDummy(method.key)) { if (isDummy(this.parseMaybeAssign())) this.next(); this.eat(tt.comma); continue }
    if (method.key.type === "Identifier" && !method.computed && method.key.name === "static" &&
        (this.tok.type != tt.parenL && this.tok.type != tt.braceL)) {
      method.static = true
      isGenerator = this.eat(tt.star)
      this.parsePropertyName(method)
    } else {
      method.static = false
    }
    if (this.options.ecmaVersion >= 5 && method.key.type === "Identifier" &&
        !method.computed && (method.key.name === "get" || method.key.name === "set") &&
        this.tok.type !== tt.parenL && this.tok.type !== tt.braceL) {
      method.kind = method.key.name
      this.parsePropertyName(method)
      method.value = this.parseMethod(false)
    } else {
      if (!method.computed && !method.static && !isGenerator && (
        method.key.type === "Identifier" && method.key.name === "constructor" ||
          method.key.type === "Literal" && method.key.value === "constructor")) {
        method.kind = "constructor"
      } else {
        method.kind = "method"
      }
      method.value = this.parseMethod(isGenerator)
    }
    node.body.body.push(this.finishNode(method, "MethodDefinition"))
  }
  this.popCx()
  if (!this.eat(tt.braceR)) {
    // If there is no closing brace, make the node span to the start
    // of the next token (this is useful for Tern)
    this.last.end = this.tok.start
    if (this.options.locations) this.last.loc.end = this.tok.loc.start
  }
  this.semicolon()
  this.finishNode(node.body, "ClassBody")
  return this.finishNode(node, isStatement ? "ClassDeclaration" : "ClassExpression")
}

lp.parseFunction = function(node, isStatement) {
  this.initFunction(node)
  if (this.options.ecmaVersion >= 6) {
    node.generator = this.eat(tt.star)
  }
  if (this.tok.type === tt.name) node.id = this.parseIdent()
  else if (isStatement) node.id = this.dummyIdent()
  node.params = this.parseFunctionParams()
  node.body = this.parseBlock()
  return this.finishNode(node, isStatement ? "FunctionDeclaration" : "FunctionExpression")
}

lp.parseExport = function() {
  let node = this.startNode()
  this.next()
  if (this.eat(tt.star)) {
    node.source = this.eatContextual("from") ? this.parseExprAtom() : null
    return this.finishNode(node, "ExportAllDeclaration")
  }
  if (this.eat(tt._default)) {
    let expr = this.parseMaybeAssign()
    if (expr.id) {
      switch (expr.type) {
      case "FunctionExpression": expr.type = "FunctionDeclaration"; break
      case "ClassExpression": expr.type = "ClassDeclaration"; break
      }
    }
    node.declaration = expr
    this.semicolon()
    return this.finishNode(node, "ExportDefaultDeclaration")
  }
  if (this.tok.type.keyword) {
    node.declaration = this.parseStatement()
    node.specifiers = []
    node.source = null
  } else {
    node.declaration = null
    node.specifiers = this.parseExportSpecifierList()
    node.source = this.eatContextual("from") ? this.parseExprAtom() : null
    this.semicolon()
  }
  return this.finishNode(node, "ExportNamedDeclaration")
}

lp.parseImport = function() {
  let node = this.startNode()
  this.next()
  if (this.tok.type === tt.string) {
    node.specifiers = []
    node.source = this.parseExprAtom()
    node.kind = ''
  } else {
    let elt
    if (this.tok.type === tt.name && this.tok.value !== "from") {
      elt = this.startNode()
      elt.local = this.parseIdent()
      this.finishNode(elt, "ImportDefaultSpecifier")
      this.eat(tt.comma)
    }
    node.specifiers = this.parseImportSpecifierList()
    node.source = this.eatContextual("from") && this.tok.type == tt.string ? this.parseExprAtom() : this.dummyString()
    if (elt) node.specifiers.unshift(elt)
  }
  this.semicolon()
  return this.finishNode(node, "ImportDeclaration")
}

lp.parseImportSpecifierList = function() {
  let elts = []
  if (this.tok.type === tt.star) {
    let elt = this.startNode()
    this.next()
    if (this.eatContextual("as")) elt.local = this.parseIdent()
    elts.push(this.finishNode(elt, "ImportNamespaceSpecifier"))
  } else {
    let indent = this.curIndent, line = this.curLineStart, continuedLine = this.nextLineStart
    this.pushCx()
    this.eat(tt.braceL)
    if (this.curLineStart > continuedLine) continuedLine = this.curLineStart
    while (!this.closes(tt.braceR, indent + (this.curLineStart <= continuedLine ? 1 : 0), line)) {
      let elt = this.startNode()
      if (this.eat(tt.star)) {
        elt.local = this.eatContextual("as") ? this.parseIdent() : this.dummyIdent()
        this.finishNode(elt, "ImportNamespaceSpecifier")
      } else {
        if (this.isContextual("from")) break
        elt.imported = this.parseIdent()
        if (isDummy(elt.imported)) break
        elt.local = this.eatContextual("as") ? this.parseIdent() : elt.imported
        this.finishNode(elt, "ImportSpecifier")
      }
      elts.push(elt)
      this.eat(tt.comma)
    }
    this.eat(tt.braceR)
    this.popCx()
  }
  return elts
}

lp.parseExportSpecifierList = function() {
  let elts = []
  let indent = this.curIndent, line = this.curLineStart, continuedLine = this.nextLineStart
  this.pushCx()
  this.eat(tt.braceL)
  if (this.curLineStart > continuedLine) continuedLine = this.curLineStart
  while (!this.closes(tt.braceR, indent + (this.curLineStart <= continuedLine ? 1 : 0), line)) {
    if (this.isContextual("from")) break
    let elt = this.startNode()
    elt.local = this.parseIdent()
    if (isDummy(elt.local)) break
    elt.exported = this.eatContextual("as") ? this.parseIdent() : elt.local
    this.finishNode(elt, "ExportSpecifier")
    elts.push(elt)
    this.eat(tt.comma)
  }
  this.eat(tt.braceR)
  this.popCx()
  return elts
}
@ -0,0 +1,108 @@
import {tokTypes as tt, Token, isNewLine, SourceLocation, getLineInfo, lineBreakG} from ".."
import {LooseParser} from "./state"

const lp = LooseParser.prototype

function isSpace(ch) {
  return (ch < 14 && ch > 8) || ch === 32 || ch === 160 || isNewLine(ch)
}

lp.next = function() {
  this.last = this.tok
  if (this.ahead.length)
    this.tok = this.ahead.shift()
  else
    this.tok = this.readToken()

  if (this.tok.start >= this.nextLineStart) {
    while (this.tok.start >= this.nextLineStart) {
      this.curLineStart = this.nextLineStart
      this.nextLineStart = this.lineEnd(this.curLineStart) + 1
    }
    this.curIndent = this.indentationAfter(this.curLineStart)
  }
}

lp.readToken = function() {
  for (;;) {
    try {
      this.toks.next()
      if (this.toks.type === tt.dot &&
          this.input.substr(this.toks.end, 1) === "." &&
          this.options.ecmaVersion >= 6) {
        this.toks.end++
        this.toks.type = tt.ellipsis
      }
      return new Token(this.toks)
    } catch(e) {
      if (!(e instanceof SyntaxError)) throw e

      // Try to skip some text, based on the error message, and then continue
      let msg = e.message, pos = e.raisedAt, replace = true
      if (/unterminated/i.test(msg)) {
        pos = this.lineEnd(e.pos + 1)
        if (/string/.test(msg)) {
          replace = {start: e.pos, end: pos, type: tt.string, value: this.input.slice(e.pos + 1, pos)}
        } else if (/regular expr/i.test(msg)) {
          let re = this.input.slice(e.pos, pos)
          try { re = new RegExp(re) } catch(e) {}
          replace = {start: e.pos, end: pos, type: tt.regexp, value: re}
        } else if (/template/.test(msg)) {
          replace = {start: e.pos, end: pos,
                     type: tt.template,
                     value: this.input.slice(e.pos, pos)}
        } else {
          replace = false
        }
      } else if (/invalid (unicode|regexp|number)|expecting unicode|octal literal|is reserved|directly after number|expected number in radix/i.test(msg)) {
        while (pos < this.input.length && !isSpace(this.input.charCodeAt(pos))) ++pos
      } else if (/character escape|expected hexadecimal/i.test(msg)) {
        while (pos < this.input.length) {
          let ch = this.input.charCodeAt(pos++)
          if (ch === 34 || ch === 39 || isNewLine(ch)) break
        }
      } else if (/unexpected character/i.test(msg)) {
        pos++
        replace = false
      } else if (/regular expression/i.test(msg)) {
        replace = true
      } else {
        throw e
      }
      this.resetTo(pos)
      if (replace === true) replace = {start: pos, end: pos, type: tt.name, value: "✖"}
      if (replace) {
        if (this.options.locations)
          replace.loc = new SourceLocation(
            this.toks,
            getLineInfo(this.input, replace.start),
            getLineInfo(this.input, replace.end))
        return replace
      }
    }
  }
}

lp.resetTo = function(pos) {
  this.toks.pos = pos
  let ch = this.input.charAt(pos - 1)
  this.toks.exprAllowed = !ch || /[\[\{\(,;:?\/*=+\-~!|&%^<>]/.test(ch) ||
    /[enwfd]/.test(ch) &&
    /\b(keywords|case|else|return|throw|new|in|(instance|type)of|delete|void)$/.test(this.input.slice(pos - 10, pos))

  if (this.options.locations) {
    this.toks.curLine = 1
    this.toks.lineStart = lineBreakG.lastIndex = 0
    let match
    while ((match = lineBreakG.exec(this.input)) && match.index < pos) {
      ++this.toks.curLine
      this.toks.lineStart = match.index + match[0].length
    }
  }
}

lp.lookAhead = function(n) {
  while (n > this.ahead.length)
    this.ahead.push(this.readToken())
  return this.ahead[n - 1]
}
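// Sketch of the recovery path in readToken above: an unterminated string makes
// the strict tokenizer throw, and the rest of the line is turned into a string
// token so parsing can continue (input is illustrative).
//
//   parse_dammit('x = "abc')
//   // The thrown "unterminated string" error is caught, the text after the
//   // quote becomes a tt.string token, and the assignment still parses.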
@ -0,0 +1,215 @@
import {types as tt} from "./tokentype"
import {Parser} from "./state"
import {has} from "./util"

const pp = Parser.prototype

// Convert existing expression atom to assignable pattern
// if possible.

pp.toAssignable = function(node, isBinding) {
  if (this.options.ecmaVersion >= 6 && node) {
    switch (node.type) {
    case "Identifier":
    case "ObjectPattern":
    case "ArrayPattern":
      break

    case "ObjectExpression":
      node.type = "ObjectPattern"
      for (let i = 0; i < node.properties.length; i++) {
        let prop = node.properties[i]
        if (prop.kind !== "init") this.raise(prop.key.start, "Object pattern can't contain getter or setter")
        this.toAssignable(prop.value, isBinding)
      }
      break

    case "ArrayExpression":
      node.type = "ArrayPattern"
      this.toAssignableList(node.elements, isBinding)
      break

    case "AssignmentExpression":
      if (node.operator === "=") {
        node.type = "AssignmentPattern"
        delete node.operator
        // falls through to AssignmentPattern
      } else {
        this.raise(node.left.end, "Only '=' operator can be used for specifying default value.")
        break;
      }

    case "AssignmentPattern":
      if (node.right.type === "YieldExpression")
        this.raise(node.right.start, "Yield expression cannot be a default value")
      break;

    case "ParenthesizedExpression":
      node.expression = this.toAssignable(node.expression, isBinding)
      break

    case "MemberExpression":
      if (!isBinding) break

    default:
      this.raise(node.start, "Assigning to rvalue")
    }
  }
  return node
}

// Convert list of expression atoms to binding list.

pp.toAssignableList = function(exprList, isBinding) {
  let end = exprList.length
  if (end) {
    let last = exprList[end - 1]
    if (last && last.type == "RestElement") {
      --end
    } else if (last && last.type == "SpreadElement") {
      last.type = "RestElement"
      let arg = last.argument
      this.toAssignable(arg, isBinding)
      if (arg.type !== "Identifier" && arg.type !== "MemberExpression" && arg.type !== "ArrayPattern")
        this.unexpected(arg.start)
      --end
    }

    if (isBinding && last.type === "RestElement" && last.argument.type !== "Identifier")
      this.unexpected(last.argument.start);
  }
  for (let i = 0; i < end; i++) {
    let elt = exprList[i]
    if (elt) this.toAssignable(elt, isBinding)
  }
  return exprList
}

// Parses spread element.

pp.parseSpread = function(refDestructuringErrors) {
  let node = this.startNode()
  this.next()
  node.argument = this.parseMaybeAssign(refDestructuringErrors)
  return this.finishNode(node, "SpreadElement")
}

pp.parseRest = function(allowNonIdent) {
  let node = this.startNode()
  this.next()

  // RestElement inside of a function parameter must be an identifier
  if (allowNonIdent) node.argument = this.type === tt.name ? this.parseIdent() : this.unexpected()
  else node.argument = this.type === tt.name || this.type === tt.bracketL ? this.parseBindingAtom() : this.unexpected()

  return this.finishNode(node, "RestElement")
}

// Parses lvalue (assignable) atom.

pp.parseBindingAtom = function() {
  if (this.options.ecmaVersion < 6) return this.parseIdent()
  switch (this.type) {
  case tt.name:
    return this.parseIdent()

  case tt.bracketL:
    let node = this.startNode()
    this.next()
    node.elements = this.parseBindingList(tt.bracketR, true, true)
    return this.finishNode(node, "ArrayPattern")

  case tt.braceL:
    return this.parseObj(true)

  default:
    this.unexpected()
  }
}

pp.parseBindingList = function(close, allowEmpty, allowTrailingComma, allowNonIdent) {
  let elts = [], first = true
  while (!this.eat(close)) {
    if (first) first = false
    else this.expect(tt.comma)
    if (allowEmpty && this.type === tt.comma) {
      elts.push(null)
    } else if (allowTrailingComma && this.afterTrailingComma(close)) {
      break
    } else if (this.type === tt.ellipsis) {
      let rest = this.parseRest(allowNonIdent)
      this.parseBindingListItem(rest)
      elts.push(rest)
      this.expect(close)
      break
    } else {
      let elem = this.parseMaybeDefault(this.start, this.startLoc)
      this.parseBindingListItem(elem)
      elts.push(elem)
    }
  }
  return elts
}

pp.parseBindingListItem = function(param) {
  return param
}

// Parses assignment pattern around given atom if possible.

pp.parseMaybeDefault = function(startPos, startLoc, left) {
  left = left || this.parseBindingAtom()
  if (this.options.ecmaVersion < 6 || !this.eat(tt.eq)) return left
  let node = this.startNodeAt(startPos, startLoc)
  node.left = left
  node.right = this.parseMaybeAssign()
  return this.finishNode(node, "AssignmentPattern")
}

// Verify that a node is an lval — something that can be assigned
// to.

pp.checkLVal = function(expr, isBinding, checkClashes) {
  switch (expr.type) {
  case "Identifier":
    if (this.strict && this.reservedWordsStrictBind.test(expr.name))
      this.raise(expr.start, (isBinding ? "Binding " : "Assigning to ") + expr.name + " in strict mode")
    if (checkClashes) {
      if (has(checkClashes, expr.name))
        this.raise(expr.start, "Argument name clash")
      checkClashes[expr.name] = true
    }
    break

  case "MemberExpression":
    if (isBinding) this.raise(expr.start, (isBinding ? "Binding" : "Assigning to") + " member expression")
    break

  case "ObjectPattern":
    for (let i = 0; i < expr.properties.length; i++)
      this.checkLVal(expr.properties[i].value, isBinding, checkClashes)
    break

  case "ArrayPattern":
    for (let i = 0; i < expr.elements.length; i++) {
      let elem = expr.elements[i]
      if (elem) this.checkLVal(elem, isBinding, checkClashes)
    }
    break

  case "AssignmentPattern":
    this.checkLVal(expr.left, isBinding, checkClashes)
    break

  case "RestElement":
    this.checkLVal(expr.argument, isBinding, checkClashes)
    break

  case "ParenthesizedExpression":
    this.checkLVal(expr.expression, isBinding, checkClashes)
    break

  default:
    this.raise(expr.start, (isBinding ? "Binding" : "Assigning to") + " rvalue")
  }
}
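// Sketch of the in-place re-tagging done by toAssignable/toAssignableList above;
// destructuring targets are parsed as ordinary expressions first, then converted:
//
//   // ObjectExpression        -> ObjectPattern       e.g. ({a, b} = obj)
//   // ArrayExpression         -> ArrayPattern        e.g. [x, y] = pair
//   // SpreadElement           -> RestElement         e.g. [head, ...tail] = list
//   // AssignmentExpression(=) -> AssignmentPattern   e.g. [x = 1] = arr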
@ -0,0 +1,50 @@
import {Parser} from "./state"
import {SourceLocation} from "./locutil"

export class Node {
  constructor(parser, pos, loc) {
    this.type = ""
    this.start = pos
    this.end = 0
    if (parser.options.locations)
      this.loc = new SourceLocation(parser, loc)
    if (parser.options.directSourceFile)
      this.sourceFile = parser.options.directSourceFile
    if (parser.options.ranges)
      this.range = [pos, 0]
  }
}

// Start an AST node, attaching a start offset.

const pp = Parser.prototype

pp.startNode = function() {
  return new Node(this, this.start, this.startLoc)
}

pp.startNodeAt = function(pos, loc) {
  return new Node(this, pos, loc)
}

// Finish an AST node, adding `type` and `end` properties.

function finishNodeAt(node, type, pos, loc) {
  node.type = type
  node.end = pos
  if (this.options.locations)
    node.loc.end = loc
  if (this.options.ranges)
    node.range[1] = pos
  return node
}

pp.finishNode = function(node, type) {
  return finishNodeAt.call(this, node, type, this.lastTokEnd, this.lastTokEndLoc)
}

// Finish node at given position

pp.finishNodeAt = function(node, type, pos, loc) {
  return finishNodeAt.call(this, node, type, pos, loc)
}
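// Sketch of the node bookkeeping above for parse("foo", {ranges: true}); the
// identifier node comes out roughly as:
//
//   {type: "Identifier", start: 0, end: 3, range: [0, 3], name: "foo"}
//   // with {locations: true} a `loc` SourceLocation (start/end {line, column})
//   // is attached as well.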
@@ -0,0 +1,121 @@
import {has, isArray} from "./util"
import {SourceLocation} from "./locutil"

// A second optional argument can be given to further configure
// the parser process. These options are recognized:

export const defaultOptions = {
  // `ecmaVersion` indicates the ECMAScript version to parse. Must
  // be either 3, or 5, or 6. This influences support for strict
  // mode, the set of reserved words, support for getters and
  // setters and other features.
  ecmaVersion: 5,
  // Source type ("script" or "module") for different semantics
  sourceType: "script",
  // `onInsertedSemicolon` can be a callback that will be called
  // when a semicolon is automatically inserted. It will be passed
  // the position of the inserted semicolon as an offset, and if `locations` is
  // enabled, it is given the location as a `{line, column}` object
  // as second argument.
  onInsertedSemicolon: null,
  // `onTrailingComma` is similar to `onInsertedSemicolon`, but for
  // trailing commas.
  onTrailingComma: null,
  // By default, reserved words are only enforced if ecmaVersion >= 5.
  // Set `allowReserved` to a boolean value to explicitly turn this on
  // and off. When this option has the value "never", reserved words
  // and keywords can also not be used as property names.
  allowReserved: null,
  // When enabled, a return at the top level is not considered an
  // error.
  allowReturnOutsideFunction: false,
  // When enabled, import/export statements are not constrained to
  // appearing at the top of the program.
  allowImportExportEverywhere: false,
  // When enabled, a hashbang directive at the beginning of the file
  // is allowed and treated as a line comment.
  allowHashBang: false,
  // When `locations` is on, `loc` properties holding objects with
  // `start` and `end` properties in `{line, column}` form (with
  // line being 1-based and column 0-based) will be attached to the
  // nodes.
  locations: false,
  // A function can be passed as `onToken` option, which will
  // cause Acorn to call that function with an object in the same
  // format as tokens returned from `tokenizer().getToken()`. Note
  // that you are not allowed to call the parser from the
  // callback—that will corrupt its internal state.
  onToken: null,
  // A function can be passed as `onComment` option, which will
  // cause Acorn to call that function with `(block, text, start,
  // end)` parameters whenever a comment is skipped. `block` is a
  // boolean indicating whether this is a block (`/* */`) comment,
  // `text` is the content of the comment, and `start` and `end` are
  // character offsets that denote the start and end of the comment.
  // When the `locations` option is on, two more parameters are
  // passed, the full `{line, column}` locations of the start and
  // end of the comments. Note that you are not allowed to call the
  // parser from the callback—that will corrupt its internal state.
  onComment: null,
  // Nodes have their start and end character offsets recorded in
  // `start` and `end` properties (directly on the node, rather than
  // the `loc` object, which holds line/column data). To also add a
  // [semi-standardized][range] `range` property holding a `[start,
  // end]` array with the same numbers, set the `ranges` option to
  // `true`.
  //
  // [range]: https://bugzilla.mozilla.org/show_bug.cgi?id=745678
  ranges: false,
  // It is possible to parse multiple files into a single AST by
  // passing the tree produced by parsing the first file as
  // `program` option in subsequent parses. This will add the
  // toplevel forms of the parsed file to the `Program` (top) node
  // of an existing parse tree.
  program: null,
  // When `locations` is on, you can pass this to record the source
  // file in every node's `loc` object.
  sourceFile: null,
  // This value, if given, is stored in every node, whether
  // `locations` is on or off.
  directSourceFile: null,
  // When enabled, parenthesized expressions are represented by
  // (non-standard) ParenthesizedExpression nodes
  preserveParens: false,
  plugins: {}
}

// Interpret and default an options object

export function getOptions(opts) {
  let options = {}
  for (let opt in defaultOptions)
    options[opt] = opts && has(opts, opt) ? opts[opt] : defaultOptions[opt]
  if (options.allowReserved == null)
    options.allowReserved = options.ecmaVersion < 5

  if (isArray(options.onToken)) {
    let tokens = options.onToken
    options.onToken = (token) => tokens.push(token)
  }
  if (isArray(options.onComment))
    options.onComment = pushComment(options, options.onComment)

  return options
}

function pushComment(options, array) {
  return function (block, text, start, end, startLoc, endLoc) {
    let comment = {
      type: block ? 'Block' : 'Line',
      value: text,
      start: start,
      end: end
    }
    if (options.locations)
      comment.loc = new SourceLocation(this, startLoc, endLoc)
    if (options.ranges)
      comment.range = [start, end]
    array.push(comment)
  }
}
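For orientation, a minimal usage sketch of the options layer above (not part of the committed diff; the variable names are illustrative):

// Hypothetical usage of getOptions: an array passed as `onToken` is wrapped
// into a push callback, and `allowReserved` is defaulted from `ecmaVersion`.
import {getOptions} from "./options"

let tokens = []
let options = getOptions({ecmaVersion: 6, onToken: tokens})
// options.onToken is now a function that does tokens.push(token)
// options.allowReserved === false, because ecmaVersion >= 5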
@@ -0,0 +1,102 @@
import {types as tt} from "./tokentype"
import {Parser} from "./state"
import {lineBreak} from "./whitespace"

const pp = Parser.prototype

// ## Parser utilities

// Test whether a statement node is the string literal `"use strict"`.

pp.isUseStrict = function(stmt) {
  return this.options.ecmaVersion >= 5 && stmt.type === "ExpressionStatement" &&
    stmt.expression.type === "Literal" &&
    stmt.expression.raw.slice(1, -1) === "use strict"
}

// Predicate that tests whether the next token is of the given
// type, and if yes, consumes it as a side effect.

pp.eat = function(type) {
  if (this.type === type) {
    this.next()
    return true
  } else {
    return false
  }
}

// Tests whether parsed token is a contextual keyword.

pp.isContextual = function(name) {
  return this.type === tt.name && this.value === name
}

// Consumes contextual keyword if possible.

pp.eatContextual = function(name) {
  return this.value === name && this.eat(tt.name)
}

// Asserts that following token is given contextual keyword.

pp.expectContextual = function(name) {
  if (!this.eatContextual(name)) this.unexpected()
}

// Test whether a semicolon can be inserted at the current position.

pp.canInsertSemicolon = function() {
  return this.type === tt.eof ||
    this.type === tt.braceR ||
    lineBreak.test(this.input.slice(this.lastTokEnd, this.start))
}

pp.insertSemicolon = function() {
  if (this.canInsertSemicolon()) {
    if (this.options.onInsertedSemicolon)
      this.options.onInsertedSemicolon(this.lastTokEnd, this.lastTokEndLoc)
    return true
  }
}

// Consume a semicolon, or, failing that, see if we are allowed to
// pretend that there is a semicolon at this position.

pp.semicolon = function() {
  if (!this.eat(tt.semi) && !this.insertSemicolon()) this.unexpected()
}

pp.afterTrailingComma = function(tokType) {
  if (this.type == tokType) {
    if (this.options.onTrailingComma)
      this.options.onTrailingComma(this.lastTokStart, this.lastTokStartLoc)
    this.next()
    return true
  }
}

// Expect a token of a given type. If found, consume it, otherwise,
// raise an unexpected token error.

pp.expect = function(type) {
  this.eat(type) || this.unexpected()
}

// Raise an unexpected token error.

pp.unexpected = function(pos) {
  this.raise(pos != null ? pos : this.start, "Unexpected token")
}

pp.checkPatternErrors = function(refDestructuringErrors, andThrow) {
  let pos = refDestructuringErrors && refDestructuringErrors.trailingComma
  if (!andThrow) return !!pos
  if (pos) this.raise(pos, "Trailing comma is not permitted in destructuring patterns")
}

pp.checkExpressionErrors = function(refDestructuringErrors, andThrow) {
  let pos = refDestructuringErrors && refDestructuringErrors.shorthandAssign
  if (!andThrow) return !!pos
  if (pos) this.raise(pos, "Shorthand property assignments are valid only in destructuring patterns")
}
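To make the role of these helpers concrete, here is a hedged sketch of how a statement parser typically composes them, mirroring the shape of the statement parsers later in this diff (the method name is hypothetical and not part of the committed file):

// Hypothetical example: a keyword-only statement is parsed by consuming the
// keyword, letting semicolon() accept either a real `;` or automatic
// semicolon insertion, and finishing the node.
pp.parseKeywordOnlyStatement = function(node, typeName) {
  this.next()        // consume the keyword token
  this.semicolon()   // eat `;`, or fall back to insertSemicolon()
  return this.finishNode(node, typeName)
}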
@@ -0,0 +1,104 @@
import {reservedWords, keywords} from "./identifier"
import {types as tt} from "./tokentype"
import {lineBreak} from "./whitespace"
import {getOptions} from "./options"

// Registered plugins
export const plugins = {}

function keywordRegexp(words) {
  return new RegExp("^(" + words.replace(/ /g, "|") + ")$")
}

export class Parser {
  constructor(options, input, startPos) {
    this.options = options = getOptions(options)
    this.sourceFile = options.sourceFile
    this.keywords = keywordRegexp(keywords[options.ecmaVersion >= 6 ? 6 : 5])
    let reserved = options.allowReserved ? "" :
      reservedWords[options.ecmaVersion] + (options.sourceType == "module" ? " await" : "")
    this.reservedWords = keywordRegexp(reserved)
    let reservedStrict = (reserved ? reserved + " " : "") + reservedWords.strict
    this.reservedWordsStrict = keywordRegexp(reservedStrict)
    this.reservedWordsStrictBind = keywordRegexp(reservedStrict + " " + reservedWords.strictBind)
    this.input = String(input)

    // Used to signal to callers of `readWord1` whether the word
    // contained any escape sequences. This is needed because words with
    // escape sequences must not be interpreted as keywords.
    this.containsEsc = false;

    // Load plugins
    this.loadPlugins(options.plugins)

    // Set up token state

    // The current position of the tokenizer in the input.
    if (startPos) {
      this.pos = startPos
      this.lineStart = Math.max(0, this.input.lastIndexOf("\n", startPos))
      this.curLine = this.input.slice(0, this.lineStart).split(lineBreak).length
    } else {
      this.pos = this.lineStart = 0
      this.curLine = 1
    }

    // Properties of the current token:
    // Its type
    this.type = tt.eof
    // For tokens that include more information than their type, the value
    this.value = null
    // Its start and end offset
    this.start = this.end = this.pos
    // And, if locations are used, the {line, column} object
    // corresponding to those offsets
    this.startLoc = this.endLoc = this.curPosition()

    // Position information for the previous token
    this.lastTokEndLoc = this.lastTokStartLoc = null
    this.lastTokStart = this.lastTokEnd = this.pos

    // The context stack is used to superficially track syntactic
    // context to predict whether a regular expression is allowed in a
    // given position.
    this.context = this.initialContext()
    this.exprAllowed = true

    // Figure out whether this is module code.
    this.strict = this.inModule = options.sourceType === "module"

    // Used to signify the start of a potential arrow function
    this.potentialArrowAt = -1

    // Flags to track whether we are in a function, a generator.
    this.inFunction = this.inGenerator = false
    // Labels in scope.
    this.labels = []

    // If enabled, skip leading hashbang line.
    if (this.pos === 0 && options.allowHashBang && this.input.slice(0, 2) === '#!')
      this.skipLineComment(2)
  }

  // DEPRECATED Kept for backwards compatibility until 3.0 in case a plugin uses them
  isKeyword(word) { return this.keywords.test(word) }
  isReservedWord(word) { return this.reservedWords.test(word) }

  extend(name, f) {
    this[name] = f(this[name])
  }

  loadPlugins(pluginConfigs) {
    for (let name in pluginConfigs) {
      let plugin = plugins[name]
      if (!plugin) throw new Error("Plugin '" + name + "' not found")
      plugin(this, pluginConfigs[name])
    }
  }

  parse() {
    let node = this.options.program || this.startNode()
    this.nextToken()
    return this.parseTopLevel(node)
  }
}
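A hedged sketch of how the `plugins` registry, `extend` and `parse` above fit together (illustrative only; the plugin name and its option are made up):

// Hypothetical plugin: wraps parseTopLevel via the extend() hook shown above.
import {Parser, plugins} from "./state"

plugins.countStatements = function(parser, config) {
  parser.extend("parseTopLevel", inner => function(node) {
    let program = inner.call(this, node)
    if (config && config.log) console.log(program.body.length + " top-level statements")
    return program
  })
}

// new Parser({plugins: {countStatements: {log: true}}}, "var a = 1;").parse()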
@@ -0,0 +1,626 @@
|
||||||
|
import {types as tt} from "./tokentype"
|
||||||
|
import {Parser} from "./state"
|
||||||
|
import {lineBreak} from "./whitespace"
|
||||||
|
|
||||||
|
const pp = Parser.prototype
|
||||||
|
|
||||||
|
// ### Statement parsing
|
||||||
|
|
||||||
|
// Parse a program. Initializes the parser, reads any number of
|
||||||
|
// statements, and wraps them in a Program node. Optionally takes a
|
||||||
|
// `program` argument. If present, the statements will be appended
|
||||||
|
// to its body instead of creating a new node.
|
||||||
|
|
||||||
|
pp.parseTopLevel = function(node) {
|
||||||
|
let first = true
|
||||||
|
if (!node.body) node.body = []
|
||||||
|
while (this.type !== tt.eof) {
|
||||||
|
let stmt = this.parseStatement(true, true)
|
||||||
|
node.body.push(stmt)
|
||||||
|
if (first) {
|
||||||
|
if (this.isUseStrict(stmt)) this.setStrict(true)
|
||||||
|
first = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.next()
|
||||||
|
if (this.options.ecmaVersion >= 6) {
|
||||||
|
node.sourceType = this.options.sourceType
|
||||||
|
}
|
||||||
|
return this.finishNode(node, "Program")
|
||||||
|
}
|
||||||
|
|
||||||
|
const loopLabel = {kind: "loop"}, switchLabel = {kind: "switch"}
|
||||||
|
|
||||||
|
// Parse a single statement.
|
||||||
|
//
|
||||||
|
// If expecting a statement and finding a slash operator, parse a
|
||||||
|
// regular expression literal. This is to handle cases like
|
||||||
|
// `if (foo) /blah/.exec(foo)`, where looking at the previous token
|
||||||
|
// does not help.
|
||||||
|
|
||||||
|
pp.parseStatement = function(declaration, topLevel) {
|
||||||
|
let starttype = this.type, node = this.startNode()
|
||||||
|
|
||||||
|
// Most types of statements are recognized by the keyword they
|
||||||
|
// start with. Many are trivial to parse, some require a bit of
|
||||||
|
// complexity.
|
||||||
|
|
||||||
|
switch (starttype) {
|
||||||
|
case tt._break: case tt._continue: return this.parseBreakContinueStatement(node, starttype.keyword)
|
||||||
|
case tt._debugger: return this.parseDebuggerStatement(node)
|
||||||
|
case tt._do: return this.parseDoStatement(node)
|
||||||
|
case tt._for: return this.parseForStatement(node)
|
||||||
|
case tt._function:
|
||||||
|
if (!declaration && this.options.ecmaVersion >= 6) this.unexpected()
|
||||||
|
return this.parseFunctionStatement(node)
|
||||||
|
case tt._class:
|
||||||
|
if (!declaration) this.unexpected()
|
||||||
|
return this.parseClass(node, true)
|
||||||
|
case tt._if: return this.parseIfStatement(node)
|
||||||
|
case tt._return: return this.parseReturnStatement(node)
|
||||||
|
case tt._switch: return this.parseSwitchStatement(node)
|
||||||
|
case tt._throw: return this.parseThrowStatement(node)
|
||||||
|
case tt._try: return this.parseTryStatement(node)
|
||||||
|
case tt._let: case tt._const: if (!declaration) this.unexpected() // NOTE: falls through to _var
|
||||||
|
case tt._var: return this.parseVarStatement(node, starttype)
|
||||||
|
case tt._while: return this.parseWhileStatement(node)
|
||||||
|
case tt._with: return this.parseWithStatement(node)
|
||||||
|
case tt.braceL: return this.parseBlock()
|
||||||
|
case tt.semi: return this.parseEmptyStatement(node)
|
||||||
|
case tt._export:
|
||||||
|
case tt._import:
|
||||||
|
if (!this.options.allowImportExportEverywhere) {
|
||||||
|
if (!topLevel)
|
||||||
|
this.raise(this.start, "'import' and 'export' may only appear at the top level")
|
||||||
|
if (!this.inModule)
|
||||||
|
this.raise(this.start, "'import' and 'export' may appear only with 'sourceType: module'")
|
||||||
|
}
|
||||||
|
return starttype === tt._import ? this.parseImport(node) : this.parseExport(node)
|
||||||
|
|
||||||
|
// If the statement does not start with a statement keyword or a
|
||||||
|
// brace, it's an ExpressionStatement or LabeledStatement. We
|
||||||
|
// simply start parsing an expression, and afterwards, if the
|
||||||
|
// next token is a colon and the expression was a simple
|
||||||
|
// Identifier node, we switch to interpreting it as a label.
|
||||||
|
default:
|
||||||
|
let maybeName = this.value, expr = this.parseExpression()
|
||||||
|
if (starttype === tt.name && expr.type === "Identifier" && this.eat(tt.colon))
|
||||||
|
return this.parseLabeledStatement(node, maybeName, expr)
|
||||||
|
else return this.parseExpressionStatement(node, expr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseBreakContinueStatement = function(node, keyword) {
|
||||||
|
let isBreak = keyword == "break"
|
||||||
|
this.next()
|
||||||
|
if (this.eat(tt.semi) || this.insertSemicolon()) node.label = null
|
||||||
|
else if (this.type !== tt.name) this.unexpected()
|
||||||
|
else {
|
||||||
|
node.label = this.parseIdent()
|
||||||
|
this.semicolon()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify that there is an actual destination to break or
|
||||||
|
// continue to.
|
||||||
|
for (var i = 0; i < this.labels.length; ++i) {
|
||||||
|
let lab = this.labels[i]
|
||||||
|
if (node.label == null || lab.name === node.label.name) {
|
||||||
|
if (lab.kind != null && (isBreak || lab.kind === "loop")) break
|
||||||
|
if (node.label && isBreak) break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (i === this.labels.length) this.raise(node.start, "Unsyntactic " + keyword)
|
||||||
|
return this.finishNode(node, isBreak ? "BreakStatement" : "ContinueStatement")
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseDebuggerStatement = function(node) {
|
||||||
|
this.next()
|
||||||
|
this.semicolon()
|
||||||
|
return this.finishNode(node, "DebuggerStatement")
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseDoStatement = function(node) {
|
||||||
|
this.next()
|
||||||
|
this.labels.push(loopLabel)
|
||||||
|
node.body = this.parseStatement(false)
|
||||||
|
this.labels.pop()
|
||||||
|
this.expect(tt._while)
|
||||||
|
node.test = this.parseParenExpression()
|
||||||
|
if (this.options.ecmaVersion >= 6)
|
||||||
|
this.eat(tt.semi)
|
||||||
|
else
|
||||||
|
this.semicolon()
|
||||||
|
return this.finishNode(node, "DoWhileStatement")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Disambiguating between a `for` and a `for`/`in` or `for`/`of`
|
||||||
|
// loop is non-trivial. Basically, we have to parse the init `var`
|
||||||
|
// statement or expression, disallowing the `in` operator (see
|
||||||
|
// the second parameter to `parseExpression`), and then check
|
||||||
|
// whether the next token is `in` or `of`. When there is no init
|
||||||
|
// part (semicolon immediately after the opening parenthesis), it
|
||||||
|
// is a regular `for` loop.
|
||||||
|
|
||||||
|
pp.parseForStatement = function(node) {
|
||||||
|
this.next()
|
||||||
|
this.labels.push(loopLabel)
|
||||||
|
this.expect(tt.parenL)
|
||||||
|
if (this.type === tt.semi) return this.parseFor(node, null)
|
||||||
|
if (this.type === tt._var || this.type === tt._let || this.type === tt._const) {
|
||||||
|
let init = this.startNode(), varKind = this.type
|
||||||
|
this.next()
|
||||||
|
this.parseVar(init, true, varKind)
|
||||||
|
this.finishNode(init, "VariableDeclaration")
|
||||||
|
if ((this.type === tt._in || (this.options.ecmaVersion >= 6 && this.isContextual("of"))) && init.declarations.length === 1 &&
|
||||||
|
!(varKind !== tt._var && init.declarations[0].init))
|
||||||
|
return this.parseForIn(node, init)
|
||||||
|
return this.parseFor(node, init)
|
||||||
|
}
|
||||||
|
let refDestructuringErrors = {shorthandAssign: 0, trailingComma: 0}
|
||||||
|
let init = this.parseExpression(true, refDestructuringErrors)
|
||||||
|
if (this.type === tt._in || (this.options.ecmaVersion >= 6 && this.isContextual("of"))) {
|
||||||
|
this.checkPatternErrors(refDestructuringErrors, true)
|
||||||
|
this.toAssignable(init)
|
||||||
|
this.checkLVal(init)
|
||||||
|
return this.parseForIn(node, init)
|
||||||
|
} else {
|
||||||
|
this.checkExpressionErrors(refDestructuringErrors, true)
|
||||||
|
}
|
||||||
|
return this.parseFor(node, init)
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseFunctionStatement = function(node) {
|
||||||
|
this.next()
|
||||||
|
return this.parseFunction(node, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseIfStatement = function(node) {
|
||||||
|
this.next()
|
||||||
|
node.test = this.parseParenExpression()
|
||||||
|
node.consequent = this.parseStatement(false)
|
||||||
|
node.alternate = this.eat(tt._else) ? this.parseStatement(false) : null
|
||||||
|
return this.finishNode(node, "IfStatement")
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseReturnStatement = function(node) {
|
||||||
|
if (!this.inFunction && !this.options.allowReturnOutsideFunction)
|
||||||
|
this.raise(this.start, "'return' outside of function")
|
||||||
|
this.next()
|
||||||
|
|
||||||
|
// In `return` (and `break`/`continue`), the keywords with
|
||||||
|
// optional arguments, we eagerly look for a semicolon or the
|
||||||
|
// possibility to insert one.
|
||||||
|
|
||||||
|
if (this.eat(tt.semi) || this.insertSemicolon()) node.argument = null
|
||||||
|
else { node.argument = this.parseExpression(); this.semicolon() }
|
||||||
|
return this.finishNode(node, "ReturnStatement")
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseSwitchStatement = function(node) {
|
||||||
|
this.next()
|
||||||
|
node.discriminant = this.parseParenExpression()
|
||||||
|
node.cases = []
|
||||||
|
this.expect(tt.braceL)
|
||||||
|
this.labels.push(switchLabel)
|
||||||
|
|
||||||
|
// Statements under must be grouped (by label) in SwitchCase
|
||||||
|
// nodes. `cur` is used to keep the node that we are currently
|
||||||
|
// adding statements to.
|
||||||
|
|
||||||
|
for (var cur, sawDefault = false; this.type != tt.braceR;) {
|
||||||
|
if (this.type === tt._case || this.type === tt._default) {
|
||||||
|
let isCase = this.type === tt._case
|
||||||
|
if (cur) this.finishNode(cur, "SwitchCase")
|
||||||
|
node.cases.push(cur = this.startNode())
|
||||||
|
cur.consequent = []
|
||||||
|
this.next()
|
||||||
|
if (isCase) {
|
||||||
|
cur.test = this.parseExpression()
|
||||||
|
} else {
|
||||||
|
if (sawDefault) this.raise(this.lastTokStart, "Multiple default clauses")
|
||||||
|
sawDefault = true
|
||||||
|
cur.test = null
|
||||||
|
}
|
||||||
|
this.expect(tt.colon)
|
||||||
|
} else {
|
||||||
|
if (!cur) this.unexpected()
|
||||||
|
cur.consequent.push(this.parseStatement(true))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (cur) this.finishNode(cur, "SwitchCase")
|
||||||
|
this.next() // Closing brace
|
||||||
|
this.labels.pop()
|
||||||
|
return this.finishNode(node, "SwitchStatement")
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseThrowStatement = function(node) {
|
||||||
|
this.next()
|
||||||
|
if (lineBreak.test(this.input.slice(this.lastTokEnd, this.start)))
|
||||||
|
this.raise(this.lastTokEnd, "Illegal newline after throw")
|
||||||
|
node.argument = this.parseExpression()
|
||||||
|
this.semicolon()
|
||||||
|
return this.finishNode(node, "ThrowStatement")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reused empty array added for node fields that are always empty.
|
||||||
|
|
||||||
|
const empty = []
|
||||||
|
|
||||||
|
pp.parseTryStatement = function(node) {
|
||||||
|
this.next()
|
||||||
|
node.block = this.parseBlock()
|
||||||
|
node.handler = null
|
||||||
|
if (this.type === tt._catch) {
|
||||||
|
let clause = this.startNode()
|
||||||
|
this.next()
|
||||||
|
this.expect(tt.parenL)
|
||||||
|
clause.param = this.parseBindingAtom()
|
||||||
|
this.checkLVal(clause.param, true)
|
||||||
|
this.expect(tt.parenR)
|
||||||
|
clause.body = this.parseBlock()
|
||||||
|
node.handler = this.finishNode(clause, "CatchClause")
|
||||||
|
}
|
||||||
|
node.finalizer = this.eat(tt._finally) ? this.parseBlock() : null
|
||||||
|
if (!node.handler && !node.finalizer)
|
||||||
|
this.raise(node.start, "Missing catch or finally clause")
|
||||||
|
return this.finishNode(node, "TryStatement")
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseVarStatement = function(node, kind) {
|
||||||
|
this.next()
|
||||||
|
this.parseVar(node, false, kind)
|
||||||
|
this.semicolon()
|
||||||
|
return this.finishNode(node, "VariableDeclaration")
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseWhileStatement = function(node) {
|
||||||
|
this.next()
|
||||||
|
node.test = this.parseParenExpression()
|
||||||
|
this.labels.push(loopLabel)
|
||||||
|
node.body = this.parseStatement(false)
|
||||||
|
this.labels.pop()
|
||||||
|
return this.finishNode(node, "WhileStatement")
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseWithStatement = function(node) {
|
||||||
|
if (this.strict) this.raise(this.start, "'with' in strict mode")
|
||||||
|
this.next()
|
||||||
|
node.object = this.parseParenExpression()
|
||||||
|
node.body = this.parseStatement(false)
|
||||||
|
return this.finishNode(node, "WithStatement")
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseEmptyStatement = function(node) {
|
||||||
|
this.next()
|
||||||
|
return this.finishNode(node, "EmptyStatement")
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseLabeledStatement = function(node, maybeName, expr) {
|
||||||
|
for (let i = 0; i < this.labels.length; ++i)
|
||||||
|
if (this.labels[i].name === maybeName) this.raise(expr.start, "Label '" + maybeName + "' is already declared")
|
||||||
|
let kind = this.type.isLoop ? "loop" : this.type === tt._switch ? "switch" : null
|
||||||
|
for (let i = this.labels.length - 1; i >= 0; i--) {
|
||||||
|
let label = this.labels[i]
|
||||||
|
if (label.statementStart == node.start) {
|
||||||
|
label.statementStart = this.start;
|
||||||
|
label.kind = kind;
|
||||||
|
} else break;
|
||||||
|
}
|
||||||
|
this.labels.push({name: maybeName, kind: kind, statementStart: this.start})
|
||||||
|
node.body = this.parseStatement(true)
|
||||||
|
this.labels.pop()
|
||||||
|
node.label = expr
|
||||||
|
return this.finishNode(node, "LabeledStatement")
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseExpressionStatement = function(node, expr) {
|
||||||
|
node.expression = expr
|
||||||
|
this.semicolon()
|
||||||
|
return this.finishNode(node, "ExpressionStatement")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse a semicolon-enclosed block of statements, handling `"use
|
||||||
|
// strict"` declarations when `allowStrict` is true (used for
|
||||||
|
// function bodies).
|
||||||
|
|
||||||
|
pp.parseBlock = function(allowStrict) {
|
||||||
|
let node = this.startNode(), first = true, oldStrict
|
||||||
|
node.body = []
|
||||||
|
this.expect(tt.braceL)
|
||||||
|
while (!this.eat(tt.braceR)) {
|
||||||
|
let stmt = this.parseStatement(true)
|
||||||
|
node.body.push(stmt)
|
||||||
|
if (first && allowStrict && this.isUseStrict(stmt)) {
|
||||||
|
oldStrict = this.strict
|
||||||
|
this.setStrict(this.strict = true)
|
||||||
|
}
|
||||||
|
first = false
|
||||||
|
}
|
||||||
|
if (oldStrict === false) this.setStrict(false)
|
||||||
|
return this.finishNode(node, "BlockStatement")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse a regular `for` loop. The disambiguation code in
|
||||||
|
// `parseStatement` will already have parsed the init statement or
|
||||||
|
// expression.
|
||||||
|
|
||||||
|
pp.parseFor = function(node, init) {
|
||||||
|
node.init = init
|
||||||
|
this.expect(tt.semi)
|
||||||
|
node.test = this.type === tt.semi ? null : this.parseExpression()
|
||||||
|
this.expect(tt.semi)
|
||||||
|
node.update = this.type === tt.parenR ? null : this.parseExpression()
|
||||||
|
this.expect(tt.parenR)
|
||||||
|
node.body = this.parseStatement(false)
|
||||||
|
this.labels.pop()
|
||||||
|
return this.finishNode(node, "ForStatement")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse a `for`/`in` and `for`/`of` loop, which are almost
|
||||||
|
// same from parser's perspective.
|
||||||
|
|
||||||
|
pp.parseForIn = function(node, init) {
|
||||||
|
let type = this.type === tt._in ? "ForInStatement" : "ForOfStatement"
|
||||||
|
this.next()
|
||||||
|
node.left = init
|
||||||
|
node.right = this.parseExpression()
|
||||||
|
this.expect(tt.parenR)
|
||||||
|
node.body = this.parseStatement(false)
|
||||||
|
this.labels.pop()
|
||||||
|
return this.finishNode(node, type)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse a list of variable declarations.
|
||||||
|
|
||||||
|
pp.parseVar = function(node, isFor, kind) {
|
||||||
|
node.declarations = []
|
||||||
|
node.kind = kind.keyword
|
||||||
|
for (;;) {
|
||||||
|
let decl = this.startNode()
|
||||||
|
this.parseVarId(decl)
|
||||||
|
if (this.eat(tt.eq)) {
|
||||||
|
decl.init = this.parseMaybeAssign(isFor)
|
||||||
|
} else if (kind === tt._const && !(this.type === tt._in || (this.options.ecmaVersion >= 6 && this.isContextual("of")))) {
|
||||||
|
this.unexpected()
|
||||||
|
} else if (decl.id.type != "Identifier" && !(isFor && (this.type === tt._in || this.isContextual("of")))) {
|
||||||
|
this.raise(this.lastTokEnd, "Complex binding patterns require an initialization value")
|
||||||
|
} else {
|
||||||
|
decl.init = null
|
||||||
|
}
|
||||||
|
node.declarations.push(this.finishNode(decl, "VariableDeclarator"))
|
||||||
|
if (!this.eat(tt.comma)) break
|
||||||
|
}
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseVarId = function(decl) {
|
||||||
|
decl.id = this.parseBindingAtom()
|
||||||
|
this.checkLVal(decl.id, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse a function declaration or literal (depending on the
|
||||||
|
// `isStatement` parameter).
|
||||||
|
|
||||||
|
pp.parseFunction = function(node, isStatement, allowExpressionBody) {
|
||||||
|
this.initFunction(node)
|
||||||
|
if (this.options.ecmaVersion >= 6)
|
||||||
|
node.generator = this.eat(tt.star)
|
||||||
|
if (isStatement || this.type === tt.name)
|
||||||
|
node.id = this.parseIdent()
|
||||||
|
this.parseFunctionParams(node)
|
||||||
|
this.parseFunctionBody(node, allowExpressionBody)
|
||||||
|
return this.finishNode(node, isStatement ? "FunctionDeclaration" : "FunctionExpression")
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseFunctionParams = function(node) {
|
||||||
|
this.expect(tt.parenL)
|
||||||
|
node.params = this.parseBindingList(tt.parenR, false, false, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse a class declaration or literal (depending on the
|
||||||
|
// `isStatement` parameter).
|
||||||
|
|
||||||
|
pp.parseClass = function(node, isStatement) {
|
||||||
|
this.next()
|
||||||
|
this.parseClassId(node, isStatement)
|
||||||
|
this.parseClassSuper(node)
|
||||||
|
let classBody = this.startNode()
|
||||||
|
let hadConstructor = false
|
||||||
|
classBody.body = []
|
||||||
|
this.expect(tt.braceL)
|
||||||
|
while (!this.eat(tt.braceR)) {
|
||||||
|
if (this.eat(tt.semi)) continue
|
||||||
|
let method = this.startNode()
|
||||||
|
let isGenerator = this.eat(tt.star)
|
||||||
|
let isMaybeStatic = this.type === tt.name && this.value === "static"
|
||||||
|
this.parsePropertyName(method)
|
||||||
|
method.static = isMaybeStatic && this.type !== tt.parenL
|
||||||
|
if (method.static) {
|
||||||
|
if (isGenerator) this.unexpected()
|
||||||
|
isGenerator = this.eat(tt.star)
|
||||||
|
this.parsePropertyName(method)
|
||||||
|
}
|
||||||
|
method.kind = "method"
|
||||||
|
let isGetSet = false
|
||||||
|
if (!method.computed) {
|
||||||
|
let {key} = method
|
||||||
|
if (!isGenerator && key.type === "Identifier" && this.type !== tt.parenL && (key.name === "get" || key.name === "set")) {
|
||||||
|
isGetSet = true
|
||||||
|
method.kind = key.name
|
||||||
|
key = this.parsePropertyName(method)
|
||||||
|
}
|
||||||
|
if (!method.static && (key.type === "Identifier" && key.name === "constructor" ||
|
||||||
|
key.type === "Literal" && key.value === "constructor")) {
|
||||||
|
if (hadConstructor) this.raise(key.start, "Duplicate constructor in the same class")
|
||||||
|
if (isGetSet) this.raise(key.start, "Constructor can't have get/set modifier")
|
||||||
|
if (isGenerator) this.raise(key.start, "Constructor can't be a generator")
|
||||||
|
method.kind = "constructor"
|
||||||
|
hadConstructor = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.parseClassMethod(classBody, method, isGenerator)
|
||||||
|
if (isGetSet) {
|
||||||
|
let paramCount = method.kind === "get" ? 0 : 1
|
||||||
|
if (method.value.params.length !== paramCount) {
|
||||||
|
let start = method.value.start
|
||||||
|
if (method.kind === "get")
|
||||||
|
this.raise(start, "getter should have no params");
|
||||||
|
else
|
||||||
|
this.raise(start, "setter should have exactly one param")
|
||||||
|
}
|
||||||
|
if (method.kind === "set" && method.value.params[0].type === "RestElement")
|
||||||
|
this.raise(method.value.params[0].start, "Setter cannot use rest params")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
node.body = this.finishNode(classBody, "ClassBody")
|
||||||
|
return this.finishNode(node, isStatement ? "ClassDeclaration" : "ClassExpression")
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseClassMethod = function(classBody, method, isGenerator) {
|
||||||
|
method.value = this.parseMethod(isGenerator)
|
||||||
|
classBody.body.push(this.finishNode(method, "MethodDefinition"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseClassId = function(node, isStatement) {
|
||||||
|
node.id = this.type === tt.name ? this.parseIdent() : isStatement ? this.unexpected() : null
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.parseClassSuper = function(node) {
|
||||||
|
node.superClass = this.eat(tt._extends) ? this.parseExprSubscripts() : null
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parses module export declaration.
|
||||||
|
|
||||||
|
pp.parseExport = function(node) {
|
||||||
|
this.next()
|
||||||
|
// export * from '...'
|
||||||
|
if (this.eat(tt.star)) {
|
||||||
|
this.expectContextual("from")
|
||||||
|
node.source = this.type === tt.string ? this.parseExprAtom() : this.unexpected()
|
||||||
|
this.semicolon()
|
||||||
|
return this.finishNode(node, "ExportAllDeclaration")
|
||||||
|
}
|
||||||
|
if (this.eat(tt._default)) { // export default ...
|
||||||
|
let expr = this.parseMaybeAssign()
|
||||||
|
let needsSemi = true
|
||||||
|
if (expr.type == "FunctionExpression" ||
|
||||||
|
expr.type == "ClassExpression") {
|
||||||
|
needsSemi = false
|
||||||
|
if (expr.id) {
|
||||||
|
expr.type = expr.type == "FunctionExpression"
|
||||||
|
? "FunctionDeclaration"
|
||||||
|
: "ClassDeclaration"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
node.declaration = expr
|
||||||
|
if (needsSemi) this.semicolon()
|
||||||
|
return this.finishNode(node, "ExportDefaultDeclaration")
|
||||||
|
}
|
||||||
|
// export var|const|let|function|class ...
|
||||||
|
if (this.shouldParseExportStatement()) {
|
||||||
|
node.declaration = this.parseStatement(true)
|
||||||
|
node.specifiers = []
|
||||||
|
node.source = null
|
||||||
|
} else { // export { x, y as z } [from '...']
|
||||||
|
node.declaration = null
|
||||||
|
node.specifiers = this.parseExportSpecifiers()
|
||||||
|
if (this.eatContextual("from")) {
|
||||||
|
node.source = this.type === tt.string ? this.parseExprAtom() : this.unexpected()
|
||||||
|
} else {
|
||||||
|
// check for keywords used as local names
|
||||||
|
for (let i = 0; i < node.specifiers.length; i++) {
|
||||||
|
if (this.keywords.test(node.specifiers[i].local.name) || this.reservedWords.test(node.specifiers[i].local.name)) {
|
||||||
|
this.unexpected(node.specifiers[i].local.start)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node.source = null
|
||||||
|
}
|
||||||
|
this.semicolon()
|
||||||
|
}
|
||||||
|
return this.finishNode(node, "ExportNamedDeclaration")
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.shouldParseExportStatement = function() {
|
||||||
|
return this.type.keyword
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parses a comma-separated list of module exports.
|
||||||
|
|
||||||
|
pp.parseExportSpecifiers = function() {
|
||||||
|
let nodes = [], first = true
|
||||||
|
// export { x, y as z } [from '...']
|
||||||
|
this.expect(tt.braceL)
|
||||||
|
while (!this.eat(tt.braceR)) {
|
||||||
|
if (!first) {
|
||||||
|
this.expect(tt.comma)
|
||||||
|
if (this.afterTrailingComma(tt.braceR)) break
|
||||||
|
} else first = false
|
||||||
|
|
||||||
|
let node = this.startNode()
|
||||||
|
node.local = this.parseIdent(this.type === tt._default)
|
||||||
|
node.exported = this.eatContextual("as") ? this.parseIdent(true) : node.local
|
||||||
|
nodes.push(this.finishNode(node, "ExportSpecifier"))
|
||||||
|
}
|
||||||
|
return nodes
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parses import declaration.
|
||||||
|
|
||||||
|
pp.parseImport = function(node) {
|
||||||
|
this.next()
|
||||||
|
// import '...'
|
||||||
|
if (this.type === tt.string) {
|
||||||
|
node.specifiers = empty
|
||||||
|
node.source = this.parseExprAtom()
|
||||||
|
} else {
|
||||||
|
node.specifiers = this.parseImportSpecifiers()
|
||||||
|
this.expectContextual("from")
|
||||||
|
node.source = this.type === tt.string ? this.parseExprAtom() : this.unexpected()
|
||||||
|
}
|
||||||
|
this.semicolon()
|
||||||
|
return this.finishNode(node, "ImportDeclaration")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parses a comma-separated list of module imports.
|
||||||
|
|
||||||
|
pp.parseImportSpecifiers = function() {
|
||||||
|
let nodes = [], first = true
|
||||||
|
if (this.type === tt.name) {
|
||||||
|
// import defaultObj, { x, y as z } from '...'
|
||||||
|
let node = this.startNode()
|
||||||
|
node.local = this.parseIdent()
|
||||||
|
this.checkLVal(node.local, true)
|
||||||
|
nodes.push(this.finishNode(node, "ImportDefaultSpecifier"))
|
||||||
|
if (!this.eat(tt.comma)) return nodes
|
||||||
|
}
|
||||||
|
if (this.type === tt.star) {
|
||||||
|
let node = this.startNode()
|
||||||
|
this.next()
|
||||||
|
this.expectContextual("as")
|
||||||
|
node.local = this.parseIdent()
|
||||||
|
this.checkLVal(node.local, true)
|
||||||
|
nodes.push(this.finishNode(node, "ImportNamespaceSpecifier"))
|
||||||
|
return nodes
|
||||||
|
}
|
||||||
|
this.expect(tt.braceL)
|
||||||
|
while (!this.eat(tt.braceR)) {
|
||||||
|
if (!first) {
|
||||||
|
this.expect(tt.comma)
|
||||||
|
if (this.afterTrailingComma(tt.braceR)) break
|
||||||
|
} else first = false
|
||||||
|
|
||||||
|
let node = this.startNode()
|
||||||
|
node.imported = this.parseIdent(true)
|
||||||
|
if (this.eatContextual("as")) {
|
||||||
|
node.local = this.parseIdent()
|
||||||
|
} else {
|
||||||
|
node.local = node.imported
|
||||||
|
if (this.isKeyword(node.local.name)) this.unexpected(node.local.start)
|
||||||
|
if (this.reservedWordsStrict.test(node.local.name)) this.raise(node.local.start, "The keyword '" + node.local.name + "' is reserved")
|
||||||
|
}
|
||||||
|
this.checkLVal(node.local, true)
|
||||||
|
nodes.push(this.finishNode(node, "ImportSpecifier"))
|
||||||
|
}
|
||||||
|
return nodes
|
||||||
|
}
|
|
@@ -0,0 +1,109 @@
// The algorithm used to determine whether a regexp can appear at a
// given point in the program is loosely based on sweet.js' approach.
// See https://github.com/mozilla/sweet.js/wiki/design

import {Parser} from "./state"
import {types as tt} from "./tokentype"
import {lineBreak} from "./whitespace"

export class TokContext {
  constructor(token, isExpr, preserveSpace, override) {
    this.token = token
    this.isExpr = !!isExpr
    this.preserveSpace = !!preserveSpace
    this.override = override
  }
}

export const types = {
  b_stat: new TokContext("{", false),
  b_expr: new TokContext("{", true),
  b_tmpl: new TokContext("${", true),
  p_stat: new TokContext("(", false),
  p_expr: new TokContext("(", true),
  q_tmpl: new TokContext("`", true, true, p => p.readTmplToken()),
  f_expr: new TokContext("function", true)
}

const pp = Parser.prototype

pp.initialContext = function() {
  return [types.b_stat]
}

pp.braceIsBlock = function(prevType) {
  if (prevType === tt.colon) {
    let parent = this.curContext()
    if (parent === types.b_stat || parent === types.b_expr)
      return !parent.isExpr
  }
  if (prevType === tt._return)
    return lineBreak.test(this.input.slice(this.lastTokEnd, this.start))
  if (prevType === tt._else || prevType === tt.semi || prevType === tt.eof || prevType === tt.parenR)
    return true
  if (prevType == tt.braceL)
    return this.curContext() === types.b_stat
  return !this.exprAllowed
}

pp.updateContext = function(prevType) {
  let update, type = this.type
  if (type.keyword && prevType == tt.dot)
    this.exprAllowed = false
  else if (update = type.updateContext)
    update.call(this, prevType)
  else
    this.exprAllowed = type.beforeExpr
}

// Token-specific context update code

tt.parenR.updateContext = tt.braceR.updateContext = function() {
  if (this.context.length == 1) {
    this.exprAllowed = true
    return
  }
  let out = this.context.pop()
  if (out === types.b_stat && this.curContext() === types.f_expr) {
    this.context.pop()
    this.exprAllowed = false
  } else if (out === types.b_tmpl) {
    this.exprAllowed = true
  } else {
    this.exprAllowed = !out.isExpr
  }
}

tt.braceL.updateContext = function(prevType) {
  this.context.push(this.braceIsBlock(prevType) ? types.b_stat : types.b_expr)
  this.exprAllowed = true
}

tt.dollarBraceL.updateContext = function() {
  this.context.push(types.b_tmpl)
  this.exprAllowed = true
}

tt.parenL.updateContext = function(prevType) {
  let statementParens = prevType === tt._if || prevType === tt._for || prevType === tt._with || prevType === tt._while
  this.context.push(statementParens ? types.p_stat : types.p_expr)
  this.exprAllowed = true
}

tt.incDec.updateContext = function() {
  // tokExprAllowed stays unchanged
}

tt._function.updateContext = function() {
  if (this.curContext() !== types.b_stat)
    this.context.push(types.f_expr)
  this.exprAllowed = false
}

tt.backQuote.updateContext = function() {
  if (this.curContext() === types.q_tmpl)
    this.context.pop()
  else
    this.context.push(types.q_tmpl)
  this.exprAllowed = false
}
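A short worked illustration of `braceIsBlock`, derived from the branches above (comments only, not part of the committed file):

// braceIsBlock(prevType), evaluated when a `{` token is read:
//   if (x) { y() }      // prevType is tt.parenR           -> true, b_stat (statement block)
//   return              // line break after `return`       -> true, `{` opens a block and ASI ends the return
//   { a: 1 }
//   return { a: 1 }     // no line break after `return`    -> false, b_expr (`{` starts an object literal)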
@@ -0,0 +1,682 @@
|
||||||
|
import {isIdentifierStart, isIdentifierChar} from "./identifier"
|
||||||
|
import {types as tt, keywords as keywordTypes} from "./tokentype"
|
||||||
|
import {Parser} from "./state"
|
||||||
|
import {SourceLocation} from "./locutil"
|
||||||
|
import {lineBreak, lineBreakG, isNewLine, nonASCIIwhitespace} from "./whitespace"
|
||||||
|
|
||||||
|
// Object type used to represent tokens. Note that normally, tokens
|
||||||
|
// simply exist as properties on the parser object. This is only
|
||||||
|
// used for the onToken callback and the external tokenizer.
|
||||||
|
|
||||||
|
export class Token {
|
||||||
|
constructor(p) {
|
||||||
|
this.type = p.type
|
||||||
|
this.value = p.value
|
||||||
|
this.start = p.start
|
||||||
|
this.end = p.end
|
||||||
|
if (p.options.locations)
|
||||||
|
this.loc = new SourceLocation(p, p.startLoc, p.endLoc)
|
||||||
|
if (p.options.ranges)
|
||||||
|
this.range = [p.start, p.end]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ## Tokenizer
|
||||||
|
|
||||||
|
const pp = Parser.prototype
|
||||||
|
|
||||||
|
// Are we running under Rhino?
|
||||||
|
const isRhino = typeof Packages == "object" && Object.prototype.toString.call(Packages) == "[object JavaPackage]"
|
||||||
|
|
||||||
|
// Move to the next token
|
||||||
|
|
||||||
|
pp.next = function() {
|
||||||
|
if (this.options.onToken)
|
||||||
|
this.options.onToken(new Token(this))
|
||||||
|
|
||||||
|
this.lastTokEnd = this.end
|
||||||
|
this.lastTokStart = this.start
|
||||||
|
this.lastTokEndLoc = this.endLoc
|
||||||
|
this.lastTokStartLoc = this.startLoc
|
||||||
|
this.nextToken()
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.getToken = function() {
|
||||||
|
this.next()
|
||||||
|
return new Token(this)
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we're in an ES6 environment, make parsers iterable
|
||||||
|
if (typeof Symbol !== "undefined")
|
||||||
|
pp[Symbol.iterator] = function () {
|
||||||
|
let self = this
|
||||||
|
return {next: function () {
|
||||||
|
let token = self.getToken()
|
||||||
|
return {
|
||||||
|
done: token.type === tt.eof,
|
||||||
|
value: token
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Toggle strict mode. Re-reads the next number or string to please
|
||||||
|
// pedantic tests (`"use strict"; 010;` should fail).
|
||||||
|
|
||||||
|
pp.setStrict = function(strict) {
|
||||||
|
this.strict = strict
|
||||||
|
if (this.type !== tt.num && this.type !== tt.string) return
|
||||||
|
this.pos = this.start
|
||||||
|
if (this.options.locations) {
|
||||||
|
while (this.pos < this.lineStart) {
|
||||||
|
this.lineStart = this.input.lastIndexOf("\n", this.lineStart - 2) + 1
|
||||||
|
--this.curLine
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.nextToken()
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.curContext = function() {
|
||||||
|
return this.context[this.context.length - 1]
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read a single token, updating the parser object's token-related
|
||||||
|
// properties.
|
||||||
|
|
||||||
|
pp.nextToken = function() {
|
||||||
|
let curContext = this.curContext()
|
||||||
|
if (!curContext || !curContext.preserveSpace) this.skipSpace()
|
||||||
|
|
||||||
|
this.start = this.pos
|
||||||
|
if (this.options.locations) this.startLoc = this.curPosition()
|
||||||
|
if (this.pos >= this.input.length) return this.finishToken(tt.eof)
|
||||||
|
|
||||||
|
if (curContext.override) return curContext.override(this)
|
||||||
|
else this.readToken(this.fullCharCodeAtPos())
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.readToken = function(code) {
|
||||||
|
// Identifier or keyword. '\uXXXX' sequences are allowed in
|
||||||
|
// identifiers, so '\' also dispatches to that.
|
||||||
|
if (isIdentifierStart(code, this.options.ecmaVersion >= 6) || code === 92 /* '\' */)
|
||||||
|
return this.readWord()
|
||||||
|
|
||||||
|
return this.getTokenFromCode(code)
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.fullCharCodeAtPos = function() {
|
||||||
|
let code = this.input.charCodeAt(this.pos)
|
||||||
|
if (code <= 0xd7ff || code >= 0xe000) return code
|
||||||
|
let next = this.input.charCodeAt(this.pos + 1)
|
||||||
|
return (code << 10) + next - 0x35fdc00
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.skipBlockComment = function() {
|
||||||
|
let startLoc = this.options.onComment && this.curPosition()
|
||||||
|
let start = this.pos, end = this.input.indexOf("*/", this.pos += 2)
|
||||||
|
if (end === -1) this.raise(this.pos - 2, "Unterminated comment")
|
||||||
|
this.pos = end + 2
|
||||||
|
if (this.options.locations) {
|
||||||
|
lineBreakG.lastIndex = start
|
||||||
|
let match
|
||||||
|
while ((match = lineBreakG.exec(this.input)) && match.index < this.pos) {
|
||||||
|
++this.curLine
|
||||||
|
this.lineStart = match.index + match[0].length
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (this.options.onComment)
|
||||||
|
this.options.onComment(true, this.input.slice(start + 2, end), start, this.pos,
|
||||||
|
startLoc, this.curPosition())
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.skipLineComment = function(startSkip) {
|
||||||
|
let start = this.pos
|
||||||
|
let startLoc = this.options.onComment && this.curPosition()
|
||||||
|
let ch = this.input.charCodeAt(this.pos+=startSkip)
|
||||||
|
while (this.pos < this.input.length && ch !== 10 && ch !== 13 && ch !== 8232 && ch !== 8233) {
|
||||||
|
++this.pos
|
||||||
|
ch = this.input.charCodeAt(this.pos)
|
||||||
|
}
|
||||||
|
if (this.options.onComment)
|
||||||
|
this.options.onComment(false, this.input.slice(start + startSkip, this.pos), start, this.pos,
|
||||||
|
startLoc, this.curPosition())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Called at the start of the parse and after every token. Skips
|
||||||
|
// whitespace and comments, and.
|
||||||
|
|
||||||
|
pp.skipSpace = function() {
|
||||||
|
loop: while (this.pos < this.input.length) {
|
||||||
|
let ch = this.input.charCodeAt(this.pos)
|
||||||
|
switch (ch) {
|
||||||
|
case 32: case 160: // ' '
|
||||||
|
++this.pos
|
||||||
|
break
|
||||||
|
case 13:
|
||||||
|
if (this.input.charCodeAt(this.pos + 1) === 10) {
|
||||||
|
++this.pos
|
||||||
|
}
|
||||||
|
case 10: case 8232: case 8233:
|
||||||
|
++this.pos
|
||||||
|
if (this.options.locations) {
|
||||||
|
++this.curLine
|
||||||
|
this.lineStart = this.pos
|
||||||
|
}
|
||||||
|
break
|
||||||
|
case 47: // '/'
|
||||||
|
switch (this.input.charCodeAt(this.pos + 1)) {
|
||||||
|
case 42: // '*'
|
||||||
|
this.skipBlockComment()
|
||||||
|
break
|
||||||
|
case 47:
|
||||||
|
this.skipLineComment(2)
|
||||||
|
break
|
||||||
|
default:
|
||||||
|
break loop
|
||||||
|
}
|
||||||
|
break
|
||||||
|
default:
|
||||||
|
if (ch > 8 && ch < 14 || ch >= 5760 && nonASCIIwhitespace.test(String.fromCharCode(ch))) {
|
||||||
|
++this.pos
|
||||||
|
} else {
|
||||||
|
break loop
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Called at the end of every token. Sets `end`, `val`, and
|
||||||
|
// maintains `context` and `exprAllowed`, and skips the space after
|
||||||
|
// the token, so that the next one's `start` will point at the
|
||||||
|
// right position.
|
||||||
|
|
||||||
|
pp.finishToken = function(type, val) {
|
||||||
|
this.end = this.pos
|
||||||
|
if (this.options.locations) this.endLoc = this.curPosition()
|
||||||
|
let prevType = this.type
|
||||||
|
this.type = type
|
||||||
|
this.value = val
|
||||||
|
|
||||||
|
this.updateContext(prevType)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ### Token reading
|
||||||
|
|
||||||
|
// This is the function that is called to fetch the next token. It
|
||||||
|
// is somewhat obscure, because it works in character codes rather
|
||||||
|
// than characters, and because operator parsing has been inlined
|
||||||
|
// into it.
|
||||||
|
//
|
||||||
|
// All in the name of speed.
|
||||||
|
//
|
||||||
|
pp.readToken_dot = function() {
|
||||||
|
let next = this.input.charCodeAt(this.pos + 1)
|
||||||
|
if (next >= 48 && next <= 57) return this.readNumber(true)
|
||||||
|
let next2 = this.input.charCodeAt(this.pos + 2)
|
||||||
|
if (this.options.ecmaVersion >= 6 && next === 46 && next2 === 46) { // 46 = dot '.'
|
||||||
|
this.pos += 3
|
||||||
|
return this.finishToken(tt.ellipsis)
|
||||||
|
} else {
|
||||||
|
++this.pos
|
||||||
|
return this.finishToken(tt.dot)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.readToken_slash = function() { // '/'
|
||||||
|
let next = this.input.charCodeAt(this.pos + 1)
|
||||||
|
if (this.exprAllowed) {++this.pos; return this.readRegexp();}
|
||||||
|
if (next === 61) return this.finishOp(tt.assign, 2)
|
||||||
|
return this.finishOp(tt.slash, 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.readToken_mult_modulo = function(code) { // '%*'
|
||||||
|
let next = this.input.charCodeAt(this.pos + 1)
|
||||||
|
if (next === 61) return this.finishOp(tt.assign, 2)
|
||||||
|
return this.finishOp(code === 42 ? tt.star : tt.modulo, 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.readToken_pipe_amp = function(code) { // '|&'
|
||||||
|
let next = this.input.charCodeAt(this.pos + 1)
|
||||||
|
if (next === code) return this.finishOp(code === 124 ? tt.logicalOR : tt.logicalAND, 2)
|
||||||
|
if (next === 61) return this.finishOp(tt.assign, 2)
|
||||||
|
return this.finishOp(code === 124 ? tt.bitwiseOR : tt.bitwiseAND, 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.readToken_caret = function() { // '^'
|
||||||
|
let next = this.input.charCodeAt(this.pos + 1)
|
||||||
|
if (next === 61) return this.finishOp(tt.assign, 2)
|
||||||
|
return this.finishOp(tt.bitwiseXOR, 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.readToken_plus_min = function(code) { // '+-'
|
||||||
|
let next = this.input.charCodeAt(this.pos + 1)
|
||||||
|
if (next === code) {
|
||||||
|
if (next == 45 && this.input.charCodeAt(this.pos + 2) == 62 &&
|
||||||
|
lineBreak.test(this.input.slice(this.lastTokEnd, this.pos))) {
|
||||||
|
// A `-->` line comment
|
||||||
|
this.skipLineComment(3)
|
||||||
|
this.skipSpace()
|
||||||
|
return this.nextToken()
|
||||||
|
}
|
||||||
|
return this.finishOp(tt.incDec, 2)
|
||||||
|
}
|
||||||
|
if (next === 61) return this.finishOp(tt.assign, 2)
|
||||||
|
return this.finishOp(tt.plusMin, 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.readToken_lt_gt = function(code) { // '<>'
|
||||||
|
let next = this.input.charCodeAt(this.pos + 1)
|
||||||
|
let size = 1
|
||||||
|
if (next === code) {
|
||||||
|
size = code === 62 && this.input.charCodeAt(this.pos + 2) === 62 ? 3 : 2
|
||||||
|
if (this.input.charCodeAt(this.pos + size) === 61) return this.finishOp(tt.assign, size + 1)
|
||||||
|
return this.finishOp(tt.bitShift, size)
|
||||||
|
}
|
||||||
|
if (next == 33 && code == 60 && this.input.charCodeAt(this.pos + 2) == 45 &&
|
||||||
|
this.input.charCodeAt(this.pos + 3) == 45) {
|
||||||
|
if (this.inModule) this.unexpected()
|
||||||
|
// `<!--`, an XML-style comment that should be interpreted as a line comment
|
||||||
|
this.skipLineComment(4)
|
||||||
|
this.skipSpace()
|
||||||
|
return this.nextToken()
|
||||||
|
}
|
||||||
|
if (next === 61)
|
||||||
|
size = this.input.charCodeAt(this.pos + 2) === 61 ? 3 : 2
|
||||||
|
return this.finishOp(tt.relational, size)
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.readToken_eq_excl = function(code) { // '=!'
|
||||||
|
let next = this.input.charCodeAt(this.pos + 1)
|
||||||
|
if (next === 61) return this.finishOp(tt.equality, this.input.charCodeAt(this.pos + 2) === 61 ? 3 : 2)
|
||||||
|
if (code === 61 && next === 62 && this.options.ecmaVersion >= 6) { // '=>'
|
||||||
|
this.pos += 2
|
||||||
|
return this.finishToken(tt.arrow)
|
||||||
|
}
|
||||||
|
return this.finishOp(code === 61 ? tt.eq : tt.prefix, 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.getTokenFromCode = function(code) {
|
||||||
|
switch (code) {
|
||||||
|
// The interpretation of a dot depends on whether it is followed
|
||||||
|
// by a digit or another two dots.
|
||||||
|
case 46: // '.'
|
||||||
|
return this.readToken_dot()
|
||||||
|
|
||||||
|
// Punctuation tokens.
|
||||||
|
case 40: ++this.pos; return this.finishToken(tt.parenL)
|
||||||
|
case 41: ++this.pos; return this.finishToken(tt.parenR)
|
||||||
|
case 59: ++this.pos; return this.finishToken(tt.semi)
|
||||||
|
case 44: ++this.pos; return this.finishToken(tt.comma)
|
||||||
|
case 91: ++this.pos; return this.finishToken(tt.bracketL)
|
||||||
|
case 93: ++this.pos; return this.finishToken(tt.bracketR)
|
||||||
|
case 123: ++this.pos; return this.finishToken(tt.braceL)
|
||||||
|
case 125: ++this.pos; return this.finishToken(tt.braceR)
|
||||||
|
case 58: ++this.pos; return this.finishToken(tt.colon)
|
||||||
|
case 63: ++this.pos; return this.finishToken(tt.question)
|
||||||
|
|
||||||
|
case 96: // '`'
|
||||||
|
if (this.options.ecmaVersion < 6) break
|
||||||
|
++this.pos
|
||||||
|
return this.finishToken(tt.backQuote)
|
||||||
|
|
||||||
|
case 48: // '0'
|
||||||
|
let next = this.input.charCodeAt(this.pos + 1)
|
||||||
|
if (next === 120 || next === 88) return this.readRadixNumber(16); // '0x', '0X' - hex number
|
||||||
|
if (this.options.ecmaVersion >= 6) {
|
||||||
|
if (next === 111 || next === 79) return this.readRadixNumber(8); // '0o', '0O' - octal number
|
||||||
|
if (next === 98 || next === 66) return this.readRadixNumber(2); // '0b', '0B' - binary number
|
||||||
|
}
|
||||||
|
// Anything else beginning with a digit is an integer, octal
|
||||||
|
// number, or float.
|
||||||
|
case 49: case 50: case 51: case 52: case 53: case 54: case 55: case 56: case 57: // 1-9
|
||||||
|
return this.readNumber(false)
|
||||||
|
|
||||||
|
// Quotes produce strings.
|
||||||
|
case 34: case 39: // '"', "'"
|
||||||
|
return this.readString(code)
|
||||||
|
|
||||||
|
// Operators are parsed inline in tiny state machines. '=' (61) is
|
||||||
|
// often referred to. `finishOp` simply skips the amount of
|
||||||
|
// characters it is given as second argument, and returns a token
|
||||||
|
// of the type given by its first argument.
|
||||||
|
|
||||||
|
case 47: // '/'
|
||||||
|
return this.readToken_slash()
|
||||||
|
|
||||||
|
case 37: case 42: // '%*'
|
||||||
|
return this.readToken_mult_modulo(code)
|
||||||
|
|
||||||
|
case 124: case 38: // '|&'
|
||||||
|
return this.readToken_pipe_amp(code)
|
||||||
|
|
||||||
|
case 94: // '^'
|
||||||
|
return this.readToken_caret()
|
||||||
|
|
||||||
|
case 43: case 45: // '+-'
|
||||||
|
return this.readToken_plus_min(code)
|
||||||
|
|
||||||
|
case 60: case 62: // '<>'
|
||||||
|
return this.readToken_lt_gt(code)
|
||||||
|
|
||||||
|
case 61: case 33: // '=!'
|
||||||
|
return this.readToken_eq_excl(code)
|
||||||
|
|
||||||
|
case 126: // '~'
|
||||||
|
return this.finishOp(tt.prefix, 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
this.raise(this.pos, "Unexpected character '" + codePointToString(code) + "'")
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.finishOp = function(type, size) {
|
||||||
|
let str = this.input.slice(this.pos, this.pos + size)
|
||||||
|
this.pos += size
|
||||||
|
return this.finishToken(type, str)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse a regular expression. Some context-awareness is necessary,
|
||||||
|
// since a '/' inside a '[]' set does not end the expression.
|
||||||
|
|
||||||
|
function tryCreateRegexp(src, flags, throwErrorAt, parser) {
|
||||||
|
try {
|
||||||
|
return new RegExp(src, flags);
|
||||||
|
} catch (e) {
|
||||||
|
if (throwErrorAt !== undefined) {
|
||||||
|
if (e instanceof SyntaxError) parser.raise(throwErrorAt, "Error parsing regular expression: " + e.message)
|
||||||
|
throw e
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var regexpUnicodeSupport = !!tryCreateRegexp("\uffff", "u");
|
||||||
|
|
||||||
|
pp.readRegexp = function() {
|
||||||
|
let escaped, inClass, start = this.pos
|
||||||
|
for (;;) {
|
||||||
|
if (this.pos >= this.input.length) this.raise(start, "Unterminated regular expression")
|
||||||
|
let ch = this.input.charAt(this.pos)
|
||||||
|
if (lineBreak.test(ch)) this.raise(start, "Unterminated regular expression")
|
||||||
|
if (!escaped) {
|
||||||
|
if (ch === "[") inClass = true
|
||||||
|
else if (ch === "]" && inClass) inClass = false
|
||||||
|
else if (ch === "/" && !inClass) break
|
||||||
|
escaped = ch === "\\"
|
||||||
|
} else escaped = false
|
||||||
|
++this.pos
|
||||||
|
}
|
||||||
|
let content = this.input.slice(start, this.pos)
|
||||||
|
++this.pos
|
||||||
|
// Need to use `readWord1` because '\uXXXX' sequences are allowed
|
||||||
|
// here (don't ask).
|
||||||
|
let mods = this.readWord1()
|
||||||
|
let tmp = content
|
||||||
|
if (mods) {
|
||||||
|
let validFlags = /^[gim]*$/
|
||||||
|
if (this.options.ecmaVersion >= 6) validFlags = /^[gimuy]*$/
|
||||||
|
if (!validFlags.test(mods)) this.raise(start, "Invalid regular expression flag")
|
||||||
|
if (mods.indexOf('u') >= 0 && !regexpUnicodeSupport) {
|
||||||
|
// Replace each astral symbol and every Unicode escape sequence that
|
||||||
|
// possibly represents an astral symbol or a paired surrogate with a
|
||||||
|
// single ASCII symbol to avoid throwing on regular expressions that
|
||||||
|
// are only valid in combination with the `/u` flag.
|
||||||
|
// Note: replacing with the ASCII symbol `x` might cause false
|
||||||
|
// negatives in unlikely scenarios. For example, `[\u{61}-b]` is a
|
||||||
|
// perfectly valid pattern that is equivalent to `[a-b]`, but it would
|
||||||
|
// be replaced by `[x-b]` which throws an error.
|
||||||
|
tmp = tmp.replace(/\\u\{([0-9a-fA-F]+)\}/g, (_match, code, offset) => {
|
||||||
|
code = Number("0x" + code)
|
||||||
|
if (code > 0x10FFFF) this.raise(start + offset + 3, "Code point out of bounds")
|
||||||
|
return "x"
|
||||||
|
});
|
||||||
|
tmp = tmp.replace(/\\u([a-fA-F0-9]{4})|[\uD800-\uDBFF][\uDC00-\uDFFF]/g, "x")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Detect invalid regular expressions.
|
||||||
|
let value = null
|
||||||
|
// Rhino's regular expression parser is flaky and throws uncatchable exceptions,
|
||||||
|
// so don't do detection if we are running under Rhino
|
||||||
|
if (!isRhino) {
|
||||||
|
tryCreateRegexp(tmp, undefined, start, this);
|
||||||
|
// Get a regular expression object for this pattern-flag pair, or `null` in
|
||||||
|
// case the current environment doesn't support the flags it uses.
|
||||||
|
value = tryCreateRegexp(content, mods)
|
||||||
|
}
|
||||||
|
return this.finishToken(tt.regexp, {pattern: content, flags: mods, value: value})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read an integer in the given radix. Return null if zero digits
|
||||||
|
// were read, the integer value otherwise. When `len` is given, this
|
||||||
|
// will return `null` unless the integer has exactly `len` digits.
|
||||||
|
|
||||||
|
pp.readInt = function(radix, len) {
|
||||||
|
let start = this.pos, total = 0
|
||||||
|
for (let i = 0, e = len == null ? Infinity : len; i < e; ++i) {
|
||||||
|
let code = this.input.charCodeAt(this.pos), val
|
||||||
|
if (code >= 97) val = code - 97 + 10; // a
|
||||||
|
else if (code >= 65) val = code - 65 + 10; // A
|
||||||
|
else if (code >= 48 && code <= 57) val = code - 48; // 0-9
|
||||||
|
else val = Infinity
|
||||||
|
if (val >= radix) break
|
||||||
|
++this.pos
|
||||||
|
total = total * radix + val
|
||||||
|
}
|
||||||
|
if (this.pos === start || len != null && this.pos - start !== len) return null
|
||||||
|
|
||||||
|
return total
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.readRadixNumber = function(radix) {
|
||||||
|
this.pos += 2; // 0x
|
||||||
|
let val = this.readInt(radix)
|
||||||
|
if (val == null) this.raise(this.start + 2, "Expected number in radix " + radix)
|
||||||
|
if (isIdentifierStart(this.fullCharCodeAtPos())) this.raise(this.pos, "Identifier directly after number")
|
||||||
|
return this.finishToken(tt.num, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read an integer, octal integer, or floating-point number.
|
||||||
|
|
||||||
|
pp.readNumber = function(startsWithDot) {
|
||||||
|
let start = this.pos, isFloat = false, octal = this.input.charCodeAt(this.pos) === 48
|
||||||
|
if (!startsWithDot && this.readInt(10) === null) this.raise(start, "Invalid number")
|
||||||
|
let next = this.input.charCodeAt(this.pos)
|
||||||
|
if (next === 46) { // '.'
|
||||||
|
++this.pos
|
||||||
|
this.readInt(10)
|
||||||
|
isFloat = true
|
||||||
|
next = this.input.charCodeAt(this.pos)
|
||||||
|
}
|
||||||
|
if (next === 69 || next === 101) { // 'eE'
|
||||||
|
next = this.input.charCodeAt(++this.pos)
|
||||||
|
if (next === 43 || next === 45) ++this.pos; // '+-'
|
||||||
|
if (this.readInt(10) === null) this.raise(start, "Invalid number")
|
||||||
|
isFloat = true
|
||||||
|
}
|
||||||
|
if (isIdentifierStart(this.fullCharCodeAtPos())) this.raise(this.pos, "Identifier directly after number")
|
||||||
|
|
||||||
|
let str = this.input.slice(start, this.pos), val
|
||||||
|
if (isFloat) val = parseFloat(str)
|
||||||
|
else if (!octal || str.length === 1) val = parseInt(str, 10)
|
||||||
|
else if (/[89]/.test(str) || this.strict) this.raise(start, "Invalid number")
|
||||||
|
else val = parseInt(str, 8)
|
||||||
|
return this.finishToken(tt.num, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read a string value, interpreting backslash-escapes.
|
||||||
|
|
||||||
|
pp.readCodePoint = function() {
|
||||||
|
let ch = this.input.charCodeAt(this.pos), code
|
||||||
|
|
||||||
|
if (ch === 123) {
|
||||||
|
if (this.options.ecmaVersion < 6) this.unexpected()
|
||||||
|
let codePos = ++this.pos
|
||||||
|
code = this.readHexChar(this.input.indexOf('}', this.pos) - this.pos)
|
||||||
|
++this.pos
|
||||||
|
if (code > 0x10FFFF) this.raise(codePos, "Code point out of bounds")
|
||||||
|
} else {
|
||||||
|
code = this.readHexChar(4)
|
||||||
|
}
|
||||||
|
return code
|
||||||
|
}
|
||||||
|
|
||||||
|
function codePointToString(code) {
|
||||||
|
// UTF-16 Decoding
|
||||||
|
if (code <= 0xFFFF) return String.fromCharCode(code)
|
||||||
|
code -= 0x10000
|
||||||
|
return String.fromCharCode((code >> 10) + 0xD800, (code & 1023) + 0xDC00)
|
||||||
|
}
|
||||||
|
|
||||||
|
pp.readString = function(quote) {
|
||||||
|
let out = "", chunkStart = ++this.pos
|
||||||
|
for (;;) {
|
||||||
|
if (this.pos >= this.input.length) this.raise(this.start, "Unterminated string constant")
|
||||||
|
let ch = this.input.charCodeAt(this.pos)
|
||||||
|
if (ch === quote) break
|
||||||
|
if (ch === 92) { // '\'
|
||||||
|
out += this.input.slice(chunkStart, this.pos)
|
||||||
|
out += this.readEscapedChar(false)
|
||||||
|
chunkStart = this.pos
|
||||||
|
} else {
|
||||||
|
if (isNewLine(ch)) this.raise(this.start, "Unterminated string constant")
|
||||||
|
++this.pos
|
||||||
|
}
|
||||||
|
}
|
||||||
|
out += this.input.slice(chunkStart, this.pos++)
|
||||||
|
return this.finishToken(tt.string, out)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reads template string tokens.
|
||||||
|
|
||||||
|
pp.readTmplToken = function() {
|
||||||
|
let out = "", chunkStart = this.pos
|
||||||
|
for (;;) {
|
||||||
|
if (this.pos >= this.input.length) this.raise(this.start, "Unterminated template")
|
||||||
|
let ch = this.input.charCodeAt(this.pos)
|
||||||
|
if (ch === 96 || ch === 36 && this.input.charCodeAt(this.pos + 1) === 123) { // '`', '${'
|
||||||
|
if (this.pos === this.start && this.type === tt.template) {
|
||||||
|
if (ch === 36) {
|
||||||
|
this.pos += 2
|
||||||
|
return this.finishToken(tt.dollarBraceL)
|
||||||
|
} else {
|
||||||
|
++this.pos
|
||||||
|
return this.finishToken(tt.backQuote)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
out += this.input.slice(chunkStart, this.pos)
|
||||||
|
return this.finishToken(tt.template, out)
|
||||||
|
}
|
||||||
|
if (ch === 92) { // '\'
|
||||||
|
out += this.input.slice(chunkStart, this.pos)
|
||||||
|
out += this.readEscapedChar(true)
|
||||||
|
chunkStart = this.pos
|
||||||
|
} else if (isNewLine(ch)) {
|
||||||
|
out += this.input.slice(chunkStart, this.pos)
|
||||||
|
++this.pos
|
||||||
|
switch (ch) {
|
||||||
|
case 13:
|
||||||
|
if (this.input.charCodeAt(this.pos) === 10) ++this.pos;
|
||||||
|
case 10:
|
||||||
|
out += "\n";
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
out += String.fromCharCode(ch);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if (this.options.locations) {
|
||||||
|
++this.curLine
|
||||||
|
this.lineStart = this.pos
|
||||||
|
}
|
||||||
|
chunkStart = this.pos
|
||||||
|
} else {
|
||||||
|
++this.pos
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Used to read escaped characters
|
||||||
|
|
||||||
|
pp.readEscapedChar = function(inTemplate) {
|
||||||
|
let ch = this.input.charCodeAt(++this.pos)
|
||||||
|
++this.pos
|
||||||
|
switch (ch) {
|
||||||
|
case 110: return "\n"; // 'n' -> '\n'
|
||||||
|
case 114: return "\r"; // 'r' -> '\r'
|
||||||
|
case 120: return String.fromCharCode(this.readHexChar(2)); // 'x'
|
||||||
|
case 117: return codePointToString(this.readCodePoint()); // 'u'
|
||||||
|
case 116: return "\t"; // 't' -> '\t'
|
||||||
|
case 98: return "\b"; // 'b' -> '\b'
|
||||||
|
case 118: return "\u000b"; // 'v' -> '\u000b'
|
||||||
|
case 102: return "\f"; // 'f' -> '\f'
|
||||||
|
case 13: if (this.input.charCodeAt(this.pos) === 10) ++this.pos; // '\r\n'
|
||||||
|
case 10: // ' \n'
|
||||||
|
if (this.options.locations) { this.lineStart = this.pos; ++this.curLine }
|
||||||
|
return ""
|
||||||
|
default:
|
||||||
|
if (ch >= 48 && ch <= 55) {
|
||||||
|
let octalStr = this.input.substr(this.pos - 1, 3).match(/^[0-7]+/)[0]
|
||||||
|
let octal = parseInt(octalStr, 8)
|
||||||
|
if (octal > 255) {
|
||||||
|
octalStr = octalStr.slice(0, -1)
|
||||||
|
octal = parseInt(octalStr, 8)
|
||||||
|
}
|
||||||
|
if (octalStr !== "0" && (this.strict || inTemplate)) {
|
||||||
|
this.raise(this.pos - 2, "Octal literal in strict mode")
|
||||||
|
}
|
||||||
|
this.pos += octalStr.length - 1
|
||||||
|
return String.fromCharCode(octal)
|
||||||
|
}
|
||||||
|
return String.fromCharCode(ch)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Used to read character escape sequences ('\x', '\u', '\U').
|
||||||
|
|
||||||
|
pp.readHexChar = function(len) {
|
||||||
|
let codePos = this.pos
|
||||||
|
let n = this.readInt(16, len)
|
||||||
|
if (n === null) this.raise(codePos, "Bad character escape sequence")
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read an identifier, and return it as a string. Sets `this.containsEsc`
|
||||||
|
// to whether the word contained a '\u' escape.
|
||||||
|
//
|
||||||
|
// Incrementally adds only escaped chars, adding other chunks as-is
|
||||||
|
// as a micro-optimization.
|
||||||
|
|
||||||
|
pp.readWord1 = function() {
|
||||||
|
this.containsEsc = false
|
||||||
|
let word = "", first = true, chunkStart = this.pos
|
||||||
|
let astral = this.options.ecmaVersion >= 6
|
||||||
|
while (this.pos < this.input.length) {
|
||||||
|
let ch = this.fullCharCodeAtPos()
|
||||||
|
if (isIdentifierChar(ch, astral)) {
|
||||||
|
this.pos += ch <= 0xffff ? 1 : 2
|
||||||
|
} else if (ch === 92) { // "\"
|
||||||
|
this.containsEsc = true
|
||||||
|
word += this.input.slice(chunkStart, this.pos)
|
||||||
|
let escStart = this.pos
|
||||||
|
if (this.input.charCodeAt(++this.pos) != 117) // "u"
|
||||||
|
this.raise(this.pos, "Expecting Unicode escape sequence \\uXXXX")
|
||||||
|
++this.pos
|
||||||
|
let esc = this.readCodePoint()
|
||||||
|
if (!(first ? isIdentifierStart : isIdentifierChar)(esc, astral))
|
||||||
|
this.raise(escStart, "Invalid Unicode escape")
|
||||||
|
word += codePointToString(esc)
|
||||||
|
chunkStart = this.pos
|
||||||
|
} else {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
first = false
|
||||||
|
}
|
||||||
|
return word + this.input.slice(chunkStart, this.pos)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read an identifier or keyword token. Will check for reserved
|
||||||
|
// words when necessary.
|
||||||
|
|
||||||
|
pp.readWord = function() {
|
||||||
|
let word = this.readWord1()
|
||||||
|
let type = tt.name
|
||||||
|
if ((this.options.ecmaVersion >= 6 || !this.containsEsc) && this.keywords.test(word))
|
||||||
|
type = keywordTypes[word]
|
||||||
|
return this.finishToken(type, word)
|
||||||
|
}
|
|
@ -0,0 +1,147 @@
|
||||||
|
// ## Token types
|
||||||
|
|
||||||
|
// The assignment of fine-grained, information-carrying type objects
|
||||||
|
// allows the tokenizer to store the information it has about a
|
||||||
|
// token in a way that is very cheap for the parser to look up.
|
||||||
|
|
||||||
|
// All token type variables start with an underscore, to make them
|
||||||
|
// easy to recognize.
|
||||||
|
|
||||||
|
// The `beforeExpr` property is used to disambiguate between regular
|
||||||
|
// expressions and divisions. It is set on all token types that can
|
||||||
|
// be followed by an expression (thus, a slash after them would be a
|
||||||
|
// regular expression).
|
||||||
|
//
|
||||||
|
// The `startsExpr` property is used to check if the token ends a
|
||||||
|
// `yield` expression. It is set on all token types that either can
|
||||||
|
// directly start an expression (like a quotation mark) or can
|
||||||
|
// continue an expression (like the body of a string).
|
||||||
|
//
|
||||||
|
// `isLoop` marks a keyword as starting a loop, which is important
|
||||||
|
// to know when parsing a label, in order to allow or disallow
|
||||||
|
// continue jumps to that label.
|
||||||
|
|
||||||
|
export class TokenType {
|
||||||
|
constructor(label, conf = {}) {
|
||||||
|
this.label = label
|
||||||
|
this.keyword = conf.keyword
|
||||||
|
this.beforeExpr = !!conf.beforeExpr
|
||||||
|
this.startsExpr = !!conf.startsExpr
|
||||||
|
this.isLoop = !!conf.isLoop
|
||||||
|
this.isAssign = !!conf.isAssign
|
||||||
|
this.prefix = !!conf.prefix
|
||||||
|
this.postfix = !!conf.postfix
|
||||||
|
this.binop = conf.binop || null
|
||||||
|
this.updateContext = null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function binop(name, prec) {
|
||||||
|
return new TokenType(name, {beforeExpr: true, binop: prec})
|
||||||
|
}
|
||||||
|
const beforeExpr = {beforeExpr: true}, startsExpr = {startsExpr: true}
|
||||||
|
|
||||||
|
export const types = {
|
||||||
|
num: new TokenType("num", startsExpr),
|
||||||
|
regexp: new TokenType("regexp", startsExpr),
|
||||||
|
string: new TokenType("string", startsExpr),
|
||||||
|
name: new TokenType("name", startsExpr),
|
||||||
|
eof: new TokenType("eof"),
|
||||||
|
|
||||||
|
// Punctuation token types.
|
||||||
|
bracketL: new TokenType("[", {beforeExpr: true, startsExpr: true}),
|
||||||
|
bracketR: new TokenType("]"),
|
||||||
|
braceL: new TokenType("{", {beforeExpr: true, startsExpr: true}),
|
||||||
|
braceR: new TokenType("}"),
|
||||||
|
parenL: new TokenType("(", {beforeExpr: true, startsExpr: true}),
|
||||||
|
parenR: new TokenType(")"),
|
||||||
|
comma: new TokenType(",", beforeExpr),
|
||||||
|
semi: new TokenType(";", beforeExpr),
|
||||||
|
colon: new TokenType(":", beforeExpr),
|
||||||
|
dot: new TokenType("."),
|
||||||
|
question: new TokenType("?", beforeExpr),
|
||||||
|
arrow: new TokenType("=>", beforeExpr),
|
||||||
|
template: new TokenType("template"),
|
||||||
|
ellipsis: new TokenType("...", beforeExpr),
|
||||||
|
backQuote: new TokenType("`", startsExpr),
|
||||||
|
dollarBraceL: new TokenType("${", {beforeExpr: true, startsExpr: true}),
|
||||||
|
|
||||||
|
// Operators. These carry several kinds of properties to help the
|
||||||
|
// parser use them properly (the presence of these properties is
|
||||||
|
// what categorizes them as operators).
|
||||||
|
//
|
||||||
|
// `binop`, when present, specifies that this operator is a binary
|
||||||
|
// operator, and will refer to its precedence.
|
||||||
|
//
|
||||||
|
// `prefix` and `postfix` mark the operator as a prefix or postfix
|
||||||
|
// unary operator.
|
||||||
|
//
|
||||||
|
// `isAssign` marks all of `=`, `+=`, `-=` etcetera, which act as
|
||||||
|
// binary operators with a very low precedence, that should result
|
||||||
|
// in AssignmentExpression nodes.
|
||||||
|
|
||||||
|
eq: new TokenType("=", {beforeExpr: true, isAssign: true}),
|
||||||
|
assign: new TokenType("_=", {beforeExpr: true, isAssign: true}),
|
||||||
|
incDec: new TokenType("++/--", {prefix: true, postfix: true, startsExpr: true}),
|
||||||
|
prefix: new TokenType("prefix", {beforeExpr: true, prefix: true, startsExpr: true}),
|
||||||
|
logicalOR: binop("||", 1),
|
||||||
|
logicalAND: binop("&&", 2),
|
||||||
|
bitwiseOR: binop("|", 3),
|
||||||
|
bitwiseXOR: binop("^", 4),
|
||||||
|
bitwiseAND: binop("&", 5),
|
||||||
|
equality: binop("==/!=", 6),
|
||||||
|
relational: binop("</>", 7),
|
||||||
|
bitShift: binop("<</>>", 8),
|
||||||
|
plusMin: new TokenType("+/-", {beforeExpr: true, binop: 9, prefix: true, startsExpr: true}),
|
||||||
|
modulo: binop("%", 10),
|
||||||
|
star: binop("*", 10),
|
||||||
|
slash: binop("/", 10)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Map keyword names to token types.
|
||||||
|
|
||||||
|
export const keywords = {}
|
||||||
|
|
||||||
|
// Succinct definitions of keyword token types
|
||||||
|
function kw(name, options = {}) {
|
||||||
|
options.keyword = name
|
||||||
|
keywords[name] = types["_" + name] = new TokenType(name, options)
|
||||||
|
}
|
||||||
|
|
||||||
|
kw("break")
|
||||||
|
kw("case", beforeExpr)
|
||||||
|
kw("catch")
|
||||||
|
kw("continue")
|
||||||
|
kw("debugger")
|
||||||
|
kw("default", beforeExpr)
|
||||||
|
kw("do", {isLoop: true, beforeExpr: true})
|
||||||
|
kw("else", beforeExpr)
|
||||||
|
kw("finally")
|
||||||
|
kw("for", {isLoop: true})
|
||||||
|
kw("function", startsExpr)
|
||||||
|
kw("if")
|
||||||
|
kw("return", beforeExpr)
|
||||||
|
kw("switch")
|
||||||
|
kw("throw", beforeExpr)
|
||||||
|
kw("try")
|
||||||
|
kw("var")
|
||||||
|
kw("let")
|
||||||
|
kw("const")
|
||||||
|
kw("while", {isLoop: true})
|
||||||
|
kw("with")
|
||||||
|
kw("new", {beforeExpr: true, startsExpr: true})
|
||||||
|
kw("this", startsExpr)
|
||||||
|
kw("super", startsExpr)
|
||||||
|
kw("class")
|
||||||
|
kw("extends", beforeExpr)
|
||||||
|
kw("export")
|
||||||
|
kw("import")
|
||||||
|
kw("yield", {beforeExpr: true, startsExpr: true})
|
||||||
|
kw("null", startsExpr)
|
||||||
|
kw("true", startsExpr)
|
||||||
|
kw("false", startsExpr)
|
||||||
|
kw("in", {beforeExpr: true, binop: 7})
|
||||||
|
kw("instanceof", {beforeExpr: true, binop: 7})
|
||||||
|
kw("typeof", {beforeExpr: true, prefix: true, startsExpr: true})
|
||||||
|
kw("void", {beforeExpr: true, prefix: true, startsExpr: true})
|
||||||
|
kw("delete", {beforeExpr: true, prefix: true, startsExpr: true})
|
|
@ -0,0 +1,9 @@
|
||||||
|
export function isArray(obj) {
|
||||||
|
return Object.prototype.toString.call(obj) === "[object Array]"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Checks if an object has a property.
|
||||||
|
|
||||||
|
export function has(obj, propName) {
|
||||||
|
return Object.prototype.hasOwnProperty.call(obj, propName)
|
||||||
|
}
|
|
@ -0,0 +1,340 @@
|
||||||
|
// AST walker module for Mozilla Parser API compatible trees
|
||||||
|
|
||||||
|
// A simple walk is one where you simply specify callbacks to be
|
||||||
|
// called on specific nodes. The last two arguments are optional. A
|
||||||
|
// simple use would be
|
||||||
|
//
|
||||||
|
// walk.simple(myTree, {
|
||||||
|
// Expression: function(node) { ... }
|
||||||
|
// });
|
||||||
|
//
|
||||||
|
// to do something with all expressions. All Parser API node types
|
||||||
|
// can be used to identify node types, as well as Expression,
|
||||||
|
// Statement, and ScopeBody, which denote categories of nodes.
|
||||||
|
//
|
||||||
|
// The base argument can be used to pass a custom (recursive)
|
||||||
|
// walker, and state can be used to give this walked an initial
|
||||||
|
// state.
|
||||||
|
|
||||||
|
export function simple(node, visitors, base, state, override) {
|
||||||
|
if (!base) base = exports.base
|
||||||
|
;(function c(node, st, override) {
|
||||||
|
let type = override || node.type, found = visitors[type]
|
||||||
|
base[type](node, st, c)
|
||||||
|
if (found) found(node, st)
|
||||||
|
})(node, state, override)
|
||||||
|
}
|
||||||
|
|
||||||
|
// An ancestor walk builds up an array of ancestor nodes (including
|
||||||
|
// the current node) and passes them to the callback as the state parameter.
|
||||||
|
export function ancestor(node, visitors, base, state) {
|
||||||
|
if (!base) base = exports.base
|
||||||
|
if (!state) state = []
|
||||||
|
;(function c(node, st, override) {
|
||||||
|
let type = override || node.type, found = visitors[type]
|
||||||
|
if (node != st[st.length - 1]) {
|
||||||
|
st = st.slice()
|
||||||
|
st.push(node)
|
||||||
|
}
|
||||||
|
base[type](node, st, c)
|
||||||
|
if (found) found(node, st)
|
||||||
|
})(node, state)
|
||||||
|
}
|
||||||
|
|
||||||
|
// A recursive walk is one where your functions override the default
|
||||||
|
// walkers. They can modify and replace the state parameter that's
|
||||||
|
// threaded through the walk, and can opt how and whether to walk
|
||||||
|
// their child nodes (by calling their third argument on these
|
||||||
|
// nodes).
|
||||||
|
export function recursive(node, state, funcs, base, override) {
|
||||||
|
let visitor = funcs ? exports.make(funcs, base) : base
|
||||||
|
;(function c(node, st, override) {
|
||||||
|
visitor[override || node.type](node, st, c)
|
||||||
|
})(node, state, override)
|
||||||
|
}
|
||||||
|
|
||||||
|
function makeTest(test) {
|
||||||
|
if (typeof test == "string")
|
||||||
|
return type => type == test
|
||||||
|
else if (!test)
|
||||||
|
return () => true
|
||||||
|
else
|
||||||
|
return test
|
||||||
|
}
|
||||||
|
|
||||||
|
class Found {
|
||||||
|
constructor(node, state) { this.node = node; this.state = state }
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find a node with a given start, end, and type (all are optional,
|
||||||
|
// null can be used as wildcard). Returns a {node, state} object, or
|
||||||
|
// undefined when it doesn't find a matching node.
|
||||||
|
export function findNodeAt(node, start, end, test, base, state) {
|
||||||
|
test = makeTest(test)
|
||||||
|
if (!base) base = exports.base
|
||||||
|
try {
|
||||||
|
;(function c(node, st, override) {
|
||||||
|
let type = override || node.type
|
||||||
|
if ((start == null || node.start <= start) &&
|
||||||
|
(end == null || node.end >= end))
|
||||||
|
base[type](node, st, c)
|
||||||
|
if ((start == null || node.start == start) &&
|
||||||
|
(end == null || node.end == end) &&
|
||||||
|
test(type, node))
|
||||||
|
throw new Found(node, st)
|
||||||
|
})(node, state)
|
||||||
|
} catch (e) {
|
||||||
|
if (e instanceof Found) return e
|
||||||
|
throw e
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the innermost node of a given type that contains the given
|
||||||
|
// position. Interface similar to findNodeAt.
|
||||||
|
export function findNodeAround(node, pos, test, base, state) {
|
||||||
|
test = makeTest(test)
|
||||||
|
if (!base) base = exports.base
|
||||||
|
try {
|
||||||
|
;(function c(node, st, override) {
|
||||||
|
let type = override || node.type
|
||||||
|
if (node.start > pos || node.end < pos) return
|
||||||
|
base[type](node, st, c)
|
||||||
|
if (test(type, node)) throw new Found(node, st)
|
||||||
|
})(node, state)
|
||||||
|
} catch (e) {
|
||||||
|
if (e instanceof Found) return e
|
||||||
|
throw e
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the outermost matching node after a given position.
|
||||||
|
export function findNodeAfter(node, pos, test, base, state) {
|
||||||
|
test = makeTest(test)
|
||||||
|
if (!base) base = exports.base
|
||||||
|
try {
|
||||||
|
;(function c(node, st, override) {
|
||||||
|
if (node.end < pos) return
|
||||||
|
let type = override || node.type
|
||||||
|
if (node.start >= pos && test(type, node)) throw new Found(node, st)
|
||||||
|
base[type](node, st, c)
|
||||||
|
})(node, state)
|
||||||
|
} catch (e) {
|
||||||
|
if (e instanceof Found) return e
|
||||||
|
throw e
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the outermost matching node before a given position.
|
||||||
|
export function findNodeBefore(node, pos, test, base, state) {
|
||||||
|
test = makeTest(test)
|
||||||
|
if (!base) base = exports.base
|
||||||
|
let max
|
||||||
|
;(function c(node, st, override) {
|
||||||
|
if (node.start > pos) return
|
||||||
|
let type = override || node.type
|
||||||
|
if (node.end <= pos && (!max || max.node.end < node.end) && test(type, node))
|
||||||
|
max = new Found(node, st)
|
||||||
|
base[type](node, st, c)
|
||||||
|
})(node, state)
|
||||||
|
return max
|
||||||
|
}
|
||||||
|
|
||||||
|
// Used to create a custom walker. Will fill in all missing node
|
||||||
|
// type properties with the defaults.
|
||||||
|
export function make(funcs, base) {
|
||||||
|
if (!base) base = exports.base
|
||||||
|
let visitor = {}
|
||||||
|
for (var type in base) visitor[type] = base[type]
|
||||||
|
for (var type in funcs) visitor[type] = funcs[type]
|
||||||
|
return visitor
|
||||||
|
}
|
||||||
|
|
||||||
|
function skipThrough(node, st, c) { c(node, st) }
|
||||||
|
function ignore(_node, _st, _c) {}
|
||||||
|
|
||||||
|
// Node walkers.
|
||||||
|
|
||||||
|
export const base = {}
|
||||||
|
|
||||||
|
base.Program = base.BlockStatement = (node, st, c) => {
|
||||||
|
for (let i = 0; i < node.body.length; ++i)
|
||||||
|
c(node.body[i], st, "Statement")
|
||||||
|
}
|
||||||
|
base.Statement = skipThrough
|
||||||
|
base.EmptyStatement = ignore
|
||||||
|
base.ExpressionStatement = base.ParenthesizedExpression =
|
||||||
|
(node, st, c) => c(node.expression, st, "Expression")
|
||||||
|
base.IfStatement = (node, st, c) => {
|
||||||
|
c(node.test, st, "Expression")
|
||||||
|
c(node.consequent, st, "Statement")
|
||||||
|
if (node.alternate) c(node.alternate, st, "Statement")
|
||||||
|
}
|
||||||
|
base.LabeledStatement = (node, st, c) => c(node.body, st, "Statement")
|
||||||
|
base.BreakStatement = base.ContinueStatement = ignore
|
||||||
|
base.WithStatement = (node, st, c) => {
|
||||||
|
c(node.object, st, "Expression")
|
||||||
|
c(node.body, st, "Statement")
|
||||||
|
}
|
||||||
|
base.SwitchStatement = (node, st, c) => {
|
||||||
|
c(node.discriminant, st, "Expression")
|
||||||
|
for (let i = 0; i < node.cases.length; ++i) {
|
||||||
|
let cs = node.cases[i]
|
||||||
|
if (cs.test) c(cs.test, st, "Expression")
|
||||||
|
for (let j = 0; j < cs.consequent.length; ++j)
|
||||||
|
c(cs.consequent[j], st, "Statement")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
base.ReturnStatement = base.YieldExpression = (node, st, c) => {
|
||||||
|
if (node.argument) c(node.argument, st, "Expression")
|
||||||
|
}
|
||||||
|
base.ThrowStatement = base.SpreadElement =
|
||||||
|
(node, st, c) => c(node.argument, st, "Expression")
|
||||||
|
base.TryStatement = (node, st, c) => {
|
||||||
|
c(node.block, st, "Statement")
|
||||||
|
if (node.handler) {
|
||||||
|
c(node.handler.param, st, "Pattern")
|
||||||
|
c(node.handler.body, st, "ScopeBody")
|
||||||
|
}
|
||||||
|
if (node.finalizer) c(node.finalizer, st, "Statement")
|
||||||
|
}
|
||||||
|
base.WhileStatement = base.DoWhileStatement = (node, st, c) => {
|
||||||
|
c(node.test, st, "Expression")
|
||||||
|
c(node.body, st, "Statement")
|
||||||
|
}
|
||||||
|
base.ForStatement = (node, st, c) => {
|
||||||
|
if (node.init) c(node.init, st, "ForInit")
|
||||||
|
if (node.test) c(node.test, st, "Expression")
|
||||||
|
if (node.update) c(node.update, st, "Expression")
|
||||||
|
c(node.body, st, "Statement")
|
||||||
|
}
|
||||||
|
base.ForInStatement = base.ForOfStatement = (node, st, c) => {
|
||||||
|
c(node.left, st, "ForInit")
|
||||||
|
c(node.right, st, "Expression")
|
||||||
|
c(node.body, st, "Statement")
|
||||||
|
}
|
||||||
|
base.ForInit = (node, st, c) => {
|
||||||
|
if (node.type == "VariableDeclaration") c(node, st)
|
||||||
|
else c(node, st, "Expression")
|
||||||
|
}
|
||||||
|
base.DebuggerStatement = ignore
|
||||||
|
|
||||||
|
base.FunctionDeclaration = (node, st, c) => c(node, st, "Function")
|
||||||
|
base.VariableDeclaration = (node, st, c) => {
|
||||||
|
for (let i = 0; i < node.declarations.length; ++i)
|
||||||
|
c(node.declarations[i], st)
|
||||||
|
}
|
||||||
|
base.VariableDeclarator = (node, st, c) => {
|
||||||
|
c(node.id, st, "Pattern")
|
||||||
|
if (node.init) c(node.init, st, "Expression")
|
||||||
|
}
|
||||||
|
|
||||||
|
base.Function = (node, st, c) => {
|
||||||
|
if (node.id) c(node.id, st, "Pattern")
|
||||||
|
for (let i = 0; i < node.params.length; i++)
|
||||||
|
c(node.params[i], st, "Pattern")
|
||||||
|
c(node.body, st, node.expression ? "ScopeExpression" : "ScopeBody")
|
||||||
|
}
|
||||||
|
// FIXME drop these node types in next major version
|
||||||
|
// (They are awkward, and in ES6 every block can be a scope.)
|
||||||
|
base.ScopeBody = (node, st, c) => c(node, st, "Statement")
|
||||||
|
base.ScopeExpression = (node, st, c) => c(node, st, "Expression")
|
||||||
|
|
||||||
|
base.Pattern = (node, st, c) => {
|
||||||
|
if (node.type == "Identifier")
|
||||||
|
c(node, st, "VariablePattern")
|
||||||
|
else if (node.type == "MemberExpression")
|
||||||
|
c(node, st, "MemberPattern")
|
||||||
|
else
|
||||||
|
c(node, st)
|
||||||
|
}
|
||||||
|
base.VariablePattern = ignore
|
||||||
|
base.MemberPattern = skipThrough
|
||||||
|
base.RestElement = (node, st, c) => c(node.argument, st, "Pattern")
|
||||||
|
base.ArrayPattern = (node, st, c) => {
|
||||||
|
for (let i = 0; i < node.elements.length; ++i) {
|
||||||
|
let elt = node.elements[i]
|
||||||
|
if (elt) c(elt, st, "Pattern")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
base.ObjectPattern = (node, st, c) => {
|
||||||
|
for (let i = 0; i < node.properties.length; ++i)
|
||||||
|
c(node.properties[i].value, st, "Pattern")
|
||||||
|
}
|
||||||
|
|
||||||
|
base.Expression = skipThrough
|
||||||
|
base.ThisExpression = base.Super = base.MetaProperty = ignore
|
||||||
|
base.ArrayExpression = (node, st, c) => {
|
||||||
|
for (let i = 0; i < node.elements.length; ++i) {
|
||||||
|
let elt = node.elements[i]
|
||||||
|
if (elt) c(elt, st, "Expression")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
base.ObjectExpression = (node, st, c) => {
|
||||||
|
for (let i = 0; i < node.properties.length; ++i)
|
||||||
|
c(node.properties[i], st)
|
||||||
|
}
|
||||||
|
base.FunctionExpression = base.ArrowFunctionExpression = base.FunctionDeclaration
|
||||||
|
base.SequenceExpression = base.TemplateLiteral = (node, st, c) => {
|
||||||
|
for (let i = 0; i < node.expressions.length; ++i)
|
||||||
|
c(node.expressions[i], st, "Expression")
|
||||||
|
}
|
||||||
|
base.UnaryExpression = base.UpdateExpression = (node, st, c) => {
|
||||||
|
c(node.argument, st, "Expression")
|
||||||
|
}
|
||||||
|
base.BinaryExpression = base.LogicalExpression = (node, st, c) => {
|
||||||
|
c(node.left, st, "Expression")
|
||||||
|
c(node.right, st, "Expression")
|
||||||
|
}
|
||||||
|
base.AssignmentExpression = base.AssignmentPattern = (node, st, c) => {
|
||||||
|
c(node.left, st, "Pattern")
|
||||||
|
c(node.right, st, "Expression")
|
||||||
|
}
|
||||||
|
base.ConditionalExpression = (node, st, c) => {
|
||||||
|
c(node.test, st, "Expression")
|
||||||
|
c(node.consequent, st, "Expression")
|
||||||
|
c(node.alternate, st, "Expression")
|
||||||
|
}
|
||||||
|
base.NewExpression = base.CallExpression = (node, st, c) => {
|
||||||
|
c(node.callee, st, "Expression")
|
||||||
|
if (node.arguments) for (let i = 0; i < node.arguments.length; ++i)
|
||||||
|
c(node.arguments[i], st, "Expression")
|
||||||
|
}
|
||||||
|
base.MemberExpression = (node, st, c) => {
|
||||||
|
c(node.object, st, "Expression")
|
||||||
|
if (node.computed) c(node.property, st, "Expression")
|
||||||
|
}
|
||||||
|
base.ExportNamedDeclaration = base.ExportDefaultDeclaration = (node, st, c) => {
|
||||||
|
if (node.declaration)
|
||||||
|
c(node.declaration, st, node.type == "ExportNamedDeclaration" || node.declaration.id ? "Statement" : "Expression")
|
||||||
|
if (node.source) c(node.source, st, "Expression")
|
||||||
|
}
|
||||||
|
base.ExportAllDeclaration = (node, st, c) => {
|
||||||
|
c(node.source, st, "Expression")
|
||||||
|
}
|
||||||
|
base.ImportDeclaration = (node, st, c) => {
|
||||||
|
for (let i = 0; i < node.specifiers.length; i++)
|
||||||
|
c(node.specifiers[i], st)
|
||||||
|
c(node.source, st, "Expression")
|
||||||
|
}
|
||||||
|
base.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.Literal = ignore
|
||||||
|
|
||||||
|
base.TaggedTemplateExpression = (node, st, c) => {
|
||||||
|
c(node.tag, st, "Expression")
|
||||||
|
c(node.quasi, st)
|
||||||
|
}
|
||||||
|
base.ClassDeclaration = base.ClassExpression = (node, st, c) => c(node, st, "Class")
|
||||||
|
base.Class = (node, st, c) => {
|
||||||
|
if (node.id) c(node.id, st, "Pattern")
|
||||||
|
if (node.superClass) c(node.superClass, st, "Expression")
|
||||||
|
for (let i = 0; i < node.body.body.length; i++)
|
||||||
|
c(node.body.body[i], st)
|
||||||
|
}
|
||||||
|
base.MethodDefinition = base.Property = (node, st, c) => {
|
||||||
|
if (node.computed) c(node.key, st, "Expression")
|
||||||
|
c(node.value, st, "Expression")
|
||||||
|
}
|
||||||
|
base.ComprehensionExpression = (node, st, c) => {
|
||||||
|
for (let i = 0; i < node.blocks.length; i++)
|
||||||
|
c(node.blocks[i].right, st, "Expression")
|
||||||
|
c(node.body, st, "Expression")
|
||||||
|
}
|
|
@ -0,0 +1,12 @@
|
||||||
|
// Matches a whole line break (where CRLF is considered a single
|
||||||
|
// line break). Used to count lines.
|
||||||
|
|
||||||
|
export const lineBreak = /\r\n?|\n|\u2028|\u2029/
|
||||||
|
export const lineBreakG = new RegExp(lineBreak.source, "g")
|
||||||
|
|
||||||
|
export function isNewLine(code) {
|
||||||
|
return code === 10 || code === 13 || code === 0x2028 || code == 0x2029
|
||||||
|
}
|
||||||
|
|
||||||
|
export const nonASCIIwhitespace = /[\u1680\u180e\u2000-\u200a\u202f\u205f\u3000\ufeff]/
|
||||||
|
|
|
@ -0,0 +1,21 @@
|
||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2015, Jon Schlinkert.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
|
@ -0,0 +1,236 @@
|
||||||
|
# align-text [![NPM version](https://badge.fury.io/js/align-text.svg)](http://badge.fury.io/js/align-text) [![Build Status](https://travis-ci.org/jonschlinkert/align-text.svg)](https://travis-ci.org/jonschlinkert/align-text)
|
||||||
|
|
||||||
|
> Align the text in a string.
|
||||||
|
|
||||||
|
**Examples**
|
||||||
|
|
||||||
|
Align text values in an array:
|
||||||
|
|
||||||
|
```js
|
||||||
|
align([1, 2, 3, 100]);
|
||||||
|
//=> [' 1', ' 2', ' 3', '100']
|
||||||
|
```
|
||||||
|
|
||||||
|
Or [do stuff like this](./example.js):
|
||||||
|
|
||||||
|
[![screen shot 2015-06-09 at 2 08 34 am](https://cloud.githubusercontent.com/assets/383994/8051597/7b716fbc-0e4c-11e5-9aef-4493fd22db58.png)](./example.js)
|
||||||
|
|
||||||
|
Visit [the example](./example.js) to see how this works.
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
Install with [npm](https://www.npmjs.com/)
|
||||||
|
|
||||||
|
```sh
|
||||||
|
$ npm i align-text --save
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
var align = require('align-text');
|
||||||
|
align(text, callback_function_or_integer);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Params**
|
||||||
|
|
||||||
|
* `text` can be a **string or array**. If a string is passed, a string will be returned. If an array is passed, an array will be returned.
|
||||||
|
* `callback|integer`: if an integer, the text will be indented by that amount. If a function, it must return an integer representing the amount of leading indentation to use as `align` loops over each line.
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
```js
|
||||||
|
align(text, 4);
|
||||||
|
```
|
||||||
|
|
||||||
|
Would align:
|
||||||
|
|
||||||
|
```
|
||||||
|
abc
|
||||||
|
abc
|
||||||
|
abc
|
||||||
|
```
|
||||||
|
|
||||||
|
To:
|
||||||
|
|
||||||
|
```
|
||||||
|
abc
|
||||||
|
abc
|
||||||
|
abc
|
||||||
|
```
|
||||||
|
|
||||||
|
## callback
|
||||||
|
|
||||||
|
### params
|
||||||
|
|
||||||
|
The callback is used to determine the indentation of each line and gets the following params:
|
||||||
|
|
||||||
|
* `len` the length of the "current" line
|
||||||
|
* `longest` the length of the longest line
|
||||||
|
* `line` the current line (string) being aligned
|
||||||
|
* `lines` the array of all lines
|
||||||
|
|
||||||
|
### return
|
||||||
|
|
||||||
|
The callback may return:
|
||||||
|
|
||||||
|
* an integer that represents the number of spaces to use for padding,
|
||||||
|
* or an object with the following properties:
|
||||||
|
- `indent`: **{Number}** the amount of indentation to use. Default is `0` when an object is returned.
|
||||||
|
- `character`: **{String}** the character to use for indentation. Default is `''` (empty string) when an object is returned.
|
||||||
|
- `prefix`: **{String}** leading characters to use at the beginning of each line. `''` (empty string) when an object is returned.
|
||||||
|
|
||||||
|
**Integer example:**
|
||||||
|
|
||||||
|
```js
|
||||||
|
// calculate half the difference between the length
|
||||||
|
// of the current line and the longest line
|
||||||
|
function centerAlign(len, longest, line, lines) {
|
||||||
|
return Math.floor((longest - len) / 2);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Object example:**
|
||||||
|
|
||||||
|
```js
|
||||||
|
function centerAlign(len, longest, line, lines) {
|
||||||
|
return {
|
||||||
|
character: '\t',
|
||||||
|
indent: Math.floor((longest - len) / 2),
|
||||||
|
prefix: '~ ',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage examples
|
||||||
|
|
||||||
|
### Center align
|
||||||
|
|
||||||
|
Using the `centerAlign` function from above:
|
||||||
|
|
||||||
|
```js
|
||||||
|
align(text, centerAlign);
|
||||||
|
```
|
||||||
|
|
||||||
|
Would align this text:
|
||||||
|
|
||||||
|
```js
|
||||||
|
Lorem ipsum dolor sit amet
|
||||||
|
consectetur adipiscin
|
||||||
|
elit, sed do eiusmod tempor incididun
|
||||||
|
ut labore et dolor
|
||||||
|
magna aliqua. Ut enim ad mini
|
||||||
|
veniam, quis
|
||||||
|
```
|
||||||
|
|
||||||
|
Resulting in this:
|
||||||
|
|
||||||
|
```
|
||||||
|
Lorem ipsum dolor sit amet,
|
||||||
|
consectetur adipiscing
|
||||||
|
elit, sed do eiusmod tempor incididunt
|
||||||
|
ut labore et dolore
|
||||||
|
magna aliqua. Ut enim ad minim
|
||||||
|
veniam, quis
|
||||||
|
```
|
||||||
|
|
||||||
|
**Customize**
|
||||||
|
|
||||||
|
If you wanted to add more padding on the left, just pass the number in the callback.
|
||||||
|
|
||||||
|
For example, to add 4 spaces before every line:
|
||||||
|
|
||||||
|
```js
|
||||||
|
function centerAlign(len, longest, line, lines) {
|
||||||
|
return 4 + Math.floor((longest - len) / 2);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Would result in:
|
||||||
|
|
||||||
|
```
|
||||||
|
Lorem ipsum dolor sit amet,
|
||||||
|
consectetur adipiscing
|
||||||
|
elit, sed do eiusmod tempor incididunt
|
||||||
|
ut labore et dolore
|
||||||
|
magna aliqua. Ut enim ad minim
|
||||||
|
veniam, quis
|
||||||
|
```
|
||||||
|
|
||||||
|
### Bullets
|
||||||
|
|
||||||
|
```js
|
||||||
|
align(text, function (len, max, line, lines) {
|
||||||
|
return {prefix: ' - '};
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
Would return:
|
||||||
|
|
||||||
|
```
|
||||||
|
- Lorem ipsum dolor sit amet,
|
||||||
|
- consectetur adipiscing
|
||||||
|
- elit, sed do eiusmod tempor incididunt
|
||||||
|
- ut labore et dolore
|
||||||
|
- magna aliqua. Ut enim ad minim
|
||||||
|
- veniam, quis
|
||||||
|
```
|
||||||
|
|
||||||
|
### Different indent character
|
||||||
|
|
||||||
|
```js
|
||||||
|
align(text, function (len, max, line, lines) {
|
||||||
|
return {
|
||||||
|
indent: Math.floor((max - len) / 2),
|
||||||
|
character: '~',
|
||||||
|
};
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
Would return
|
||||||
|
|
||||||
|
```
|
||||||
|
~~~~~Lorem ipsum dolor sit amet,
|
||||||
|
~~~~~~~~consectetur adipiscing
|
||||||
|
elit, sed do eiusmod tempor incididunt
|
||||||
|
~~~~~~~~~ut labore et dolore
|
||||||
|
~~~~magna aliqua. Ut enim ad minim
|
||||||
|
~~~~~~~~~~~~~veniam, quis
|
||||||
|
```
|
||||||
|
|
||||||
|
## Related projects
|
||||||
|
|
||||||
|
* [center-align](https://github.com/jonschlinkert/center-align): Center-align the text in a string.
|
||||||
|
* [justify](https://github.com/bahamas10/node-justify): Left or right (or both) justify text using a custom width and character
|
||||||
|
* [longest](https://github.com/jonschlinkert/longest): Get the longest item in an array.
|
||||||
|
* [right-align](https://github.com/jonschlinkert/right-align): Right-align the text in a string.
|
||||||
|
* [repeat-string](https://github.com/jonschlinkert/repeat-string): Repeat the given string n times. Fastest implementation for repeating a string.
|
||||||
|
* [word-wrap](https://github.com/jonschlinkert/word-wrap): Wrap words to a specified length.
|
||||||
|
|
||||||
|
## Running tests
|
||||||
|
|
||||||
|
Install dev dependencies:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
$ npm i -d && npm test
|
||||||
|
```
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/align-text/issues/new)
|
||||||
|
|
||||||
|
## Author
|
||||||
|
|
||||||
|
**Jon Schlinkert**
|
||||||
|
|
||||||
|
+ [github/jonschlinkert](https://github.com/jonschlinkert)
|
||||||
|
+ [twitter/jonschlinkert](http://twitter.com/jonschlinkert)
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
Copyright © 2015 [Jon Schlinkert](https://github.com/jonschlinkert)
|
||||||
|
Released under the MIT license.
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on June 09, 2015._
|
|
@ -0,0 +1,52 @@
|
||||||
|
/*!
|
||||||
|
* align-text <https://github.com/jonschlinkert/align-text>
|
||||||
|
*
|
||||||
|
* Copyright (c) 2015, Jon Schlinkert.
|
||||||
|
* Licensed under the MIT License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
var typeOf = require('kind-of');
|
||||||
|
var repeat = require('repeat-string');
|
||||||
|
var longest = require('longest');
|
||||||
|
|
||||||
|
module.exports = function alignText(val, fn) {
|
||||||
|
var lines, type = typeOf(val);
|
||||||
|
|
||||||
|
if (type === 'array') {
|
||||||
|
lines = val;
|
||||||
|
} else if (type === 'string') {
|
||||||
|
lines = val.split(/(?:\r\n|\n)/);
|
||||||
|
} else {
|
||||||
|
throw new TypeError('align-text expects a string or array.');
|
||||||
|
}
|
||||||
|
|
||||||
|
var fnType = typeOf(fn);
|
||||||
|
var len = lines.length;
|
||||||
|
var max = longest(lines);
|
||||||
|
var res = [], i = 0;
|
||||||
|
|
||||||
|
while (len--) {
|
||||||
|
var line = String(lines[i++]);
|
||||||
|
var diff;
|
||||||
|
|
||||||
|
if (fnType === 'function') {
|
||||||
|
diff = fn(line.length, max.length, line, lines, i);
|
||||||
|
} else if (fnType === 'number') {
|
||||||
|
diff = fn;
|
||||||
|
} else {
|
||||||
|
diff = max.length - line.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeOf(diff) === 'number') {
|
||||||
|
res.push(repeat(' ', diff) + line);
|
||||||
|
} else if (typeOf(diff) === 'object') {
|
||||||
|
var result = repeat(diff.character || ' ', diff.indent || 0);
|
||||||
|
res.push((diff.prefix || '') + result + line);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (type === 'array') return res;
|
||||||
|
return res.join('\n');
|
||||||
|
};
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user