migrating to svelte

parent 96a12939c8
commit a819d952d3
@ -7,7 +7,7 @@ from generateur.generateur_main import generate_from_data, generate_from_path
from database.auth.models import User
from database.db import get_session
from fastapi import Depends
from database.exercices.models import ExampleEnum, ExerciceCreate, Exercice, ExerciceEdit, ExerciceRead, ExercicesTagLink, Tag, TagCreate, Supports
from database.exercices.models import ExampleEnum, ExerciceCreate, Exercice, ExerciceEdit, ExerciceRead, ExercicesTagLink, Tag, TagCreate, Supports, ExerciceReadFull
from services.auth import get_current_user, get_current_user_optional
from services.database import generate_unique_code
from services.io import add_fast_api_root, get_ancestor, get_or_create_dir

@ -185,9 +185,9 @@ def get_tags_dependency(tags: List[str] | None = Query(None), db: Session = Depe
def check_author(exo: Exercice, user_id:int):
    return exo.author_id == user_id

def serialize_exo(*, exo: Exercice, user_id: User = None, db: Session):
def serialize_exo(*, exo: ExerciceRead, user_id: User = None, db: Session):
    tags = parse_exo_tags(exo_id=exo.id, user_id=user_id,
                          db=db) if user_id is not None else []
    is_author = user_id is not None and check_author(exo=exo, user_id=user_id)
    return ExerciceRead(**exo.dict(), author=exo.author, original=exo.original, tags=tags, is_author=is_author, supports={**exo.dict()})
    return ExerciceReadFull(**exo.dict(), author=exo.author, original=exo.original, tags=tags, is_author=is_author, supports={**exo.dict()})
@ -144,10 +144,8 @@ class Author(SQLModel):
def get_source_path_from_name_and_id(name: str, id_code: str):
    return f'/uploads/{id_code}/{name}'


class ExerciceRead(ExerciceBase):
class ExerciceReadBase(ExerciceBase):
    id_code: str
    author: Author
    original: ExerciceOrigin | None

@ -158,14 +156,23 @@ class ExerciceRead(ExerciceBase):
    examples: Example = None

    is_author: bool = None
    supports: Supports

    @validator('exo_source')
    def get_exo_source_name(cls, value, values):
        if value is not None:
            return get_filename_from_path(value)
        return value

class ExerciceRead(ExerciceBase):
    id: int
    author_id:int
    author: User
    original: Optional[Exercice]
    tags: List[Tag]

class ExerciceReadFull(ExerciceReadBase):
    supports: Supports
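The hunks above split the read schema in two: `ExerciceReadBase` carries the shared fields and `ExerciceReadFull` adds the heavier `supports` payload, which lets routes choose a lighter or fuller response shape. A minimal standalone sketch of the pattern, with placeholder names rather than the project's real fields:

```python
from typing import Optional
from sqlmodel import SQLModel

class ThingReadBase(SQLModel):          # shared read-only fields (placeholder)
    id_code: str

class ThingReadFull(ThingReadBase):     # detail responses add the heavier fields
    supports: Optional[dict] = None

# A route picks the serialized shape through response_model=ThingReadBase
# or response_model=ThingReadFull; FastAPI filters the returned object to it.
```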
@ -7,7 +7,7 @@ from database.auth.crud import create_user_db
from services.auth import get_current_user_optional, jwt_required
from fastapi.openapi.utils import get_openapi
from database.auth.models import User, UserRead
from database.exercices.models import Exercice, ExerciceRead
from database.exercices.models import Exercice, ExerciceReadFull
from fastapi_pagination import add_pagination
from fastapi.responses import PlainTextResponse
from fastapi.exceptions import RequestValidationError, ValidationError

@ -161,9 +161,9 @@ def refresh_revoke(Authorize: AuthJWT = Depends()):

class user(UserRead):
    exercices: List[ExerciceRead] = None
    exercices: List[ExerciceReadFull] = None

@app.post('/test', response_model=List[ExerciceRead] )
@app.post('/test', response_model=List[ExerciceReadFull] )
def test(db:Session= Depends(get_session)):
    #create_user_db('lilian', get_password_hash('Pomme937342'), db)
    create_user_db('lilian2', get_password_hash('Pomme937342'), db)
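`add_pagination` from fastapi-pagination is imported above; it only takes effect once it is applied to the application object, a call that sits outside this hunk. A minimal sketch of the usual wiring (the bare `app` below is a placeholder, not the project's setup):

```python
from fastapi import FastAPI
from fastapi_pagination import add_pagination

app = FastAPI()

# ... include routers that declare response_model=Page[...] here ...

# Applying pagination after the routes are registered injects the
# page/size query parameters that the Page response models rely on.
add_pagination(app)
```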
@ -3,7 +3,7 @@ from typing import List
from fastapi import APIRouter, Depends, Path, Query, UploadFile, HTTPException, status
from database.auth.models import User
from database.db import get_session
from database.exercices.models import Exercice, ExerciceCreate, ExerciceEdit, ExerciceRead, ExercicesTagLink, Tag, TagCreate, TagRead
from database.exercices.models import Exercice, ExerciceCreate, ExerciceEdit, ExerciceReadFull, ExercicesTagLink, Tag, TagCreate, TagRead, ExerciceRead
from services.auth import get_current_user, get_current_user_optional
from sqlmodel import Session, select, col
from database.exercices.crud import add_tags_db, check_exercice_author, check_private, check_tag_author, create_exo_db, delete_exo_db, get_exo_dependency, clone_exo_db, parse_exo_tags, remove_tag_db, serialize_exo, update_exo_db, get_tags_dependency
@ -11,7 +11,8 @@ from services.exoValidation import validate_file, validate_file_optionnal
from services.io import add_fast_api_root, get_filename_from_path
from fastapi.responses import FileResponse
from sqlmodel import func

from fastapi_pagination import Page, paginate
from fastapi_pagination.ext.sqlalchemy_future import paginate as p
router = APIRouter(tags=['exercices'])

class ExoType(str, Enum):
@ -30,7 +31,7 @@ def queryFilters_dependency(search: str = "", tags: List[int] | None = Depends(g
    return search, tags, type


@router.post('/exercices', response_model=ExerciceRead, status_code=status.HTTP_201_CREATED)
@router.post('/exercices', response_model=ExerciceReadFull, status_code=status.HTTP_201_CREATED)
def create_exo(exercice: ExerciceCreate = Depends(ExerciceCreate.as_form), file: UploadFile = Depends(validate_file), user: User = Depends(get_current_user), db: Session = Depends(get_session)):
    file_obj = file['file'].file._file
    file_obj.name = file['file'].filename
@ -39,7 +40,7 @@ def create_exo(exercice: ExerciceCreate = Depends(ExerciceCreate.as_form), file:
    return serialize_exo(exo=exo_obj, user_id=user.id, db=db)


@router.post('/clone/{id_code}', response_model=ExerciceRead)
@router.post('/clone/{id_code}', response_model=ExerciceReadFull)
def clone_exo(exercice: Exercice | None = Depends(check_private), user: User = Depends(get_current_user), db: Session = Depends(get_session)):
    if not exercice:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail={
@ -51,11 +52,10 @@ def clone_exo(exercice: Exercice | None = Depends(check_private), user: User = D
    return serialize_exo(exo=exo_obj, user_id=user.id, db=db)


@router.get('/exercices/user', response_model=List[ExerciceRead])
@router.get('/exercices/user', response_model=Page[ExerciceRead|ExerciceReadFull])
def get_user_exercices(user: User = Depends(get_current_user), queryFilters: tuple[str, List[int] | None] = Depends(queryFilters_dependency), db: Session = Depends(get_session)):
    search, tags, type = queryFilters

    if tags is not None and len(tags) != 0:
        statement = select(Exercice, func.group_concat(
            ExercicesTagLink.tag_id))
@ -76,16 +76,18 @@ def get_user_exercices(user: User = Depends(get_current_user), queryFilters: tup
        statement = statement.join(ExercicesTagLink).where(
            col(ExercicesTagLink.tag_id).in_(tags)).group_by(ExercicesTagLink.exercice_id)

    exercices = db.exec(statement).all()
    #exercices = db.exec(statement).all()
    page = p(db, statement)
    exercices = page.items
    if tags is not None and len(tags) != 0:
        exercices = filter_exo_by_tags(exercices, tags)

    exercices = [
        serialize_exo(exo=e, user_id=user.id, db= db) for e in exercices]
    return exercices
    page.items = [
        serialize_exo(exo=e, user_id=user.id, db=db) for e in exercices]

    return page


@router.get('/exercices/public', response_model=List[ExerciceRead])
@router.get('/exercices/public', response_model=Page[ExerciceRead|ExerciceReadFull])
def get_public_exercices(user: User | None = Depends(get_current_user_optional), queryFilters: tuple[str, List[int] | None] = Depends(queryFilters_dependency), db: Session = Depends(get_session)):
    search, tags, type = queryFilters

@ -130,15 +132,15 @@ def get_public_exercices(user: User | None = Depends(get_current_user_optional),
    if type == ExoType.web:
        statement = statement.where(Exercice.web == True)
    exercices = db.exec(statement).all()
    return [serialize_exo(exo=e, user_id=None, db=db) for e in exercices]
    return paginate([serialize_exo(exo=e, user_id=None, db=db) for e in exercices])


@router.get('/exercice/{id_code}', response_model=ExerciceRead)
@router.get('/exercice/{id_code}', response_model=ExerciceReadFull)
def get_exercice(exo: Exercice = Depends(check_private), user: User | None = Depends(get_current_user_optional), db: Session = Depends(get_session)):
    return serialize_exo(exo=exo, user_id=getattr(user, 'id', None), db=db)


@router.put('/exercice/{id_code}', response_model=ExerciceRead)
@router.put('/exercice/{id_code}', response_model=ExerciceReadFull)
def update_exo(file: UploadFile = Depends(validate_file_optionnal), exo: Exercice = Depends(check_exercice_author), exercice: ExerciceEdit = Depends(ExerciceEdit.as_form), db: Session = Depends(get_session)):
    if exo is None:
        raise HTTPException(
@ -167,7 +169,7 @@ def delete_exercice(exercice: Exercice | bool | None = Depends(check_exercice_au
    return {'detail': 'Exercice supprimé avec succès'}


@router.post('/exercice/{id_code}/tags', response_model=ExerciceRead, tags=['tags'])
@router.post('/exercice/{id_code}/tags', response_model=ExerciceReadFull, tags=['tags'])
def add_tags(tags: List[TagCreate], exo: Exercice | None = Depends(get_exo_dependency), db: Session = Depends(get_session), user: User = Depends(get_current_user)):
    if exo is None:
        raise HTTPException(
@ -176,7 +178,7 @@ def add_tags(tags: List[TagCreate], exo: Exercice | None = Depends(get_exo_depen
    return serialize_exo(exo=exo_obj, user_id=user.id, db=db)


@router.delete('/exercice/{id_code}/tags/{tag_id}', response_model=ExerciceRead, tags=['tags'])
@router.delete('/exercice/{id_code}/tags/{tag_id}', response_model=ExerciceReadFull, tags=['tags'])
def remove_tag(exo: Exercice | None = Depends(get_exo_dependency), tag: Tag | None | bool = Depends(check_tag_author), db: Session = Depends(get_session)):
    if exo is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
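Two pagination styles are mixed in the routes above: `p(db, statement)` from the sqlalchemy_future extension paginates at the query level, while `paginate(...)` wraps a list that has already been loaded and serialized. A condensed sketch of the query-level variant, using a throwaway model and session instead of the project's (all names here are placeholders):

```python
from fastapi import Depends, FastAPI
from fastapi_pagination import Page, add_pagination
from fastapi_pagination.ext.sqlalchemy_future import paginate as paginate_query
from sqlmodel import Field, Session, SQLModel, create_engine, select

class Item(SQLModel, table=True):                 # placeholder model
    id: int | None = Field(default=None, primary_key=True)
    name: str = ""

engine = create_engine("sqlite://")               # throwaway in-memory database
SQLModel.metadata.create_all(engine)

def get_session():                                # stand-in for the real dependency
    with Session(engine) as session:
        yield session

app = FastAPI()

@app.get("/items", response_model=Page[Item])
def list_items(db: Session = Depends(get_session)):
    # LIMIT/OFFSET are applied in SQL; items, total, page and size come back
    # in the response envelope instead of a bare list.
    return paginate_query(db, select(Item))

add_pagination(app)
```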
@ -337,7 +337,60 @@ def test_get_users_exos(client: TestClient):
    r = client.get('/exercices/user',
                   headers={'Authorization': 'Bearer ' + token1})
    print(r.json())
    assert r.json() == [prv]
    assert r.json()['page'] == 1
    assert r.json()['size'] == 50
    assert r.json()["items"] == [prv]


def test_get_users_exos_page(client: TestClient):
    token1 = test_register(client, username="lilian")['access']
    token2 = test_register(client, username="lilian2")['access']

    prv = test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv2 = test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv3= test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv4= test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv5= test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv6= test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv7= test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv8= test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv9= test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv10= test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv11 = test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv12 = test_create(client, private=True, user={'token': token1, 'username': "lilian"})


    r = client.get('/exercices/user',
                   headers={'Authorization': 'Bearer ' + token1}, params={"page": 2, "size": 10})
    print(r.json())
    assert r.json()['page'] == 2
    assert r.json()['size'] == 10
    assert r.json()["items"] == [prv11, prv12]

def test_get_users_exos_page_up(client: TestClient):
    token1 = test_register(client, username="lilian")['access']
    token2 = test_register(client, username="lilian2")['access']

    prv = test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv2 = test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv3= test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv4= test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv5= test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv6= test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv7= test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv8= test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv9= test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv10= test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv11 = test_create(client, private=True, user={'token': token1, 'username': "lilian"})
    prv12 = test_create(client, private=True, user={'token': token1, 'username': "lilian"})


    r = client.get('/exercices/user',
                   headers={'Authorization': 'Bearer ' + token1}, params={"page": 2, "size": 10})
    print(r.json())
    assert r.json()['page'] == 2
    assert r.json()['size'] == 10
    assert r.json()["items"] == []


def test_get_user_with_search(client: TestClient):
@ -361,7 +414,7 @@ def test_get_user_with_search(client: TestClient):
    r = client.get('/exercices/user', params={"search": "test"},
                   headers={'Authorization': 'Bearer ' + token1})
    print(r.json())
    assert r.json() == [exo1, exo2]
    assert r.json()['items'] == [exo1, exo2]


def test_get_user_with_search(client: TestClient):
@ -385,7 +438,7 @@ def test_get_user_with_search(client: TestClient):
    r = client.get('/exercices/user', params={"search": "test"},
                   headers={'Authorization': 'Bearer ' + token1})
    print(r.json())
    assert r.json() == [exo1, exo2]
    assert r.json()['items'] == [exo1, exo2]


def test_get_user_with_tags(client: TestClient):
@ -415,7 +468,7 @@ def test_get_user_with_tags(client: TestClient):
    r = client.get('/exercices/user', params={'tags': [*[t['id_code'] for t in tags2], 'notexist']},
                   headers={'Authorization': 'Bearer ' + token1})
    print(r.json())
    assert r.json() == [exo2, exo3]
    assert r.json()['items'] == [exo2, exo3]


def test_get_user_with_tags_and_search(client: TestClient):
@ -446,7 +499,7 @@ def test_get_user_with_tags_and_search(client: TestClient):
    r = client.get('/exercices/user', params={"search": "yes", 'tags': [t['id_code'] for t in tags2]},
                   headers={'Authorization': 'Bearer ' + token1})
    print(r.json())
    assert r.json() == [exo3]
    assert r.json()['items'] == [exo3]


def test_get_public_auth(client: TestClient):
@ -463,7 +516,7 @@ def test_get_public_auth(client: TestClient):
    r = client.get('/exercices/public',
                   headers={'Authorization': 'Bearer ' + token2})
    print(r.json())
    assert r.json() == [{**public2, 'is_author': False}]
    assert r.json()['items'] == [{**public2, 'is_author': False}]


def test_get_public_auth_with_search(client: TestClient):
@ -483,7 +536,7 @@ def test_get_public_auth_with_search(client: TestClient):
    r = client.get('/exercices/public',
                   params={'search': "yes"}, headers={'Authorization': 'Bearer ' + token2})
    print(r.json())
    assert r.json() == [{**public2, 'is_author': False}]
    assert r.json()['items'] == [{**public2, 'is_author': False}]


def test_get_public_no_auth(client: TestClient):
@ -499,7 +552,7 @@ def test_get_public_no_auth(client: TestClient):

    r = client.get('/exercices/public')
    print(r.json())
    assert r.json() == [{**public1, 'is_author': False},
    assert r.json()['items'] == [{**public1, 'is_author': False},
                        {**public2, 'is_author': False}]


@ -508,7 +561,7 @@ def test_get_exo_no_auth(client: TestClient):
    exo = test_add_tags(client, user={'token': token, 'username': "lilian"})

    r = client.get('/exercice/' + exo['id_code'])
    assert r.json() == {**exo, "tags": [], 'is_author': False}
    assert r.json()['items'] == {**exo, "tags": [], 'is_author': False}


def test_get_exo_no_auth_private(client: TestClient):
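The updated assertions read `page`, `size` and `items` instead of comparing against a bare list because the paginated endpoints now answer with the default fastapi-pagination envelope. Roughly, with illustrative values:

```python
# Default Page envelope returned by the paginated endpoints (values illustrative):
response_body = {
    "items": [...],   # serialized exercises for the requested page
    "total": 12,      # number of matching rows across all pages
    "page": 2,        # requested page number (1-based)
    "size": 10,       # requested page size (50 when not supplied)
}
```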
frontend/.eslintignore (Normal file, 13 lines)
@ -0,0 +1,13 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
.env
.env.*
!.env.example

# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock
frontend/.eslintrc.cjs (Normal file, 20 lines)
@ -0,0 +1,20 @@
module.exports = {
  root: true,
  parser: '@typescript-eslint/parser',
  extends: ['eslint:recommended', 'plugin:@typescript-eslint/recommended', 'prettier'],
  plugins: ['svelte3', '@typescript-eslint'],
  ignorePatterns: ['*.cjs'],
  overrides: [{ files: ['*.svelte'], processor: 'svelte3/svelte3' }],
  settings: {
    'svelte3/typescript': () => require('typescript')
  },
  parserOptions: {
    sourceType: 'module',
    ecmaVersion: 2020
  },
  env: {
    browser: true,
    es2017: true,
    node: true
  }
};
frontend/.gitignore (vendored, Normal file, 10 lines)
@ -0,0 +1,10 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
.env
.env.*
!.env.example
vite.config.js.timestamp-*
vite.config.ts.timestamp-*
frontend/.npmrc (Normal file, 1 line)
@ -0,0 +1 @@
engine-strict=true
frontend/.prettierignore (Normal file, 13 lines)
@ -0,0 +1,13 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
.env
.env.*
!.env.example

# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock
frontend/.prettierrc (Normal file, 9 lines)
@ -0,0 +1,9 @@
{
  "useTabs": true,
  "singleQuote": true,
  "trailingComma": "none",
  "printWidth": 100,
  "plugins": ["prettier-plugin-svelte"],
  "pluginSearchDirs": ["."],
  "overrides": [{ "files": "*.svelte", "options": { "parser": "svelte" } }]
}
@ -1,34 +1,38 @@
## Usage
# create-svelte

Those templates dependencies are maintained via [pnpm](https://pnpm.io) via `pnpm up -Lri`.
Everything you need to build a Svelte project, powered by [`create-svelte`](https://github.com/sveltejs/kit/tree/master/packages/create-svelte).

This is the reason you see a `pnpm-lock.yaml`. That being said, any package manager will work. This file can be safely be removed once you clone a template.
## Creating a project

If you're seeing this, you've probably already done this step. Congrats!

```bash
$ npm install # or pnpm install or yarn install
# create a new project in the current directory
npm create svelte@latest

# create a new project in my-app
npm create svelte@latest my-app
```

### Learn more on the [Solid Website](https://solidjs.com) and come chat with us on our [Discord](https://discord.com/invite/solidjs)
## Developing

## Available Scripts
Once you've created a project and installed dependencies with `npm install` (or `pnpm install` or `yarn`), start a development server:

In the project directory, you can run:
```bash
npm run dev

### `npm dev` or `npm start`
# or start the server and open the app in a new browser tab
npm run dev -- --open
```

Runs the app in the development mode.<br>
Open [http://localhost:3000](http://localhost:3000) to view it in the browser.
## Building

The page will reload if you make edits.<br>
To create a production version of your app:

### `npm run build`
```bash
npm run build
```

Builds the app for production to the `dist` folder.<br>
It correctly bundles Solid in production mode and optimizes the build for the best performance.
You can preview the production build with `npm run preview`.

The build is minified and the filenames include the hashes.<br>
Your app is ready to be deployed!

## Deployment

You can deploy the `dist` folder to any static host provider (netlify, surge, now, etc.)
> To deploy your app, you may need to install an [adapter](https://kit.svelte.dev/docs/adapters) for your target environment.
@ -1,33 +1,35 @@
{
  "name": "vite-template-solid",
  "version": "0.0.0",
  "description": "",
  "scripts": {
    "start": "vite",
    "dev": "vite",
    "build": "vite build",
    "serve": "vite preview"
  },
  "license": "MIT",
  "devDependencies": {
    "sass": "^1.54.3",
    "typescript": "^4.7.4",
    "vite": "^3.0.0",
    "vite-plugin-solid": "^2.3.0"
  },
  "dependencies": {
    "@solidjs/meta": "^0.28.0",
    "@solidjs/router": "^0.4.2",
    "axios": "^0.27.2",
    "chroma-js": "^2.4.2",
    "emotion-solid": "^1.1.1",
    "jwt-decode": "^3.1.2",
    "solid-forms": "^0.4.5",
    "solid-icons": "^1.0.1",
    "solid-js": "^1.4.7",
    "solid-styled-components": "^0.28.4",
    "solid-styled-jsx": "^0.27.1",
    "solid-toast": "^0.3.4",
    "styled-jsx": "^3.4.4"
  }
  "name": "frontend",
  "version": "0.0.1",
  "private": true,
  "scripts": {
    "dev": "vite dev",
    "build": "vite build",
    "preview": "vite preview",
    "test": "playwright test",
    "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
    "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
    "test:unit": "vitest",
    "lint": "prettier --plugin-search-dir . --check . && eslint .",
    "format": "prettier --plugin-search-dir . --write ."
  },
  "devDependencies": {
    "@playwright/test": "^1.28.1",
    "@sveltejs/adapter-auto": "^1.0.0",
    "@sveltejs/kit": "^1.0.0",
    "@typescript-eslint/eslint-plugin": "^5.45.0",
    "@typescript-eslint/parser": "^5.45.0",
    "eslint": "^8.28.0",
    "eslint-config-prettier": "^8.5.0",
    "eslint-plugin-svelte3": "^4.0.0",
    "prettier": "^2.8.0",
    "prettier-plugin-svelte": "^2.8.1",
    "svelte": "^3.54.0",
    "svelte-check": "^2.9.2",
    "tslib": "^2.4.1",
    "typescript": "^4.9.3",
    "vite": "^4.0.0",
    "vitest": "^0.25.3"
  },
  "type": "module"
}
frontend/playwright.config.ts (Normal file, 11 lines)
@ -0,0 +1,11 @@
import type { PlaywrightTestConfig } from '@playwright/test';

const config: PlaywrightTestConfig = {
  webServer: {
    command: 'npm run build && npm run preview',
    port: 4173
  },
  testDir: 'tests'
};

export default config;
frontend/src/app.d.ts (vendored, Normal file, 9 lines)
@ -0,0 +1,9 @@
// See https://kit.svelte.dev/docs/types#app
// for information about these interfaces
// and what to do when importing types
declare namespace App {
  // interface Error {}
  // interface Locals {}
  // interface PageData {}
  // interface Platform {}
}
frontend/src/app.html (Normal file, 12 lines)
@ -0,0 +1,12 @@
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="utf-8" />
    <link rel="icon" href="%sveltekit.assets%/favicon.png" />
    <meta name="viewport" content="width=device-width" />
    %sveltekit.head%
  </head>
  <body data-sveltekit-preload-data="hover">
    <div style="display: contents">%sveltekit.body%</div>
  </body>
</html>
@ -1,147 +0,0 @@
import { useNavigate } from "@solidjs/router";
import jwtDecode from "jwt-decode";
import { useContext } from "solid-js";
import { createSignal } from "solid-js";
import { Show } from "solid-js";
import { createEffect } from "solid-js";
import { createContext } from "solid-js";
import { createStore } from "solid-js/store";
import toast from "solid-toast";
import { setSourceMapRange } from "typescript";
import {
  check_access,
  get_user,
  login_request,
  refresh_request,
  register_request,
  revoke_access,
  revoke_refresh,
} from "../requests/auth.requests.js";
import { useLoginPopup } from "./loginPopUp.context.jsx";
import { AiFillAndroid } from 'solid-icons/ai'
import { useNotification } from "./notification.context.jsx";
const AuthContext = createContext();


const jwt_expire_check = (token) => {
  var { exp } = jwtDecode(token);
  return Date.now() >= exp * 1000;
};
export function AuthProvider(props) {
  const [username, setUsername] = createSignal(null);
  const [isAuthenticated, setAuthenticated] = createSignal(false);
  const [loading, setLoading] = createSignal(false);
  const [initialLoading, setInitialLoading] = createSignal(true);

  const navigate = useNavigate();
  const notification = useNotification()
  const popup = useLoginPopup();

  createEffect(() => {
    var token = localStorage.getItem("token");
    var refresh = localStorage.getItem("refresh_token");
    if (token != null) {
      var is_expired = jwt_expire_check(token);
      if (is_expired && refresh != null) {
        refresh_request(refresh)
          .then((r) => {
            localStorage.setItem("token", r.access_token);
            return r.access_token;
          })
          .then((t) => {
            setAuthenticated(true);
            var { sub } = jwtDecode(t);
            setUsername(sub);
          }).catch((err) => {
            if ( !!err.response.status && err.reponse.status == 422) {
              localStorage.removeItem("refresh_token");
              localStorage.removeItem("token");
            }
          });
      } else {
        check_access(token)
          .then(() => {
            setAuthenticated(true);
            var { sub } = jwtDecode(token);
            setUsername(sub);
          })
          .catch((err) => {
            if (err.reponse.status == 422) {
              localStorage.removeItem("refresh_token");
              localStorage.removeItem("token");
            }
          });
      }
    }
    setInitialLoading(false);
  });

  const login = (username, password) => {
    var data = new URLSearchParams({ username, password });
    return login_request(data).then((r) => {
      localStorage.setItem("token", r.access_token);
      localStorage.setItem("refresh_token", r.refresh_token);

      var decoded = jwtDecode(r.access_token);
      setAuthenticated(true);
      setUsername(decoded.sub);

      if (popup.active == true) {
        popup.next();
      } else {
        navigate("/dashboard");
      }
    });
  };

  const signup = (username, password, password2) => {
    var data = new URLSearchParams({
      username: username,
      password: password,
      password_confirm: password2,
    });
    register_request(data).then((r) => {
      localStorage.setItem("token", r.access_token);
      localStorage.setItem("refresh_token", r.refresh_token);

      var decoded = jwtDecode(r.access_token);
      setAuthenticated(true);
      setUsername(decoded.sub);

      navigate("/dashboard");
    });
  };

  const logout = () => {
    var token = localStorage.getItem("token");
    var refresh = localStorage.getItem("refresh_token");
    revoke_access(token)
      .then(() => {
        return revoke_refresh(refresh);
      })
      .then(() => {
        localStorage.removeItem("token");
        localStorage.removeItem("refresh_token");
        setUsername(null)
        setAuthenticated(false)
        navigate("/login");
        notification.success('Déconnexion', "Vous n'êtes plus connecté !")
      });
  };
  return (
    <AuthContext.Provider
      value={{
        username: username,
        isAuthenticated: isAuthenticated,
        loading: loading,
        login,
        signup,
        logout,
      }}
    >
      <Show when={!initialLoading()}>{props.children}</Show>
    </AuthContext.Provider>
  );
}

export const useAuth = () => useContext(AuthContext);
frontend/src/index.test.ts (Normal file, 7 lines)
@ -0,0 +1,7 @@
import { describe, it, expect } from 'vitest';

describe('sum test', () => {
  it('adds 1 + 2 to equal 3', () => {
    expect(1 + 2).toBe(3);
  });
});
@ -1,126 +0,0 @@
import { authInstance } from "../apis/auth.instance.js"

export const login_request= (data)=> {
  return authInstance
    .request({
      url: "/login",
      method: "post",
      data: data,
      headers: { "Content-Type": "application/x-www-form-urlencoded" },
    })
    .then((r) => r.data);
}
export const register_request= (data)=> {
  return authInstance
    .request({
      url: "/register",
      method: "post",
      data: data,
      headers: { "Content-Type": "application/x-www-form-urlencoded" },
    })
    .then((r) => r.data).catch((err)=> {throw err});
}
export const refresh_request= (token)=> {
  return authInstance
    .request({
      url: "/refresh",
      method: "post",
      headers: { "Authorization": "Bearer " + token },
    })
    .then((r) => r.data);
}
export const revoke_access = (token)=> {
  return authInstance
    .request({
      url: "/access-revoke",
      method: "delete",
      headers: { "Authorization": "Bearer " + token },
    })
    .then((r) => r.data);
}

export const check_access = (token)=> {
  return authInstance
    .request({
      url: "/check-access",
      method: "post",
      headers: { "Authorization": "Bearer " + token },
    })
    .then((r) => r.data);
}

export const revoke_refresh = (token)=> {
  return authInstance
    .request({
      url: "/refresh-revoke",
      method: "delete",
      headers: { "Authorization": "Bearer " + token },
    })
    .then((r) => r.data);
}

export const revoke_all = () => {
  revoke_access(localStorage.getItem('token'))
  revoke_refresh(localStorage.getItem('refresh_token'))
}

export const get_user = () => {
  return authInstance
    .request({
      url: "/user",
      method: "get",
      headers: {
        ...(localStorage.getItem('token') != null && {Authorization: "Bearer " + localStorage.getItem('token')}),
      },
    })
    .then((r) => r.data);
}
export const update_user = (data) => {
  return authInstance
    .request({
      url: "/user",
      method: "put",
      data: data,
      headers: {
        "Content-Type": "application/x-www-form-urlencoded",
        ...(localStorage.getItem("token") != null && {
          Authorization: "Bearer " + localStorage.getItem("token"),
        }),
      },
    })
    .then((r) => r.data);
}


export const update_password = (data) => {
  return authInstance
    .request({
      url: "/user/password",
      method: "put",
      data: data,
      headers: {
        "Content-Type": "application/x-www-form-urlencoded",
        ...(localStorage.getItem("token") != null && {
          Authorization: "Bearer " + localStorage.getItem("token"),
        }),
      },
    })
    .then((r) => r.data);
}

export const delete_user = (data) => {
  return authInstance
    .request({
      url: "/user/",
      method: "delete",
      data: data,
      headers: {
        "Content-Type": "application/x-www-form-urlencoded",
        ...(localStorage.getItem("token") != null && {
          Authorization: "Bearer " + localStorage.getItem("token"),
        }),
      },
    })
    .then((r) => r.data);
}
frontend/src/routes/+page.svelte (Normal file, 2 lines)
@ -0,0 +1,2 @@
<h1>Welcome to SvelteKit</h1>
<p>Visit <a href="https://kit.svelte.dev">kit.svelte.dev</a> to read the documentation</p>
frontend/static/favicon.png (binary, Normal file, 1.5 KiB; binary file not shown)
frontend/svelte.config.js (Normal file, 15 lines)
@ -0,0 +1,15 @@
import adapter from '@sveltejs/adapter-auto';
import { vitePreprocess } from '@sveltejs/kit/vite';

/** @type {import('@sveltejs/kit').Config} */
const config = {
  // Consult https://kit.svelte.dev/docs/integrations#preprocessors
  // for more information about preprocessors
  preprocess: vitePreprocess(),

  kit: {
    adapter: adapter()
  }
};

export default config;
frontend/tests/test.ts (Normal file, 6 lines)
@ -0,0 +1,6 @@
import { expect, test } from '@playwright/test';

test('index page has expected h1', async ({ page }) => {
  await page.goto('/');
  expect(await page.textContent('h1')).toBe('Welcome to SvelteKit');
});
@ -1,17 +1,17 @@
{
  "compilerOptions": {
    "strict": true,
    "target": "ESNext",
    "module": "ESNext",
    "moduleResolution": "node",
    "allowSyntheticDefaultImports": true,
    "esModuleInterop": true,
    "jsx": "preserve",
    "jsxImportSource": "solid-js",
    "types": ["vite/client"],
    "noEmit": true,
    "isolatedModules": true,
    "plugins": [{ "name": "typescript-plugin-css-modules" }],
    "noImplicitAny": false
  }
  "extends": "./.svelte-kit/tsconfig.json",
  "compilerOptions": {
    "allowJs": true,
    "checkJs": true,
    "esModuleInterop": true,
    "forceConsistentCasingInFileNames": true,
    "resolveJsonModule": true,
    "skipLibCheck": true,
    "sourceMap": true,
    "strict": true
  }
  // Path aliases are handled by https://kit.svelte.dev/docs/configuration#alias
  //
  // If you want to overwrite includes/excludes, make sure to copy over the relevant includes/excludes
  // from the referenced tsconfig.json - TypeScript does not merge them in
}
frontend/vite.config.js (Normal file, 11 lines)
@ -0,0 +1,11 @@
import { sveltekit } from '@sveltejs/kit/vite';

/** @type {import('vite').UserConfig} */
const config = {
  plugins: [sveltekit()],
  test: {
    include: ['src/**/*.{test,spec}.{js,ts}']
  }
};

export default config;
frontend_old/README.md (Normal file, 34 lines)
@ -0,0 +1,34 @@
## Usage

Those templates dependencies are maintained via [pnpm](https://pnpm.io) via `pnpm up -Lri`.

This is the reason you see a `pnpm-lock.yaml`. That being said, any package manager will work. This file can be safely be removed once you clone a template.

```bash
$ npm install # or pnpm install or yarn install
```

### Learn more on the [Solid Website](https://solidjs.com) and come chat with us on our [Discord](https://discord.com/invite/solidjs)

## Available Scripts

In the project directory, you can run:

### `npm dev` or `npm start`

Runs the app in the development mode.<br>
Open [http://localhost:3000](http://localhost:3000) to view it in the browser.

The page will reload if you make edits.<br>

### `npm run build`

Builds the app for production to the `dist` folder.<br>
It correctly bundles Solid in production mode and optimizes the build for the best performance.

The build is minified and the filenames include the hashes.<br>
Your app is ready to be deployed!

## Deployment

You can deploy the `dist` folder to any static host provider (netlify, surge, now, etc.)
frontend_old/node_modules/.bin/sass (generated, vendored, Executable file, 17 lines)
frontend_old/node_modules/.bin/tsc (generated, vendored, Executable file, 17 lines)
frontend_old/node_modules/.bin/tsserver (generated, vendored, Executable file, 17 lines)
frontend_old/node_modules/.bin/vite (generated, vendored, Executable file, 17 lines)
frontend_old/node_modules/.modules.yaml (generated, vendored, Normal file, 333 lines)
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/LICENSE (generated, vendored, Normal file, 202 lines)
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2019 Google LLC
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
218
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/README.md
generated
vendored
Normal file
@ -0,0 +1,218 @@
# @ampproject/remapping

> Remap sequential sourcemaps through transformations to point at the original source code

Remapping allows you to take the sourcemaps generated through transforming your code and "remap"
them to the original source locations. Think "my minified code, transformed with babel and bundled
with webpack", all pointing to the correct location in your original source code.

With remapping, none of your source code transformations need to be aware of the input's sourcemap,
they only need to generate an output sourcemap. This greatly simplifies building custom
transformations (think a find-and-replace).

## Installation

```sh
npm install @ampproject/remapping
```

## Usage

```typescript
function remapping(
  map: SourceMap | SourceMap[],
  loader: (file: string, ctx: LoaderContext) => (SourceMap | null | undefined),
  options?: { excludeContent: boolean, decodedMappings: boolean }
): SourceMap;

// LoaderContext gives the loader the importing sourcemap, tree depth, the ability to override the
// "source" location (where child sources are resolved relative to, or the location of original
// source), and the ability to override the "content" of an original source for inclusion in the
// output sourcemap.
type LoaderContext = {
  readonly importer: string;
  readonly depth: number;
  source: string;
  content: string | null | undefined;
}
```

`remapping` takes the final output sourcemap, and a `loader` function. For every source file pointer
in the sourcemap, the `loader` will be called with the resolved path. If the path itself represents
a transformed file (it has a sourcemap associated with it), then the `loader` should return that
sourcemap. If not, the path will be treated as an original, untransformed source file.

```js
// Babel transformed "helloworld.js" into "transformed.js"
const transformedMap = JSON.stringify({
  file: 'transformed.js',
  // 1st column of 2nd line of output file translates into the 1st source
  // file, line 3, column 2
  mappings: ';CAEE',
  sources: ['helloworld.js'],
  version: 3,
});

// Uglify minified "transformed.js" into "transformed.min.js"
const minifiedTransformedMap = JSON.stringify({
  file: 'transformed.min.js',
  // 0th column of 1st line of output file translates into the 1st source
  // file, line 2, column 1.
  mappings: 'AACC',
  names: [],
  sources: ['transformed.js'],
  version: 3,
});

const remapped = remapping(
  minifiedTransformedMap,
  (file, ctx) => {

    // The "transformed.js" file is a transformed file.
    if (file === 'transformed.js') {
      // The root importer is empty.
      console.assert(ctx.importer === '');
      // The depth in the sourcemap tree we're currently loading.
      // The root `minifiedTransformedMap` is depth 0, and its source children are depth 1, etc.
      console.assert(ctx.depth === 1);

      return transformedMap;
    }

    // Loader will be called to load transformedMap's source file pointers as well.
    console.assert(file === 'helloworld.js');
    // `transformed.js`'s sourcemap points into `helloworld.js`.
    console.assert(ctx.importer === 'transformed.js');
    // This is a source child of `transformed`, which is a source child of `minifiedTransformedMap`.
    console.assert(ctx.depth === 2);
    return null;
  }
);

console.log(remapped);
// {
//   file: 'transformed.min.js',
//   mappings: 'AAEE',
//   sources: ['helloworld.js'],
//   version: 3,
// };
```

In this example, `loader` will be called twice:

1. `"transformed.js"`, the first source file pointer in the `minifiedTransformedMap`. We return the
   associated sourcemap for it (it's a transformed file, after all) so that sourcemap locations can
   be traced through it into the source files it represents.
2. `"helloworld.js"`, our original, unmodified source code. This file does not have a sourcemap, so
   we return `null`.

The `remapped` sourcemap now points from `transformed.min.js` into locations in `helloworld.js`. If
you were to read the `mappings`, it says "the 0th column of the 1st output line points to the 1st
column of the 2nd line of the file `helloworld.js`".

### Multiple transformations of a file

As a convenience, if you have multiple single-source transformations of a file, you may pass an
array of sourcemap files in the order of most-recent transformation sourcemap first. Note that this
changes the `importer` and `depth` of each call to our loader. So our above example could have been
written as:

```js
const remapped = remapping(
  [minifiedTransformedMap, transformedMap],
  () => null
);

console.log(remapped);
// {
//   file: 'transformed.min.js',
//   mappings: 'AAEE',
//   sources: ['helloworld.js'],
//   version: 3,
// };
```

### Advanced control of the loading graph

#### `source`

The `source` property can be overridden to any value to change the location of the current load. Eg,
for an original source file, it allows us to change the location to the original source regardless
of what the sourcemap source entry says. And for transformed files, it allows us to change the
relative resolving location for child sources of the loaded sourcemap.

```js
const remapped = remapping(
  minifiedTransformedMap,
  (file, ctx) => {

    if (file === 'transformed.js') {
      // We pretend the transformed.js file actually exists in the 'src/' directory. When the nested
      // source files are loaded, they will now be relative to `src/`.
      ctx.source = 'src/transformed.js';
      return transformedMap;
    }

    console.assert(file === 'src/helloworld.js');
    // We could further change the source of this original file, eg, to be inside a nested directory
    // itself. This will be reflected in the remapped sourcemap.
    ctx.source = 'src/nested/helloworld.js';
    return null;
  }
);

console.log(remapped);
// {
//   …,
//   sources: ['src/nested/helloworld.js'],
// };
```

#### `content`

The `content` property can be overridden when we encounter an original source file. Eg, this allows
you to manually provide the source content of the original file regardless of whether the
`sourcesContent` field is present in the parent sourcemap. It can also be set to `null` to remove
the source content.

```js
const remapped = remapping(
  minifiedTransformedMap,
  (file, ctx) => {

    if (file === 'transformed.js') {
      // transformedMap does not include a `sourcesContent` field, so usually the remapped sourcemap
      // would not include any `sourcesContent` values.
      return transformedMap;
    }

    console.assert(file === 'helloworld.js');
    // We can read the file to provide the source content.
    ctx.content = fs.readFileSync(file, 'utf8');
    return null;
  }
);

console.log(remapped);
// {
//   …,
//   sourcesContent: [
//     'console.log("Hello world!")',
//   ],
// };
```

### Options

#### excludeContent

By default, `excludeContent` is `false`. Passing `{ excludeContent: true }` will exclude the
`sourcesContent` field from the returned sourcemap. This is mainly useful when you want to reduce
the size of the sourcemap.

#### decodedMappings

By default, `decodedMappings` is `false`. Passing `{ decodedMappings: true }` will leave the
`mappings` field in a [decoded state](https://github.com/rich-harris/sourcemap-codec) instead of
encoding into a VLQ string.
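For reference, a minimal sketch (not part of the upstream README) combining both options, reusing `minifiedTransformedMap` and `transformedMap` from the examples above:

```js
const remapped = remapping(
  minifiedTransformedMap,
  (file) => (file === 'transformed.js' ? transformedMap : null),
  { excludeContent: true, decodedMappings: true }
);

// `sourcesContent` is omitted from the output, and `mappings` stays in decoded
// form (an array of segment arrays) instead of a VLQ-encoded string.
console.log(Array.isArray(remapped.mappings)); // true
console.log('sourcesContent' in remapped); // false
```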
204
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/remapping.mjs
generated
vendored
Normal file
@ -0,0 +1,204 @@
|
||||
import { decodedMappings, traceSegment, TraceMap } from '@jridgewell/trace-mapping';
|
||||
import { GenMapping, addSegment, setSourceContent, decodedMap, encodedMap } from '@jridgewell/gen-mapping';
|
||||
|
||||
const SOURCELESS_MAPPING = {
|
||||
source: null,
|
||||
column: null,
|
||||
line: null,
|
||||
name: null,
|
||||
content: null,
|
||||
};
|
||||
const EMPTY_SOURCES = [];
|
||||
function Source(map, sources, source, content) {
|
||||
return {
|
||||
map,
|
||||
sources,
|
||||
source,
|
||||
content,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
|
||||
* (which may themselves be SourceMapTrees).
|
||||
*/
|
||||
function MapSource(map, sources) {
|
||||
return Source(map, sources, '', null);
|
||||
}
|
||||
/**
|
||||
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
|
||||
* segment tracing ends at the `OriginalSource`.
|
||||
*/
|
||||
function OriginalSource(source, content) {
|
||||
return Source(null, EMPTY_SOURCES, source, content);
|
||||
}
|
||||
/**
|
||||
* traceMappings is only called on the root level SourceMapTree, and begins the process of
|
||||
* resolving each mapping in terms of the original source files.
|
||||
*/
|
||||
function traceMappings(tree) {
|
||||
const gen = new GenMapping({ file: tree.map.file });
|
||||
const { sources: rootSources, map } = tree;
|
||||
const rootNames = map.names;
|
||||
const rootMappings = decodedMappings(map);
|
||||
for (let i = 0; i < rootMappings.length; i++) {
|
||||
const segments = rootMappings[i];
|
||||
let lastSource = null;
|
||||
let lastSourceLine = null;
|
||||
let lastSourceColumn = null;
|
||||
for (let j = 0; j < segments.length; j++) {
|
||||
const segment = segments[j];
|
||||
const genCol = segment[0];
|
||||
let traced = SOURCELESS_MAPPING;
|
||||
// 1-length segments only move the current generated column, there's no source information
|
||||
// to gather from it.
|
||||
if (segment.length !== 1) {
|
||||
const source = rootSources[segment[1]];
|
||||
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
|
||||
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
|
||||
// respective segment into an original source.
|
||||
if (traced == null)
|
||||
continue;
|
||||
}
|
||||
// So we traced a segment down into its original source file. Now push a
|
||||
// new segment pointing to this location.
|
||||
const { column, line, name, content, source } = traced;
|
||||
if (line === lastSourceLine && column === lastSourceColumn && source === lastSource) {
|
||||
continue;
|
||||
}
|
||||
lastSourceLine = line;
|
||||
lastSourceColumn = column;
|
||||
lastSource = source;
|
||||
// Sigh, TypeScript can't figure out source/line/column are either all null, or all non-null...
|
||||
addSegment(gen, i, genCol, source, line, column, name);
|
||||
if (content != null)
|
||||
setSourceContent(gen, source, content);
|
||||
}
|
||||
}
|
||||
return gen;
|
||||
}
|
||||
/**
|
||||
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
|
||||
* child SourceMapTrees, until we find the original source map.
|
||||
*/
|
||||
function originalPositionFor(source, line, column, name) {
|
||||
if (!source.map) {
|
||||
return { column, line, name, source: source.source, content: source.content };
|
||||
}
|
||||
const segment = traceSegment(source.map, line, column);
|
||||
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
|
||||
if (segment == null)
|
||||
return null;
|
||||
// 1-length segments only move the current generated column, there's no source information
|
||||
// to gather from it.
|
||||
if (segment.length === 1)
|
||||
return SOURCELESS_MAPPING;
|
||||
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
|
||||
}
|
||||
|
||||
function asArray(value) {
|
||||
if (Array.isArray(value))
|
||||
return value;
|
||||
return [value];
|
||||
}
|
||||
/**
|
||||
* Recursively builds a tree structure out of sourcemap files, with each node
|
||||
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
|
||||
* `OriginalSource`s and `SourceMapTree`s.
|
||||
*
|
||||
* Every sourcemap is composed of a collection of source files and mappings
|
||||
* into locations of those source files. When we generate a `SourceMapTree` for
|
||||
* the sourcemap, we attempt to load each source file's own sourcemap. If it
|
||||
* does not have an associated sourcemap, it is considered an original,
|
||||
* unmodified source file.
|
||||
*/
|
||||
function buildSourceMapTree(input, loader) {
|
||||
const maps = asArray(input).map((m) => new TraceMap(m, ''));
|
||||
const map = maps.pop();
|
||||
for (let i = 0; i < maps.length; i++) {
|
||||
if (maps[i].sources.length > 1) {
|
||||
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
|
||||
'Did you specify these with the most recent transformation maps first?');
|
||||
}
|
||||
}
|
||||
let tree = build(map, loader, '', 0);
|
||||
for (let i = maps.length - 1; i >= 0; i--) {
|
||||
tree = MapSource(maps[i], [tree]);
|
||||
}
|
||||
return tree;
|
||||
}
|
||||
function build(map, loader, importer, importerDepth) {
|
||||
const { resolvedSources, sourcesContent } = map;
|
||||
const depth = importerDepth + 1;
|
||||
const children = resolvedSources.map((sourceFile, i) => {
|
||||
// The loading context gives the loader more information about why this file is being loaded
|
||||
// (eg, from which importer). It also allows the loader to override the location of the loaded
|
||||
// sourcemap/original source, or to override the content in the sourcesContent field if it's
|
||||
// an unmodified source file.
|
||||
const ctx = {
|
||||
importer,
|
||||
depth,
|
||||
source: sourceFile || '',
|
||||
content: undefined,
|
||||
};
|
||||
// Use the provided loader callback to retrieve the file's sourcemap.
|
||||
// TODO: We should eventually support async loading of sourcemap files.
|
||||
const sourceMap = loader(ctx.source, ctx);
|
||||
const { source, content } = ctx;
|
||||
// If there is a sourcemap, then we need to recurse into it to load its source files.
|
||||
if (sourceMap)
|
||||
return build(new TraceMap(sourceMap, source), loader, source, depth);
|
||||
// Else, it's an unmodified source file.
|
||||
// The contents of this unmodified source file can be overridden via the loader context,
|
||||
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
|
||||
// the importing sourcemap's `sourcesContent` field.
|
||||
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
|
||||
return OriginalSource(source, sourceContent);
|
||||
});
|
||||
return MapSource(map, children);
|
||||
}
|
||||
|
||||
/**
|
||||
* A SourceMap v3 compatible sourcemap, which only includes fields that were
|
||||
* provided to it.
|
||||
*/
|
||||
class SourceMap {
|
||||
constructor(map, options) {
|
||||
const out = options.decodedMappings ? decodedMap(map) : encodedMap(map);
|
||||
this.version = out.version; // SourceMap spec says this should be first.
|
||||
this.file = out.file;
|
||||
this.mappings = out.mappings;
|
||||
this.names = out.names;
|
||||
this.sourceRoot = out.sourceRoot;
|
||||
this.sources = out.sources;
|
||||
if (!options.excludeContent) {
|
||||
this.sourcesContent = out.sourcesContent;
|
||||
}
|
||||
}
|
||||
toString() {
|
||||
return JSON.stringify(this);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Traces through all the mappings in the root sourcemap, through the sources
|
||||
* (and their sourcemaps), all the way back to the original source location.
|
||||
*
|
||||
* `loader` will be called every time we encounter a source file. If it returns
|
||||
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
|
||||
* it returns a falsey value, that source file is treated as an original,
|
||||
* unmodified source file.
|
||||
*
|
||||
* Pass `excludeContent` to exclude any self-containing source file content
|
||||
* from the output sourcemap.
|
||||
*
|
||||
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
|
||||
* VLQ encoded) mappings.
|
||||
*/
|
||||
function remapping(input, loader, options) {
|
||||
const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
|
||||
const tree = buildSourceMapTree(input, loader);
|
||||
return new SourceMap(traceMappings(tree), opts);
|
||||
}
|
||||
|
||||
export { remapping as default };
|
||||
//# sourceMappingURL=remapping.mjs.map
|
1
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/remapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
209
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/remapping.umd.js
generated
vendored
Normal file
@ -0,0 +1,209 @@
|
||||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('@jridgewell/trace-mapping'), require('@jridgewell/gen-mapping')) :
|
||||
typeof define === 'function' && define.amd ? define(['@jridgewell/trace-mapping', '@jridgewell/gen-mapping'], factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.remapping = factory(global.traceMapping, global.genMapping));
|
||||
})(this, (function (traceMapping, genMapping) { 'use strict';
|
||||
|
||||
const SOURCELESS_MAPPING = {
|
||||
source: null,
|
||||
column: null,
|
||||
line: null,
|
||||
name: null,
|
||||
content: null,
|
||||
};
|
||||
const EMPTY_SOURCES = [];
|
||||
function Source(map, sources, source, content) {
|
||||
return {
|
||||
map,
|
||||
sources,
|
||||
source,
|
||||
content,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
|
||||
* (which may themselves be SourceMapTrees).
|
||||
*/
|
||||
function MapSource(map, sources) {
|
||||
return Source(map, sources, '', null);
|
||||
}
|
||||
/**
|
||||
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
|
||||
* segment tracing ends at the `OriginalSource`.
|
||||
*/
|
||||
function OriginalSource(source, content) {
|
||||
return Source(null, EMPTY_SOURCES, source, content);
|
||||
}
|
||||
/**
|
||||
* traceMappings is only called on the root level SourceMapTree, and begins the process of
|
||||
* resolving each mapping in terms of the original source files.
|
||||
*/
|
||||
function traceMappings(tree) {
|
||||
const gen = new genMapping.GenMapping({ file: tree.map.file });
|
||||
const { sources: rootSources, map } = tree;
|
||||
const rootNames = map.names;
|
||||
const rootMappings = traceMapping.decodedMappings(map);
|
||||
for (let i = 0; i < rootMappings.length; i++) {
|
||||
const segments = rootMappings[i];
|
||||
let lastSource = null;
|
||||
let lastSourceLine = null;
|
||||
let lastSourceColumn = null;
|
||||
for (let j = 0; j < segments.length; j++) {
|
||||
const segment = segments[j];
|
||||
const genCol = segment[0];
|
||||
let traced = SOURCELESS_MAPPING;
|
||||
// 1-length segments only move the current generated column, there's no source information
|
||||
// to gather from it.
|
||||
if (segment.length !== 1) {
|
||||
const source = rootSources[segment[1]];
|
||||
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
|
||||
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
|
||||
// respective segment into an original source.
|
||||
if (traced == null)
|
||||
continue;
|
||||
}
|
||||
// So we traced a segment down into its original source file. Now push a
|
||||
// new segment pointing to this location.
|
||||
const { column, line, name, content, source } = traced;
|
||||
if (line === lastSourceLine && column === lastSourceColumn && source === lastSource) {
|
||||
continue;
|
||||
}
|
||||
lastSourceLine = line;
|
||||
lastSourceColumn = column;
|
||||
lastSource = source;
|
||||
// Sigh, TypeScript can't figure out source/line/column are either all null, or all non-null...
|
||||
genMapping.addSegment(gen, i, genCol, source, line, column, name);
|
||||
if (content != null)
|
||||
genMapping.setSourceContent(gen, source, content);
|
||||
}
|
||||
}
|
||||
return gen;
|
||||
}
|
||||
/**
|
||||
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
|
||||
* child SourceMapTrees, until we find the original source map.
|
||||
*/
|
||||
function originalPositionFor(source, line, column, name) {
|
||||
if (!source.map) {
|
||||
return { column, line, name, source: source.source, content: source.content };
|
||||
}
|
||||
const segment = traceMapping.traceSegment(source.map, line, column);
|
||||
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
|
||||
if (segment == null)
|
||||
return null;
|
||||
// 1-length segments only move the current generated column, there's no source information
|
||||
// to gather from it.
|
||||
if (segment.length === 1)
|
||||
return SOURCELESS_MAPPING;
|
||||
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
|
||||
}
|
||||
|
||||
function asArray(value) {
|
||||
if (Array.isArray(value))
|
||||
return value;
|
||||
return [value];
|
||||
}
|
||||
/**
|
||||
* Recursively builds a tree structure out of sourcemap files, with each node
|
||||
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
|
||||
* `OriginalSource`s and `SourceMapTree`s.
|
||||
*
|
||||
* Every sourcemap is composed of a collection of source files and mappings
|
||||
* into locations of those source files. When we generate a `SourceMapTree` for
|
||||
* the sourcemap, we attempt to load each source file's own sourcemap. If it
|
||||
* does not have an associated sourcemap, it is considered an original,
|
||||
* unmodified source file.
|
||||
*/
|
||||
function buildSourceMapTree(input, loader) {
|
||||
const maps = asArray(input).map((m) => new traceMapping.TraceMap(m, ''));
|
||||
const map = maps.pop();
|
||||
for (let i = 0; i < maps.length; i++) {
|
||||
if (maps[i].sources.length > 1) {
|
||||
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
|
||||
'Did you specify these with the most recent transformation maps first?');
|
||||
}
|
||||
}
|
||||
let tree = build(map, loader, '', 0);
|
||||
for (let i = maps.length - 1; i >= 0; i--) {
|
||||
tree = MapSource(maps[i], [tree]);
|
||||
}
|
||||
return tree;
|
||||
}
|
||||
function build(map, loader, importer, importerDepth) {
|
||||
const { resolvedSources, sourcesContent } = map;
|
||||
const depth = importerDepth + 1;
|
||||
const children = resolvedSources.map((sourceFile, i) => {
|
||||
// The loading context gives the loader more information about why this file is being loaded
|
||||
// (eg, from which importer). It also allows the loader to override the location of the loaded
|
||||
// sourcemap/original source, or to override the content in the sourcesContent field if it's
|
||||
// an unmodified source file.
|
||||
const ctx = {
|
||||
importer,
|
||||
depth,
|
||||
source: sourceFile || '',
|
||||
content: undefined,
|
||||
};
|
||||
// Use the provided loader callback to retrieve the file's sourcemap.
|
||||
// TODO: We should eventually support async loading of sourcemap files.
|
||||
const sourceMap = loader(ctx.source, ctx);
|
||||
const { source, content } = ctx;
|
||||
// If there is a sourcemap, then we need to recurse into it to load its source files.
|
||||
if (sourceMap)
|
||||
return build(new traceMapping.TraceMap(sourceMap, source), loader, source, depth);
|
||||
// Else, it's an unmodified source file.
|
||||
// The contents of this unmodified source file can be overridden via the loader context,
|
||||
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
|
||||
// the importing sourcemap's `sourcesContent` field.
|
||||
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
|
||||
return OriginalSource(source, sourceContent);
|
||||
});
|
||||
return MapSource(map, children);
|
||||
}
|
||||
|
||||
/**
|
||||
* A SourceMap v3 compatible sourcemap, which only includes fields that were
|
||||
* provided to it.
|
||||
*/
|
||||
class SourceMap {
|
||||
constructor(map, options) {
|
||||
const out = options.decodedMappings ? genMapping.decodedMap(map) : genMapping.encodedMap(map);
|
||||
this.version = out.version; // SourceMap spec says this should be first.
|
||||
this.file = out.file;
|
||||
this.mappings = out.mappings;
|
||||
this.names = out.names;
|
||||
this.sourceRoot = out.sourceRoot;
|
||||
this.sources = out.sources;
|
||||
if (!options.excludeContent) {
|
||||
this.sourcesContent = out.sourcesContent;
|
||||
}
|
||||
}
|
||||
toString() {
|
||||
return JSON.stringify(this);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Traces through all the mappings in the root sourcemap, through the sources
|
||||
* (and their sourcemaps), all the way back to the original source location.
|
||||
*
|
||||
* `loader` will be called every time we encounter a source file. If it returns
|
||||
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
|
||||
* it returns a falsey value, that source file is treated as an original,
|
||||
* unmodified source file.
|
||||
*
|
||||
* Pass `excludeContent` to exclude any self-containing source file content
|
||||
* from the output sourcemap.
|
||||
*
|
||||
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
|
||||
* VLQ encoded) mappings.
|
||||
*/
|
||||
function remapping(input, loader, options) {
|
||||
const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
|
||||
const tree = buildSourceMapTree(input, loader);
|
||||
return new SourceMap(traceMappings(tree), opts);
|
||||
}
|
||||
|
||||
return remapping;
|
||||
|
||||
}));
|
||||
//# sourceMappingURL=remapping.umd.js.map
|
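Not part of the vendored file, but for orientation: the UMD wrapper above serves CommonJS, AMD, and a plain browser global, and the package.json further down points its `main` field at this build. A minimal sketch of consuming it from Node, with a made-up single-file sourcemap:

```js
// Node resolves this require to dist/remapping.umd.js via the package's "main" field.
const remapping = require('@ampproject/remapping');

// A made-up minimal sourcemap, just to exercise the API.
const map = {
  version: 3,
  file: 'out.js',
  sources: ['input.js'],
  names: [],
  mappings: 'AAAA',
};

// A loader that always returns null treats every source as an original file.
const remapped = remapping(map, () => null);
console.log(remapped.toString()); // JSON string, via SourceMap#toString above
```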
1
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/remapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
14
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/types/build-source-map-tree.d.ts
generated
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
import type { MapSource as MapSourceType } from './source-map-tree';
|
||||
import type { SourceMapInput, SourceMapLoader } from './types';
|
||||
/**
|
||||
* Recursively builds a tree structure out of sourcemap files, with each node
|
||||
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
|
||||
* `OriginalSource`s and `SourceMapTree`s.
|
||||
*
|
||||
* Every sourcemap is composed of a collection of source files and mappings
|
||||
* into locations of those source files. When we generate a `SourceMapTree` for
|
||||
* the sourcemap, we attempt to load each source file's own sourcemap. If it
|
||||
* does not have an associated sourcemap, it is considered an original,
|
||||
* unmodified source file.
|
||||
*/
|
||||
export default function buildSourceMapTree(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader): MapSourceType;
|
19
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/types/remapping.d.ts
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
import SourceMap from './source-map';
|
||||
import type { SourceMapInput, SourceMapLoader, Options } from './types';
|
||||
export type { SourceMapSegment, EncodedSourceMap, EncodedSourceMap as RawSourceMap, DecodedSourceMap, SourceMapInput, SourceMapLoader, LoaderContext, Options, } from './types';
|
||||
/**
|
||||
* Traces through all the mappings in the root sourcemap, through the sources
|
||||
* (and their sourcemaps), all the way back to the original source location.
|
||||
*
|
||||
* `loader` will be called every time we encounter a source file. If it returns
|
||||
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
|
||||
* it returns a falsey value, that source file is treated as an original,
|
||||
* unmodified source file.
|
||||
*
|
||||
* Pass `excludeContent` to exclude any self-containing source file content
|
||||
* from the output sourcemap.
|
||||
*
|
||||
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
|
||||
* VLQ encoded) mappings.
|
||||
*/
|
||||
export default function remapping(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader, options?: boolean | Options): SourceMap;
|
48
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/types/source-map-tree.d.ts
generated
vendored
Normal file
@ -0,0 +1,48 @@
|
||||
import { GenMapping } from '@jridgewell/gen-mapping';
|
||||
import type { TraceMap } from '@jridgewell/trace-mapping';
|
||||
export declare type SourceMapSegmentObject = {
|
||||
column: number;
|
||||
line: number;
|
||||
name: string;
|
||||
source: string;
|
||||
content: string | null;
|
||||
} | {
|
||||
column: null;
|
||||
line: null;
|
||||
name: null;
|
||||
source: null;
|
||||
content: null;
|
||||
};
|
||||
export declare type OriginalSource = {
|
||||
map: TraceMap;
|
||||
sources: Sources[];
|
||||
source: string;
|
||||
content: string | null;
|
||||
};
|
||||
export declare type MapSource = {
|
||||
map: TraceMap;
|
||||
sources: Sources[];
|
||||
source: string;
|
||||
content: string | null;
|
||||
};
|
||||
export declare type Sources = OriginalSource | MapSource;
|
||||
/**
|
||||
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
|
||||
* (which may themselves be SourceMapTrees).
|
||||
*/
|
||||
export declare function MapSource(map: TraceMap, sources: Sources[]): MapSource;
|
||||
/**
|
||||
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
|
||||
* segment tracing ends at the `OriginalSource`.
|
||||
*/
|
||||
export declare function OriginalSource(source: string, content: string | null): OriginalSource;
|
||||
/**
|
||||
* traceMappings is only called on the root level SourceMapTree, and begins the process of
|
||||
* resolving each mapping in terms of the original source files.
|
||||
*/
|
||||
export declare function traceMappings(tree: MapSource): GenMapping;
|
||||
/**
|
||||
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
|
||||
* child SourceMapTrees, until we find the original source map.
|
||||
*/
|
||||
export declare function originalPositionFor(source: Sources, line: number, column: number, name: string): SourceMapSegmentObject | null;
|
17
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/types/source-map.d.ts
generated
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
import type { GenMapping } from '@jridgewell/gen-mapping';
|
||||
import type { DecodedSourceMap, EncodedSourceMap, Options } from './types';
|
||||
/**
|
||||
* A SourceMap v3 compatible sourcemap, which only includes fields that were
|
||||
* provided to it.
|
||||
*/
|
||||
export default class SourceMap {
|
||||
file?: string | null;
|
||||
mappings: EncodedSourceMap['mappings'] | DecodedSourceMap['mappings'];
|
||||
sourceRoot?: string;
|
||||
names: string[];
|
||||
sources: (string | null)[];
|
||||
sourcesContent?: (string | null)[];
|
||||
version: 3;
|
||||
constructor(map: GenMapping, options: Options);
|
||||
toString(): string;
|
||||
}
|
14
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/types/types.d.ts
generated
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
import type { SourceMapInput } from '@jridgewell/trace-mapping';
|
||||
export type { SourceMapSegment, DecodedSourceMap, EncodedSourceMap, } from '@jridgewell/trace-mapping';
|
||||
export type { SourceMapInput };
|
||||
export declare type LoaderContext = {
|
||||
readonly importer: string;
|
||||
readonly depth: number;
|
||||
source: string;
|
||||
content: string | null | undefined;
|
||||
};
|
||||
export declare type SourceMapLoader = (file: string, ctx: LoaderContext) => SourceMapInput | null | undefined | void;
|
||||
export declare type Options = {
|
||||
excludeContent?: boolean;
|
||||
decodedMappings?: boolean;
|
||||
};
|
63
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/package.json
generated
vendored
Normal file
@ -0,0 +1,63 @@
|
||||
{
|
||||
"name": "@ampproject/remapping",
|
||||
"version": "2.2.0",
|
||||
"description": "Remap sequential sourcemaps through transformations to point at the original source code",
|
||||
"keywords": [
|
||||
"source",
|
||||
"map",
|
||||
"remap"
|
||||
],
|
||||
"main": "dist/remapping.umd.js",
|
||||
"module": "dist/remapping.mjs",
|
||||
"typings": "dist/types/remapping.d.ts",
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"author": "Justin Ridgewell <jridgewell@google.com>",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/ampproject/remapping.git"
|
||||
},
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">=6.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "run-s -n build:*",
|
||||
"build:rollup": "rollup -c rollup.config.js",
|
||||
"build:ts": "tsc --project tsconfig.build.json",
|
||||
"lint": "run-s -n lint:*",
|
||||
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||
"prebuild": "rm -rf dist",
|
||||
"prepublishOnly": "npm run preversion",
|
||||
"preversion": "run-s test build",
|
||||
"test": "run-s -n test:lint test:only",
|
||||
"test:debug": "node --inspect-brk node_modules/.bin/jest --runInBand",
|
||||
"test:lint": "run-s -n test:lint:*",
|
||||
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
||||
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||
"test:only": "jest --coverage",
|
||||
"test:watch": "jest --coverage --watch"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@rollup/plugin-typescript": "8.3.2",
|
||||
"@types/jest": "27.4.1",
|
||||
"@typescript-eslint/eslint-plugin": "5.20.0",
|
||||
"@typescript-eslint/parser": "5.20.0",
|
||||
"eslint": "8.14.0",
|
||||
"eslint-config-prettier": "8.5.0",
|
||||
"jest": "27.5.1",
|
||||
"jest-config": "27.5.1",
|
||||
"npm-run-all": "4.1.5",
|
||||
"prettier": "2.6.2",
|
||||
"rollup": "2.70.2",
|
||||
"ts-jest": "27.1.4",
|
||||
"tslib": "2.4.0",
|
||||
"typescript": "4.6.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"@jridgewell/gen-mapping": "^0.1.0",
|
||||
"@jridgewell/trace-mapping": "^0.3.9"
|
||||
}
|
||||
}
|
1
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@jridgewell/gen-mapping
generated
vendored
Symbolic link
@ -0,0 +1 @@
../../../@jridgewell+gen-mapping@0.1.1/node_modules/@jridgewell/gen-mapping
1
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@jridgewell/trace-mapping
generated
vendored
Symbolic link
@ -0,0 +1 @@
../../../@jridgewell+trace-mapping@0.3.14/node_modules/@jridgewell/trace-mapping
22
frontend_old/node_modules/.pnpm/@babel+code-frame@7.18.6/node_modules/@babel/code-frame/LICENSE
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2014-present Sebastian McKenzie and other contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
19
frontend_old/node_modules/.pnpm/@babel+code-frame@7.18.6/node_modules/@babel/code-frame/README.md
generated
vendored
Normal file
@ -0,0 +1,19 @@
# @babel/code-frame

> Generate errors that contain a code frame that point to source locations.

See our website [@babel/code-frame](https://babeljs.io/docs/en/babel-code-frame) for more information.

## Install

Using npm:

```sh
npm install --save-dev @babel/code-frame
```

or using yarn:

```sh
yarn add @babel/code-frame --dev
```
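The README above stops at installation; as an illustrative sketch (not part of the upstream README), this is how the `codeFrameColumns` export defined in `lib/index.js` below is typically called — the snippet and location are made up for the example:

```js
const { codeFrameColumns } = require('@babel/code-frame');

// Made-up source text and location; only the API shape comes from lib/index.js below.
const rawLines = [
  'class Example {',
  '  constructor() {',
  '    console.log("hi");',
  '  }',
  '}',
].join('\n');

const frame = codeFrameColumns(
  rawLines,
  { start: { line: 3, column: 5 } },
  { highlightCode: false, message: 'log statement here' }
);

console.log(frame);
// Prints the snippet with a line-number gutter, a ">" marker on line 3,
// and a "^" under column 5 followed by the message.
```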
163
frontend_old/node_modules/.pnpm/@babel+code-frame@7.18.6/node_modules/@babel/code-frame/lib/index.js
generated
vendored
Normal file
@ -0,0 +1,163 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.codeFrameColumns = codeFrameColumns;
|
||||
exports.default = _default;
|
||||
|
||||
var _highlight = require("@babel/highlight");
|
||||
|
||||
let deprecationWarningShown = false;
|
||||
|
||||
function getDefs(chalk) {
|
||||
return {
|
||||
gutter: chalk.grey,
|
||||
marker: chalk.red.bold,
|
||||
message: chalk.red.bold
|
||||
};
|
||||
}
|
||||
|
||||
const NEWLINE = /\r\n|[\n\r\u2028\u2029]/;
|
||||
|
||||
function getMarkerLines(loc, source, opts) {
|
||||
const startLoc = Object.assign({
|
||||
column: 0,
|
||||
line: -1
|
||||
}, loc.start);
|
||||
const endLoc = Object.assign({}, startLoc, loc.end);
|
||||
const {
|
||||
linesAbove = 2,
|
||||
linesBelow = 3
|
||||
} = opts || {};
|
||||
const startLine = startLoc.line;
|
||||
const startColumn = startLoc.column;
|
||||
const endLine = endLoc.line;
|
||||
const endColumn = endLoc.column;
|
||||
let start = Math.max(startLine - (linesAbove + 1), 0);
|
||||
let end = Math.min(source.length, endLine + linesBelow);
|
||||
|
||||
if (startLine === -1) {
|
||||
start = 0;
|
||||
}
|
||||
|
||||
if (endLine === -1) {
|
||||
end = source.length;
|
||||
}
|
||||
|
||||
const lineDiff = endLine - startLine;
|
||||
const markerLines = {};
|
||||
|
||||
if (lineDiff) {
|
||||
for (let i = 0; i <= lineDiff; i++) {
|
||||
const lineNumber = i + startLine;
|
||||
|
||||
if (!startColumn) {
|
||||
markerLines[lineNumber] = true;
|
||||
} else if (i === 0) {
|
||||
const sourceLength = source[lineNumber - 1].length;
|
||||
markerLines[lineNumber] = [startColumn, sourceLength - startColumn + 1];
|
||||
} else if (i === lineDiff) {
|
||||
markerLines[lineNumber] = [0, endColumn];
|
||||
} else {
|
||||
const sourceLength = source[lineNumber - i].length;
|
||||
markerLines[lineNumber] = [0, sourceLength];
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (startColumn === endColumn) {
|
||||
if (startColumn) {
|
||||
markerLines[startLine] = [startColumn, 0];
|
||||
} else {
|
||||
markerLines[startLine] = true;
|
||||
}
|
||||
} else {
|
||||
markerLines[startLine] = [startColumn, endColumn - startColumn];
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
start,
|
||||
end,
|
||||
markerLines
|
||||
};
|
||||
}
|
||||
|
||||
function codeFrameColumns(rawLines, loc, opts = {}) {
|
||||
const highlighted = (opts.highlightCode || opts.forceColor) && (0, _highlight.shouldHighlight)(opts);
|
||||
const chalk = (0, _highlight.getChalk)(opts);
|
||||
const defs = getDefs(chalk);
|
||||
|
||||
const maybeHighlight = (chalkFn, string) => {
|
||||
return highlighted ? chalkFn(string) : string;
|
||||
};
|
||||
|
||||
const lines = rawLines.split(NEWLINE);
|
||||
const {
|
||||
start,
|
||||
end,
|
||||
markerLines
|
||||
} = getMarkerLines(loc, lines, opts);
|
||||
const hasColumns = loc.start && typeof loc.start.column === "number";
|
||||
const numberMaxWidth = String(end).length;
|
||||
const highlightedLines = highlighted ? (0, _highlight.default)(rawLines, opts) : rawLines;
|
||||
let frame = highlightedLines.split(NEWLINE, end).slice(start, end).map((line, index) => {
|
||||
const number = start + 1 + index;
|
||||
const paddedNumber = ` ${number}`.slice(-numberMaxWidth);
|
||||
const gutter = ` ${paddedNumber} |`;
|
||||
const hasMarker = markerLines[number];
|
||||
const lastMarkerLine = !markerLines[number + 1];
|
||||
|
||||
if (hasMarker) {
|
||||
let markerLine = "";
|
||||
|
||||
if (Array.isArray(hasMarker)) {
|
||||
const markerSpacing = line.slice(0, Math.max(hasMarker[0] - 1, 0)).replace(/[^\t]/g, " ");
|
||||
const numberOfMarkers = hasMarker[1] || 1;
|
||||
markerLine = ["\n ", maybeHighlight(defs.gutter, gutter.replace(/\d/g, " ")), " ", markerSpacing, maybeHighlight(defs.marker, "^").repeat(numberOfMarkers)].join("");
|
||||
|
||||
if (lastMarkerLine && opts.message) {
|
||||
markerLine += " " + maybeHighlight(defs.message, opts.message);
|
||||
}
|
||||
}
|
||||
|
||||
return [maybeHighlight(defs.marker, ">"), maybeHighlight(defs.gutter, gutter), line.length > 0 ? ` ${line}` : "", markerLine].join("");
|
||||
} else {
|
||||
return ` ${maybeHighlight(defs.gutter, gutter)}${line.length > 0 ? ` ${line}` : ""}`;
|
||||
}
|
||||
}).join("\n");
|
||||
|
||||
if (opts.message && !hasColumns) {
|
||||
frame = `${" ".repeat(numberMaxWidth + 1)}${opts.message}\n${frame}`;
|
||||
}
|
||||
|
||||
if (highlighted) {
|
||||
return chalk.reset(frame);
|
||||
} else {
|
||||
return frame;
|
||||
}
|
||||
}
|
||||
|
||||
function _default(rawLines, lineNumber, colNumber, opts = {}) {
|
||||
if (!deprecationWarningShown) {
|
||||
deprecationWarningShown = true;
|
||||
const message = "Passing lineNumber and colNumber is deprecated to @babel/code-frame. Please use `codeFrameColumns`.";
|
||||
|
||||
if (process.emitWarning) {
|
||||
process.emitWarning(message, "DeprecationWarning");
|
||||
} else {
|
||||
const deprecationError = new Error(message);
|
||||
deprecationError.name = "DeprecationWarning";
|
||||
console.warn(new Error(message));
|
||||
}
|
||||
}
|
||||
|
||||
colNumber = Math.max(colNumber, 0);
|
||||
const location = {
|
||||
start: {
|
||||
column: colNumber,
|
||||
line: lineNumber
|
||||
}
|
||||
};
|
||||
return codeFrameColumns(rawLines, location, opts);
|
||||
}
|
30
frontend_old/node_modules/.pnpm/@babel+code-frame@7.18.6/node_modules/@babel/code-frame/package.json
generated
vendored
Normal file
@ -0,0 +1,30 @@
|
||||
{
|
||||
"name": "@babel/code-frame",
|
||||
"version": "7.18.6",
|
||||
"description": "Generate errors that contain a code frame that point to source locations.",
|
||||
"author": "The Babel Team (https://babel.dev/team)",
|
||||
"homepage": "https://babel.dev/docs/en/next/babel-code-frame",
|
||||
"bugs": "https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen",
|
||||
"license": "MIT",
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/babel/babel.git",
|
||||
"directory": "packages/babel-code-frame"
|
||||
},
|
||||
"main": "./lib/index.js",
|
||||
"dependencies": {
|
||||
"@babel/highlight": "^7.18.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/chalk": "^2.0.0",
|
||||
"chalk": "^2.0.0",
|
||||
"strip-ansi": "^4.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.9.0"
|
||||
},
|
||||
"type": "commonjs"
|
||||
}
|
1
frontend_old/node_modules/.pnpm/@babel+code-frame@7.18.6/node_modules/@babel/highlight
generated
vendored
Symbolic link
@ -0,0 +1 @@
../../../@babel+highlight@7.18.6/node_modules/@babel/highlight
22
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/LICENSE
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2014-present Sebastian McKenzie and other contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
19 frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/README.md generated vendored Normal file
@ -0,0 +1,19 @@
# @babel/compat-data

>

See our website [@babel/compat-data](https://babeljs.io/docs/en/babel-compat-data) for more information.

## Install

Using npm:

```sh
npm install --save @babel/compat-data
```

or using yarn:

```sh
yarn add @babel/compat-data
```
1 frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/corejs2-built-ins.js generated vendored Normal file
@ -0,0 +1 @@
module.exports = require("./data/corejs2-built-ins.json");
1 frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/corejs3-shipped-proposals.js generated vendored Normal file
@ -0,0 +1 @@
module.exports = require("./data/corejs3-shipped-proposals.json");
1789 frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/data/corejs2-built-ins.json generated vendored Normal file
File diff suppressed because it is too large
Load Diff
5 frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/data/corejs3-shipped-proposals.json generated vendored Normal file
@ -0,0 +1,5 @@
[
  "esnext.global-this",
  "esnext.promise.all-settled",
  "esnext.string.match-all"
]
18 frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/data/native-modules.json generated vendored Normal file
@ -0,0 +1,18 @@
{
  "es6.module": {
    "chrome": "61",
    "and_chr": "61",
    "edge": "16",
    "firefox": "60",
    "and_ff": "60",
    "node": "13.2.0",
    "opera": "48",
    "op_mob": "48",
    "safari": "10.1",
    "ios": "10.3",
    "samsung": "8.2",
    "android": "61",
    "electron": "2.0",
    "ios_saf": "10.3"
  }
}
22 frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/data/overlapping-plugins.json generated vendored Normal file
@ -0,0 +1,22 @@
{
  "transform-async-to-generator": [
    "bugfix/transform-async-arrows-in-class"
  ],
  "transform-parameters": [
    "bugfix/transform-edge-default-parameters",
    "bugfix/transform-safari-id-destructuring-collision-in-function-expression"
  ],
  "transform-function-name": [
    "bugfix/transform-edge-function-name"
  ],
  "transform-block-scoping": [
    "bugfix/transform-safari-block-shadowing",
    "bugfix/transform-safari-for-shadowing"
  ],
  "transform-template-literals": [
    "bugfix/transform-tagged-template-caching"
  ],
  "proposal-optional-chaining": [
    "bugfix/transform-v8-spread-parameters-in-optional-chaining"
  ]
}
157 frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/data/plugin-bugfixes.json generated vendored Normal file
File diff suppressed (generated vendored file)
Load Diff
478 frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/data/plugins.json generated vendored Normal file
File diff suppressed (generated vendored file)
Load Diff
1 frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/native-modules.js generated vendored Normal file
@ -0,0 +1 @@
module.exports = require("./data/native-modules.json");
1 frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/overlapping-plugins.js generated vendored Normal file
@ -0,0 +1 @@
module.exports = require("./data/overlapping-plugins.json");
40 frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/package.json generated vendored Normal file
@ -0,0 +1,40 @@
{
  "name": "@babel/compat-data",
  "version": "7.18.8",
  "author": "The Babel Team (https://babel.dev/team)",
  "license": "MIT",
  "description": "",
  "repository": {
    "type": "git",
    "url": "https://github.com/babel/babel.git",
    "directory": "packages/babel-compat-data"
  },
  "publishConfig": {
    "access": "public"
  },
  "exports": {
    "./plugins": "./plugins.js",
    "./native-modules": "./native-modules.js",
    "./corejs2-built-ins": "./corejs2-built-ins.js",
    "./corejs3-shipped-proposals": "./corejs3-shipped-proposals.js",
    "./overlapping-plugins": "./overlapping-plugins.js",
    "./plugin-bugfixes": "./plugin-bugfixes.js"
  },
  "scripts": {
    "build-data": "./scripts/download-compat-table.sh && node ./scripts/build-data.js && node ./scripts/build-modules-support.js && node ./scripts/build-bugfixes-targets.js"
  },
  "keywords": [
    "babel",
    "compat-table",
    "compat-data"
  ],
  "devDependencies": {
    "@mdn/browser-compat-data": "^4.0.10",
    "core-js-compat": "^3.22.1",
    "electron-to-chromium": "^1.4.113"
  },
  "engines": {
    "node": ">=6.9.0"
  },
  "type": "commonjs"
}
1 frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/plugin-bugfixes.js generated vendored Normal file
@ -0,0 +1 @@
module.exports = require("./data/plugin-bugfixes.json");
1 frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/plugins.js generated vendored Normal file
@ -0,0 +1 @@
module.exports = require("./data/plugins.json");
1 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@ampproject/remapping generated vendored Symbolic link
@ -0,0 +1 @@
../../../@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping
1 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/code-frame generated vendored Symbolic link
@ -0,0 +1 @@
../../../@babel+code-frame@7.18.6/node_modules/@babel/code-frame
22 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/LICENSE generated vendored Normal file
@ -0,0 +1,22 @@
MIT License

Copyright (c) 2014-present Sebastian McKenzie and other contributors

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/README.md generated vendored Normal file
@ -0,0 +1,19 @@
# @babel/core

> Babel compiler core.

See our website [@babel/core](https://babeljs.io/docs/en/babel-core) for more information or the [issues](https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3A%22pkg%3A%20core%22+is%3Aopen) associated with this package.

## Install

Using npm:

```sh
npm install --save-dev @babel/core
```

or using yarn:

```sh
yarn add @babel/core --dev
```
1 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/cache-contexts.js generated vendored Normal file
@ -0,0 +1 @@
0 && 0;
327 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/caching.js generated vendored Normal file
File diff suppressed (generated vendored file)
Load Diff
566 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/config-chain.js generated vendored Normal file
File diff suppressed (generated vendored file)
Load Diff
246 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/config-descriptors.js generated vendored Normal file
360 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/files/configuration.js generated vendored Normal file
43 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/files/import-meta-resolve.js generated vendored Normal file
12 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/files/import.js generated vendored Normal file
69 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/files/index-browser.js generated vendored Normal file
87 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/files/index.js generated vendored Normal file
121 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/files/module-types.js generated vendored Normal file
77 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/files/package.js generated vendored Normal file
275 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/files/plugins.js generated vendored Normal file
1 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/files/types.js generated vendored Normal file
@@ -0,0 +1 @@
0 && 0;
46 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/files/utils.js generated vendored Normal file
380 frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/full.js generated vendored Normal file
109
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/helpers/config-api.js
generated
vendored
Normal file
@ -0,0 +1,109 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.makeConfigAPI = makeConfigAPI;
|
||||
exports.makePluginAPI = makePluginAPI;
|
||||
exports.makePresetAPI = makePresetAPI;
|
||||
|
||||
function _semver() {
|
||||
const data = require("semver");
|
||||
|
||||
_semver = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
var _ = require("../../");
|
||||
|
||||
var _caching = require("../caching");
|
||||
|
||||
var Context = require("../cache-contexts");
|
||||
|
||||
function makeConfigAPI(cache) {
|
||||
const env = value => cache.using(data => {
|
||||
if (typeof value === "undefined") return data.envName;
|
||||
|
||||
if (typeof value === "function") {
|
||||
return (0, _caching.assertSimpleType)(value(data.envName));
|
||||
}
|
||||
|
||||
return (Array.isArray(value) ? value : [value]).some(entry => {
|
||||
if (typeof entry !== "string") {
|
||||
throw new Error("Unexpected non-string value");
|
||||
}
|
||||
|
||||
return entry === data.envName;
|
||||
});
|
||||
});
|
||||
|
||||
const caller = cb => cache.using(data => (0, _caching.assertSimpleType)(cb(data.caller)));
|
||||
|
||||
return {
|
||||
version: _.version,
|
||||
cache: cache.simple(),
|
||||
env,
|
||||
async: () => false,
|
||||
caller,
|
||||
assertVersion
|
||||
};
|
||||
}
|
||||
|
||||
function makePresetAPI(cache, externalDependencies) {
|
||||
const targets = () => JSON.parse(cache.using(data => JSON.stringify(data.targets)));
|
||||
|
||||
const addExternalDependency = ref => {
|
||||
externalDependencies.push(ref);
|
||||
};
|
||||
|
||||
return Object.assign({}, makeConfigAPI(cache), {
|
||||
targets,
|
||||
addExternalDependency
|
||||
});
|
||||
}
|
||||
|
||||
function makePluginAPI(cache, externalDependencies) {
|
||||
const assumption = name => cache.using(data => data.assumptions[name]);
|
||||
|
||||
return Object.assign({}, makePresetAPI(cache, externalDependencies), {
|
||||
assumption
|
||||
});
|
||||
}
|
||||
|
||||
function assertVersion(range) {
|
||||
if (typeof range === "number") {
|
||||
if (!Number.isInteger(range)) {
|
||||
throw new Error("Expected string or integer value.");
|
||||
}
|
||||
|
||||
range = `^${range}.0.0-0`;
|
||||
}
|
||||
|
||||
if (typeof range !== "string") {
|
||||
throw new Error("Expected string or integer value.");
|
||||
}
|
||||
|
||||
if (_semver().satisfies(_.version, range)) return;
|
||||
const limit = Error.stackTraceLimit;
|
||||
|
||||
if (typeof limit === "number" && limit < 25) {
|
||||
Error.stackTraceLimit = 25;
|
||||
}
|
||||
|
||||
const err = new Error(`Requires Babel "${range}", but was loaded with "${_.version}". ` + `If you are sure you have a compatible version of @babel/core, ` + `it is likely that something in your build process is loading the ` + `wrong version. Inspect the stack trace of this error to look for ` + `the first entry that doesn't mention "@babel/core" or "babel-core" ` + `to see what is calling Babel.`);
|
||||
|
||||
if (typeof limit === "number") {
|
||||
Error.stackTraceLimit = limit;
|
||||
}
|
||||
|
||||
throw Object.assign(err, {
|
||||
code: "BABEL_VERSION_UNSUPPORTED",
|
||||
version: _.version,
|
||||
range
|
||||
});
|
||||
}
|
||||
|
||||
0 && 0;
|
26
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/helpers/deep-array.js
generated
vendored
Normal file
@ -0,0 +1,26 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.finalize = finalize;
exports.flattenToSet = flattenToSet;

function finalize(deepArr) {
  return Object.freeze(deepArr);
}

function flattenToSet(arr) {
  const result = new Set();
  const stack = [arr];

  while (stack.length > 0) {
    for (const el of stack.pop()) {
      if (Array.isArray(el)) stack.push(el);else result.add(el);
    }
  }

  return result;
}

0 && 0;
12
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/helpers/environment.js
generated
vendored
Normal file
@ -0,0 +1,12 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.getEnv = getEnv;

function getEnv(defaultValue = "development") {
  return process.env.BABEL_ENV || process.env.NODE_ENV || defaultValue;
}

0 && 0;
77
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/index.js
generated
vendored
Normal file
@ -0,0 +1,77 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.createConfigItem = createConfigItem;
|
||||
exports.createConfigItemSync = exports.createConfigItemAsync = void 0;
|
||||
Object.defineProperty(exports, "default", {
|
||||
enumerable: true,
|
||||
get: function () {
|
||||
return _full.default;
|
||||
}
|
||||
});
|
||||
exports.loadPartialConfigSync = exports.loadPartialConfigAsync = exports.loadPartialConfig = exports.loadOptionsSync = exports.loadOptionsAsync = exports.loadOptions = void 0;
|
||||
|
||||
function _gensync() {
|
||||
const data = require("gensync");
|
||||
|
||||
_gensync = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
var _full = require("./full");
|
||||
|
||||
var _partial = require("./partial");
|
||||
|
||||
var _item = require("./item");
|
||||
|
||||
const loadOptionsRunner = _gensync()(function* (opts) {
|
||||
var _config$options;
|
||||
|
||||
const config = yield* (0, _full.default)(opts);
|
||||
return (_config$options = config == null ? void 0 : config.options) != null ? _config$options : null;
|
||||
});
|
||||
|
||||
const createConfigItemRunner = _gensync()(_item.createConfigItem);
|
||||
|
||||
const maybeErrback = runner => (opts, callback) => {
|
||||
if (callback === undefined && typeof opts === "function") {
|
||||
callback = opts;
|
||||
opts = undefined;
|
||||
}
|
||||
|
||||
return callback ? runner.errback(opts, callback) : runner.sync(opts);
|
||||
};
|
||||
|
||||
const loadPartialConfig = maybeErrback(_partial.loadPartialConfig);
|
||||
exports.loadPartialConfig = loadPartialConfig;
|
||||
const loadPartialConfigSync = _partial.loadPartialConfig.sync;
|
||||
exports.loadPartialConfigSync = loadPartialConfigSync;
|
||||
const loadPartialConfigAsync = _partial.loadPartialConfig.async;
|
||||
exports.loadPartialConfigAsync = loadPartialConfigAsync;
|
||||
const loadOptions = maybeErrback(loadOptionsRunner);
|
||||
exports.loadOptions = loadOptions;
|
||||
const loadOptionsSync = loadOptionsRunner.sync;
|
||||
exports.loadOptionsSync = loadOptionsSync;
|
||||
const loadOptionsAsync = loadOptionsRunner.async;
|
||||
exports.loadOptionsAsync = loadOptionsAsync;
|
||||
const createConfigItemSync = createConfigItemRunner.sync;
|
||||
exports.createConfigItemSync = createConfigItemSync;
|
||||
const createConfigItemAsync = createConfigItemRunner.async;
|
||||
exports.createConfigItemAsync = createConfigItemAsync;
|
||||
|
||||
function createConfigItem(target, options, callback) {
|
||||
if (callback !== undefined) {
|
||||
return createConfigItemRunner.errback(target, options, callback);
|
||||
} else if (typeof options === "function") {
|
||||
return createConfigItemRunner.errback(target, undefined, callback);
|
||||
} else {
|
||||
return createConfigItemRunner.sync(target, options);
|
||||
}
|
||||
}
|
||||
|
||||
0 && 0;
|
77
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/item.js
generated
vendored
Normal file
@ -0,0 +1,77 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.createConfigItem = createConfigItem;
|
||||
exports.createItemFromDescriptor = createItemFromDescriptor;
|
||||
exports.getItemDescriptor = getItemDescriptor;
|
||||
|
||||
function _path() {
|
||||
const data = require("path");
|
||||
|
||||
_path = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
var _configDescriptors = require("./config-descriptors");
|
||||
|
||||
function createItemFromDescriptor(desc) {
|
||||
return new ConfigItem(desc);
|
||||
}
|
||||
|
||||
function* createConfigItem(value, {
|
||||
dirname = ".",
|
||||
type
|
||||
} = {}) {
|
||||
const descriptor = yield* (0, _configDescriptors.createDescriptor)(value, _path().resolve(dirname), {
|
||||
type,
|
||||
alias: "programmatic item"
|
||||
});
|
||||
return createItemFromDescriptor(descriptor);
|
||||
}
|
||||
|
||||
function getItemDescriptor(item) {
|
||||
if (item != null && item[CONFIG_ITEM_BRAND]) {
|
||||
return item._descriptor;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const CONFIG_ITEM_BRAND = Symbol.for("@babel/core@7 - ConfigItem");
|
||||
|
||||
class ConfigItem {
|
||||
constructor(descriptor) {
|
||||
this._descriptor = void 0;
|
||||
this[CONFIG_ITEM_BRAND] = true;
|
||||
this.value = void 0;
|
||||
this.options = void 0;
|
||||
this.dirname = void 0;
|
||||
this.name = void 0;
|
||||
this.file = void 0;
|
||||
this._descriptor = descriptor;
|
||||
Object.defineProperty(this, "_descriptor", {
|
||||
enumerable: false
|
||||
});
|
||||
Object.defineProperty(this, CONFIG_ITEM_BRAND, {
|
||||
enumerable: false
|
||||
});
|
||||
this.value = this._descriptor.value;
|
||||
this.options = this._descriptor.options;
|
||||
this.dirname = this._descriptor.dirname;
|
||||
this.name = this._descriptor.name;
|
||||
this.file = this._descriptor.file ? {
|
||||
request: this._descriptor.file.request,
|
||||
resolved: this._descriptor.file.resolved
|
||||
} : undefined;
|
||||
Object.freeze(this);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
Object.freeze(ConfigItem.prototype);
|
||||
0 && 0;
|
198
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/partial.js
generated
vendored
Normal file
@ -0,0 +1,198 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = loadPrivatePartialConfig;
|
||||
exports.loadPartialConfig = void 0;
|
||||
|
||||
function _path() {
|
||||
const data = require("path");
|
||||
|
||||
_path = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _gensync() {
|
||||
const data = require("gensync");
|
||||
|
||||
_gensync = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
var _plugin = require("./plugin");
|
||||
|
||||
var _util = require("./util");
|
||||
|
||||
var _item = require("./item");
|
||||
|
||||
var _configChain = require("./config-chain");
|
||||
|
||||
var _environment = require("./helpers/environment");
|
||||
|
||||
var _options = require("./validation/options");
|
||||
|
||||
var _files = require("./files");
|
||||
|
||||
var _resolveTargets = require("./resolve-targets");
|
||||
|
||||
const _excluded = ["showIgnoredFiles"];
|
||||
|
||||
function _objectWithoutPropertiesLoose(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } return target; }
|
||||
|
||||
function resolveRootMode(rootDir, rootMode) {
|
||||
switch (rootMode) {
|
||||
case "root":
|
||||
return rootDir;
|
||||
|
||||
case "upward-optional":
|
||||
{
|
||||
const upwardRootDir = (0, _files.findConfigUpwards)(rootDir);
|
||||
return upwardRootDir === null ? rootDir : upwardRootDir;
|
||||
}
|
||||
|
||||
case "upward":
|
||||
{
|
||||
const upwardRootDir = (0, _files.findConfigUpwards)(rootDir);
|
||||
if (upwardRootDir !== null) return upwardRootDir;
|
||||
throw Object.assign(new Error(`Babel was run with rootMode:"upward" but a root could not ` + `be found when searching upward from "${rootDir}".\n` + `One of the following config files must be in the directory tree: ` + `"${_files.ROOT_CONFIG_FILENAMES.join(", ")}".`), {
|
||||
code: "BABEL_ROOT_NOT_FOUND",
|
||||
dirname: rootDir
|
||||
});
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Assertion failure - unknown rootMode value.`);
|
||||
}
|
||||
}
|
||||
|
||||
function* loadPrivatePartialConfig(inputOpts) {
|
||||
if (inputOpts != null && (typeof inputOpts !== "object" || Array.isArray(inputOpts))) {
|
||||
throw new Error("Babel options must be an object, null, or undefined");
|
||||
}
|
||||
|
||||
const args = inputOpts ? (0, _options.validate)("arguments", inputOpts) : {};
|
||||
const {
|
||||
envName = (0, _environment.getEnv)(),
|
||||
cwd = ".",
|
||||
root: rootDir = ".",
|
||||
rootMode = "root",
|
||||
caller,
|
||||
cloneInputAst = true
|
||||
} = args;
|
||||
|
||||
const absoluteCwd = _path().resolve(cwd);
|
||||
|
||||
const absoluteRootDir = resolveRootMode(_path().resolve(absoluteCwd, rootDir), rootMode);
|
||||
const filename = typeof args.filename === "string" ? _path().resolve(cwd, args.filename) : undefined;
|
||||
const showConfigPath = yield* (0, _files.resolveShowConfigPath)(absoluteCwd);
|
||||
const context = {
|
||||
filename,
|
||||
cwd: absoluteCwd,
|
||||
root: absoluteRootDir,
|
||||
envName,
|
||||
caller,
|
||||
showConfig: showConfigPath === filename
|
||||
};
|
||||
const configChain = yield* (0, _configChain.buildRootChain)(args, context);
|
||||
if (!configChain) return null;
|
||||
const merged = {
|
||||
assumptions: {}
|
||||
};
|
||||
configChain.options.forEach(opts => {
|
||||
(0, _util.mergeOptions)(merged, opts);
|
||||
});
|
||||
const options = Object.assign({}, merged, {
|
||||
targets: (0, _resolveTargets.resolveTargets)(merged, absoluteRootDir),
|
||||
cloneInputAst,
|
||||
babelrc: false,
|
||||
configFile: false,
|
||||
browserslistConfigFile: false,
|
||||
passPerPreset: false,
|
||||
envName: context.envName,
|
||||
cwd: context.cwd,
|
||||
root: context.root,
|
||||
rootMode: "root",
|
||||
filename: typeof context.filename === "string" ? context.filename : undefined,
|
||||
plugins: configChain.plugins.map(descriptor => (0, _item.createItemFromDescriptor)(descriptor)),
|
||||
presets: configChain.presets.map(descriptor => (0, _item.createItemFromDescriptor)(descriptor))
|
||||
});
|
||||
return {
|
||||
options,
|
||||
context,
|
||||
fileHandling: configChain.fileHandling,
|
||||
ignore: configChain.ignore,
|
||||
babelrc: configChain.babelrc,
|
||||
config: configChain.config,
|
||||
files: configChain.files
|
||||
};
|
||||
}
|
||||
|
||||
const loadPartialConfig = _gensync()(function* (opts) {
|
||||
let showIgnoredFiles = false;
|
||||
|
||||
if (typeof opts === "object" && opts !== null && !Array.isArray(opts)) {
|
||||
var _opts = opts;
|
||||
({
|
||||
showIgnoredFiles
|
||||
} = _opts);
|
||||
opts = _objectWithoutPropertiesLoose(_opts, _excluded);
|
||||
_opts;
|
||||
}
|
||||
|
||||
const result = yield* loadPrivatePartialConfig(opts);
|
||||
if (!result) return null;
|
||||
const {
|
||||
options,
|
||||
babelrc,
|
||||
ignore,
|
||||
config,
|
||||
fileHandling,
|
||||
files
|
||||
} = result;
|
||||
|
||||
if (fileHandling === "ignored" && !showIgnoredFiles) {
|
||||
return null;
|
||||
}
|
||||
|
||||
(options.plugins || []).forEach(item => {
|
||||
if (item.value instanceof _plugin.default) {
|
||||
throw new Error("Passing cached plugin instances is not supported in " + "babel.loadPartialConfig()");
|
||||
}
|
||||
});
|
||||
return new PartialConfig(options, babelrc ? babelrc.filepath : undefined, ignore ? ignore.filepath : undefined, config ? config.filepath : undefined, fileHandling, files);
|
||||
});
|
||||
|
||||
exports.loadPartialConfig = loadPartialConfig;
|
||||
|
||||
class PartialConfig {
|
||||
constructor(options, babelrc, ignore, config, fileHandling, files) {
|
||||
this.options = void 0;
|
||||
this.babelrc = void 0;
|
||||
this.babelignore = void 0;
|
||||
this.config = void 0;
|
||||
this.fileHandling = void 0;
|
||||
this.files = void 0;
|
||||
this.options = options;
|
||||
this.babelignore = ignore;
|
||||
this.babelrc = babelrc;
|
||||
this.config = config;
|
||||
this.fileHandling = fileHandling;
|
||||
this.files = files;
|
||||
Object.freeze(this);
|
||||
}
|
||||
|
||||
hasFilesystemConfig() {
|
||||
return this.babelrc !== undefined || this.config !== undefined;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
Object.freeze(PartialConfig.prototype);
|
||||
0 && 0;
|
46
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/pattern-to-regex.js
generated
vendored
Normal file
@ -0,0 +1,46 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = pathToPattern;
|
||||
|
||||
function _path() {
|
||||
const data = require("path");
|
||||
|
||||
_path = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
const sep = `\\${_path().sep}`;
|
||||
const endSep = `(?:${sep}|$)`;
|
||||
const substitution = `[^${sep}]+`;
|
||||
const starPat = `(?:${substitution}${sep})`;
|
||||
const starPatLast = `(?:${substitution}${endSep})`;
|
||||
const starStarPat = `${starPat}*?`;
|
||||
const starStarPatLast = `${starPat}*?${starPatLast}?`;
|
||||
|
||||
function escapeRegExp(string) {
|
||||
return string.replace(/[|\\{}()[\]^$+*?.]/g, "\\$&");
|
||||
}
|
||||
|
||||
function pathToPattern(pattern, dirname) {
|
||||
const parts = _path().resolve(dirname, pattern).split(_path().sep);
|
||||
|
||||
return new RegExp(["^", ...parts.map((part, i) => {
|
||||
const last = i === parts.length - 1;
|
||||
if (part === "**") return last ? starStarPatLast : starStarPat;
|
||||
if (part === "*") return last ? starPatLast : starPat;
|
||||
|
||||
if (part.indexOf("*.") === 0) {
|
||||
return substitution + escapeRegExp(part.slice(1)) + (last ? endSep : sep);
|
||||
}
|
||||
|
||||
return escapeRegExp(part) + (last ? endSep : sep);
|
||||
})].join(""));
|
||||
}
|
||||
|
||||
0 && 0;
|
35
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/plugin.js
generated
vendored
Normal file
@ -0,0 +1,35 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = void 0;

var _deepArray = require("./helpers/deep-array");

class Plugin {
  constructor(plugin, options, key, externalDependencies = (0, _deepArray.finalize)([])) {
    this.key = void 0;
    this.manipulateOptions = void 0;
    this.post = void 0;
    this.pre = void 0;
    this.visitor = void 0;
    this.parserOverride = void 0;
    this.generatorOverride = void 0;
    this.options = void 0;
    this.externalDependencies = void 0;
    this.key = plugin.name || key;
    this.manipulateOptions = plugin.manipulateOptions;
    this.post = plugin.post;
    this.pre = plugin.pre;
    this.visitor = plugin.visitor || {};
    this.parserOverride = plugin.parserOverride;
    this.generatorOverride = plugin.generatorOverride;
    this.options = options;
    this.externalDependencies = externalDependencies;
  }

}

exports.default = Plugin;
0 && 0;
140
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/printer.js
generated
vendored
Normal file
@ -0,0 +1,140 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.ConfigPrinter = exports.ChainFormatter = void 0;
|
||||
|
||||
function _gensync() {
|
||||
const data = require("gensync");
|
||||
|
||||
_gensync = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
const ChainFormatter = {
|
||||
Programmatic: 0,
|
||||
Config: 1
|
||||
};
|
||||
exports.ChainFormatter = ChainFormatter;
|
||||
const Formatter = {
|
||||
title(type, callerName, filepath) {
|
||||
let title = "";
|
||||
|
||||
if (type === ChainFormatter.Programmatic) {
|
||||
title = "programmatic options";
|
||||
|
||||
if (callerName) {
|
||||
title += " from " + callerName;
|
||||
}
|
||||
} else {
|
||||
title = "config " + filepath;
|
||||
}
|
||||
|
||||
return title;
|
||||
},
|
||||
|
||||
loc(index, envName) {
|
||||
let loc = "";
|
||||
|
||||
if (index != null) {
|
||||
loc += `.overrides[${index}]`;
|
||||
}
|
||||
|
||||
if (envName != null) {
|
||||
loc += `.env["${envName}"]`;
|
||||
}
|
||||
|
||||
return loc;
|
||||
},
|
||||
|
||||
*optionsAndDescriptors(opt) {
|
||||
const content = Object.assign({}, opt.options);
|
||||
delete content.overrides;
|
||||
delete content.env;
|
||||
const pluginDescriptors = [...(yield* opt.plugins())];
|
||||
|
||||
if (pluginDescriptors.length) {
|
||||
content.plugins = pluginDescriptors.map(d => descriptorToConfig(d));
|
||||
}
|
||||
|
||||
const presetDescriptors = [...(yield* opt.presets())];
|
||||
|
||||
if (presetDescriptors.length) {
|
||||
content.presets = [...presetDescriptors].map(d => descriptorToConfig(d));
|
||||
}
|
||||
|
||||
return JSON.stringify(content, undefined, 2);
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
function descriptorToConfig(d) {
|
||||
var _d$file;
|
||||
|
||||
let name = (_d$file = d.file) == null ? void 0 : _d$file.request;
|
||||
|
||||
if (name == null) {
|
||||
if (typeof d.value === "object") {
|
||||
name = d.value;
|
||||
} else if (typeof d.value === "function") {
|
||||
name = `[Function: ${d.value.toString().slice(0, 50)} ... ]`;
|
||||
}
|
||||
}
|
||||
|
||||
if (name == null) {
|
||||
name = "[Unknown]";
|
||||
}
|
||||
|
||||
if (d.options === undefined) {
|
||||
return name;
|
||||
} else if (d.name == null) {
|
||||
return [name, d.options];
|
||||
} else {
|
||||
return [name, d.options, d.name];
|
||||
}
|
||||
}
|
||||
|
||||
class ConfigPrinter {
|
||||
constructor() {
|
||||
this._stack = [];
|
||||
}
|
||||
|
||||
configure(enabled, type, {
|
||||
callerName,
|
||||
filepath
|
||||
}) {
|
||||
if (!enabled) return () => {};
|
||||
return (content, index, envName) => {
|
||||
this._stack.push({
|
||||
type,
|
||||
callerName,
|
||||
filepath,
|
||||
content,
|
||||
index,
|
||||
envName
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
static *format(config) {
|
||||
let title = Formatter.title(config.type, config.callerName, config.filepath);
|
||||
const loc = Formatter.loc(config.index, config.envName);
|
||||
if (loc) title += ` ${loc}`;
|
||||
const content = yield* Formatter.optionsAndDescriptors(config.content);
|
||||
return `${title}\n${content}`;
|
||||
}
|
||||
|
||||
*output() {
|
||||
if (this._stack.length === 0) return "";
|
||||
const configs = yield* _gensync().all(this._stack.map(s => ConfigPrinter.format(s)));
|
||||
return configs.join("\n\n");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
exports.ConfigPrinter = ConfigPrinter;
|
||||
0 && 0;
|
47
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/resolve-targets-browser.js
generated
vendored
Normal file
@ -0,0 +1,47 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.resolveBrowserslistConfigFile = resolveBrowserslistConfigFile;
|
||||
exports.resolveTargets = resolveTargets;
|
||||
|
||||
function _helperCompilationTargets() {
|
||||
const data = require("@babel/helper-compilation-targets");
|
||||
|
||||
_helperCompilationTargets = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function resolveBrowserslistConfigFile(browserslistConfigFile, configFilePath) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function resolveTargets(options, root) {
|
||||
const optTargets = options.targets;
|
||||
let targets;
|
||||
|
||||
if (typeof optTargets === "string" || Array.isArray(optTargets)) {
|
||||
targets = {
|
||||
browsers: optTargets
|
||||
};
|
||||
} else if (optTargets) {
|
||||
if ("esmodules" in optTargets) {
|
||||
targets = Object.assign({}, optTargets, {
|
||||
esmodules: "intersect"
|
||||
});
|
||||
} else {
|
||||
targets = optTargets;
|
||||
}
|
||||
}
|
||||
|
||||
return (0, _helperCompilationTargets().default)(targets, {
|
||||
ignoreBrowserslistConfig: true,
|
||||
browserslistEnv: options.browserslistEnv
|
||||
});
|
||||
}
|
||||
|
||||
0 && 0;
|
73
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/resolve-targets.js
generated
vendored
Normal file
@ -0,0 +1,73 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.resolveBrowserslistConfigFile = resolveBrowserslistConfigFile;
|
||||
exports.resolveTargets = resolveTargets;
|
||||
|
||||
function _path() {
|
||||
const data = require("path");
|
||||
|
||||
_path = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _helperCompilationTargets() {
|
||||
const data = require("@babel/helper-compilation-targets");
|
||||
|
||||
_helperCompilationTargets = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
({});
|
||||
|
||||
function resolveBrowserslistConfigFile(browserslistConfigFile, configFileDir) {
|
||||
return _path().resolve(configFileDir, browserslistConfigFile);
|
||||
}
|
||||
|
||||
function resolveTargets(options, root) {
|
||||
const optTargets = options.targets;
|
||||
let targets;
|
||||
|
||||
if (typeof optTargets === "string" || Array.isArray(optTargets)) {
|
||||
targets = {
|
||||
browsers: optTargets
|
||||
};
|
||||
} else if (optTargets) {
|
||||
if ("esmodules" in optTargets) {
|
||||
targets = Object.assign({}, optTargets, {
|
||||
esmodules: "intersect"
|
||||
});
|
||||
} else {
|
||||
targets = optTargets;
|
||||
}
|
||||
}
|
||||
|
||||
const {
|
||||
browserslistConfigFile
|
||||
} = options;
|
||||
let configFile;
|
||||
let ignoreBrowserslistConfig = false;
|
||||
|
||||
if (typeof browserslistConfigFile === "string") {
|
||||
configFile = browserslistConfigFile;
|
||||
} else {
|
||||
ignoreBrowserslistConfig = browserslistConfigFile === false;
|
||||
}
|
||||
|
||||
return (0, _helperCompilationTargets().default)(targets, {
|
||||
ignoreBrowserslistConfig,
|
||||
configFile,
|
||||
configPath: root,
|
||||
browserslistEnv: options.browserslistEnv
|
||||
});
|
||||
}
|
||||
|
||||
0 && 0;
|
33
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/util.js
generated
vendored
Normal file
@ -0,0 +1,33 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.isIterableIterator = isIterableIterator;
exports.mergeOptions = mergeOptions;

function mergeOptions(target, source) {
  for (const k of Object.keys(source)) {
    if ((k === "parserOpts" || k === "generatorOpts" || k === "assumptions") && source[k]) {
      const parserOpts = source[k];
      const targetObj = target[k] || (target[k] = {});
      mergeDefaultFields(targetObj, parserOpts);
    } else {
      const val = source[k];
      if (val !== undefined) target[k] = val;
    }
  }
}

function mergeDefaultFields(target, source) {
  for (const k of Object.keys(source)) {
    const val = source[k];
    if (val !== undefined) target[k] = val;
  }
}

function isIterableIterator(value) {
  return !!value && typeof value.next === "function" && typeof value[Symbol.iterator] === "function";
}

0 && 0;
354
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/validation/option-assertions.js
generated
vendored
Normal file
@ -0,0 +1,354 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.access = access;
|
||||
exports.assertArray = assertArray;
|
||||
exports.assertAssumptions = assertAssumptions;
|
||||
exports.assertBabelrcSearch = assertBabelrcSearch;
|
||||
exports.assertBoolean = assertBoolean;
|
||||
exports.assertCallerMetadata = assertCallerMetadata;
|
||||
exports.assertCompact = assertCompact;
|
||||
exports.assertConfigApplicableTest = assertConfigApplicableTest;
|
||||
exports.assertConfigFileSearch = assertConfigFileSearch;
|
||||
exports.assertFunction = assertFunction;
|
||||
exports.assertIgnoreList = assertIgnoreList;
|
||||
exports.assertInputSourceMap = assertInputSourceMap;
|
||||
exports.assertObject = assertObject;
|
||||
exports.assertPluginList = assertPluginList;
|
||||
exports.assertRootMode = assertRootMode;
|
||||
exports.assertSourceMaps = assertSourceMaps;
|
||||
exports.assertSourceType = assertSourceType;
|
||||
exports.assertString = assertString;
|
||||
exports.assertTargets = assertTargets;
|
||||
exports.msg = msg;
|
||||
|
||||
function _helperCompilationTargets() {
|
||||
const data = require("@babel/helper-compilation-targets");
|
||||
|
||||
_helperCompilationTargets = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
var _options = require("./options");
|
||||
|
||||
function msg(loc) {
|
||||
switch (loc.type) {
|
||||
case "root":
|
||||
return ``;
|
||||
|
||||
case "env":
|
||||
return `${msg(loc.parent)}.env["${loc.name}"]`;
|
||||
|
||||
case "overrides":
|
||||
return `${msg(loc.parent)}.overrides[${loc.index}]`;
|
||||
|
||||
case "option":
|
||||
return `${msg(loc.parent)}.${loc.name}`;
|
||||
|
||||
case "access":
|
||||
return `${msg(loc.parent)}[${JSON.stringify(loc.name)}]`;
|
||||
|
||||
default:
|
||||
throw new Error(`Assertion failure: Unknown type ${loc.type}`);
|
||||
}
|
||||
}
|
||||
|
||||
function access(loc, name) {
|
||||
return {
|
||||
type: "access",
|
||||
name,
|
||||
parent: loc
|
||||
};
|
||||
}
|
||||
|
||||
function assertRootMode(loc, value) {
|
||||
if (value !== undefined && value !== "root" && value !== "upward" && value !== "upward-optional") {
|
||||
throw new Error(`${msg(loc)} must be a "root", "upward", "upward-optional" or undefined`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function assertSourceMaps(loc, value) {
|
||||
if (value !== undefined && typeof value !== "boolean" && value !== "inline" && value !== "both") {
|
||||
throw new Error(`${msg(loc)} must be a boolean, "inline", "both", or undefined`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function assertCompact(loc, value) {
|
||||
if (value !== undefined && typeof value !== "boolean" && value !== "auto") {
|
||||
throw new Error(`${msg(loc)} must be a boolean, "auto", or undefined`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function assertSourceType(loc, value) {
|
||||
if (value !== undefined && value !== "module" && value !== "script" && value !== "unambiguous") {
|
||||
throw new Error(`${msg(loc)} must be "module", "script", "unambiguous", or undefined`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function assertCallerMetadata(loc, value) {
|
||||
const obj = assertObject(loc, value);
|
||||
|
||||
if (obj) {
|
||||
if (typeof obj.name !== "string") {
|
||||
throw new Error(`${msg(loc)} set but does not contain "name" property string`);
|
||||
}
|
||||
|
||||
for (const prop of Object.keys(obj)) {
|
||||
const propLoc = access(loc, prop);
|
||||
const value = obj[prop];
|
||||
|
||||
if (value != null && typeof value !== "boolean" && typeof value !== "string" && typeof value !== "number") {
|
||||
throw new Error(`${msg(propLoc)} must be null, undefined, a boolean, a string, or a number.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function assertInputSourceMap(loc, value) {
|
||||
if (value !== undefined && typeof value !== "boolean" && (typeof value !== "object" || !value)) {
|
||||
throw new Error(`${msg(loc)} must be a boolean, object, or undefined`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function assertString(loc, value) {
|
||||
if (value !== undefined && typeof value !== "string") {
|
||||
throw new Error(`${msg(loc)} must be a string, or undefined`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function assertFunction(loc, value) {
|
||||
if (value !== undefined && typeof value !== "function") {
|
||||
throw new Error(`${msg(loc)} must be a function, or undefined`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function assertBoolean(loc, value) {
|
||||
if (value !== undefined && typeof value !== "boolean") {
|
||||
throw new Error(`${msg(loc)} must be a boolean, or undefined`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function assertObject(loc, value) {
|
||||
if (value !== undefined && (typeof value !== "object" || Array.isArray(value) || !value)) {
|
||||
throw new Error(`${msg(loc)} must be an object, or undefined`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function assertArray(loc, value) {
|
||||
if (value != null && !Array.isArray(value)) {
|
||||
throw new Error(`${msg(loc)} must be an array, or undefined`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function assertIgnoreList(loc, value) {
|
||||
const arr = assertArray(loc, value);
|
||||
|
||||
if (arr) {
|
||||
arr.forEach((item, i) => assertIgnoreItem(access(loc, i), item));
|
||||
}
|
||||
|
||||
return arr;
|
||||
}
|
||||
|
||||
function assertIgnoreItem(loc, value) {
|
||||
if (typeof value !== "string" && typeof value !== "function" && !(value instanceof RegExp)) {
|
||||
throw new Error(`${msg(loc)} must be an array of string/Function/RegExp values, or undefined`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function assertConfigApplicableTest(loc, value) {
|
||||
if (value === undefined) return value;
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
value.forEach((item, i) => {
|
||||
if (!checkValidTest(item)) {
|
||||
throw new Error(`${msg(access(loc, i))} must be a string/Function/RegExp.`);
|
||||
}
|
||||
});
|
||||
} else if (!checkValidTest(value)) {
|
||||
throw new Error(`${msg(loc)} must be a string/Function/RegExp, or an array of those`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function checkValidTest(value) {
|
||||
return typeof value === "string" || typeof value === "function" || value instanceof RegExp;
|
||||
}
|
||||
|
||||
function assertConfigFileSearch(loc, value) {
|
||||
if (value !== undefined && typeof value !== "boolean" && typeof value !== "string") {
|
||||
throw new Error(`${msg(loc)} must be a undefined, a boolean, a string, ` + `got ${JSON.stringify(value)}`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function assertBabelrcSearch(loc, value) {
|
||||
if (value === undefined || typeof value === "boolean") return value;
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
value.forEach((item, i) => {
|
||||
if (!checkValidTest(item)) {
|
||||
throw new Error(`${msg(access(loc, i))} must be a string/Function/RegExp.`);
|
||||
}
|
||||
});
|
||||
} else if (!checkValidTest(value)) {
|
||||
throw new Error(`${msg(loc)} must be a undefined, a boolean, a string/Function/RegExp ` + `or an array of those, got ${JSON.stringify(value)}`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function assertPluginList(loc, value) {
|
||||
const arr = assertArray(loc, value);
|
||||
|
||||
if (arr) {
|
||||
arr.forEach((item, i) => assertPluginItem(access(loc, i), item));
|
||||
}
|
||||
|
||||
return arr;
|
||||
}
|
||||
|
||||
function assertPluginItem(loc, value) {
|
||||
if (Array.isArray(value)) {
|
||||
if (value.length === 0) {
|
||||
throw new Error(`${msg(loc)} must include an object`);
|
||||
}
|
||||
|
||||
if (value.length > 3) {
|
||||
throw new Error(`${msg(loc)} may only be a two-tuple or three-tuple`);
|
||||
}
|
||||
|
||||
assertPluginTarget(access(loc, 0), value[0]);
|
||||
|
||||
if (value.length > 1) {
|
||||
const opts = value[1];
|
||||
|
||||
if (opts !== undefined && opts !== false && (typeof opts !== "object" || Array.isArray(opts) || opts === null)) {
|
||||
throw new Error(`${msg(access(loc, 1))} must be an object, false, or undefined`);
|
||||
}
|
||||
}
|
||||
|
||||
if (value.length === 3) {
|
||||
const name = value[2];
|
||||
|
||||
if (name !== undefined && typeof name !== "string") {
|
||||
throw new Error(`${msg(access(loc, 2))} must be a string, or undefined`);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
assertPluginTarget(loc, value);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function assertPluginTarget(loc, value) {
|
||||
if ((typeof value !== "object" || !value) && typeof value !== "string" && typeof value !== "function") {
|
||||
throw new Error(`${msg(loc)} must be a string, object, function`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function assertTargets(loc, value) {
|
||||
if ((0, _helperCompilationTargets().isBrowsersQueryValid)(value)) return value;
|
||||
|
||||
if (typeof value !== "object" || !value || Array.isArray(value)) {
|
||||
throw new Error(`${msg(loc)} must be a string, an array of strings or an object`);
|
||||
}
|
||||
|
||||
const browsersLoc = access(loc, "browsers");
|
||||
const esmodulesLoc = access(loc, "esmodules");
|
||||
assertBrowsersList(browsersLoc, value.browsers);
|
||||
assertBoolean(esmodulesLoc, value.esmodules);
|
||||
|
||||
for (const key of Object.keys(value)) {
|
||||
const val = value[key];
|
||||
const subLoc = access(loc, key);
|
||||
if (key === "esmodules") assertBoolean(subLoc, val);else if (key === "browsers") assertBrowsersList(subLoc, val);else if (!Object.hasOwnProperty.call(_helperCompilationTargets().TargetNames, key)) {
|
||||
const validTargets = Object.keys(_helperCompilationTargets().TargetNames).join(", ");
|
||||
throw new Error(`${msg(subLoc)} is not a valid target. Supported targets are ${validTargets}`);
|
||||
} else assertBrowserVersion(subLoc, val);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function assertBrowsersList(loc, value) {
|
||||
if (value !== undefined && !(0, _helperCompilationTargets().isBrowsersQueryValid)(value)) {
|
||||
throw new Error(`${msg(loc)} must be undefined, a string or an array of strings`);
|
||||
}
|
||||
}
|
||||
|
||||
function assertBrowserVersion(loc, value) {
|
||||
if (typeof value === "number" && Math.round(value) === value) return;
|
||||
if (typeof value === "string") return;
|
||||
throw new Error(`${msg(loc)} must be a string or an integer number`);
|
||||
}
|
||||
|
||||
function assertAssumptions(loc, value) {
|
||||
if (value === undefined) return;
|
||||
|
||||
if (typeof value !== "object" || value === null) {
|
||||
throw new Error(`${msg(loc)} must be an object or undefined.`);
|
||||
}
|
||||
|
||||
let root = loc;
|
||||
|
||||
do {
|
||||
root = root.parent;
|
||||
} while (root.type !== "root");
|
||||
|
||||
const inPreset = root.source === "preset";
|
||||
|
||||
for (const name of Object.keys(value)) {
|
||||
const subLoc = access(loc, name);
|
||||
|
||||
if (!_options.assumptionsNames.has(name)) {
|
||||
throw new Error(`${msg(subLoc)} is not a supported assumption.`);
|
||||
}
|
||||
|
||||
if (typeof value[name] !== "boolean") {
|
||||
throw new Error(`${msg(subLoc)} must be a boolean.`);
|
||||
}
|
||||
|
||||
if (inPreset && value[name] === false) {
|
||||
throw new Error(`${msg(subLoc)} cannot be set to 'false' inside presets.`);
|
||||
}
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
0 && 0;
|
213
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/validation/options.js
generated
vendored
Normal file
@ -0,0 +1,213 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.assumptionsNames = void 0;
|
||||
exports.checkNoUnwrappedItemOptionPairs = checkNoUnwrappedItemOptionPairs;
|
||||
exports.validate = validate;
|
||||
|
||||
var _plugin = require("../plugin");
|
||||
|
||||
var _removed = require("./removed");
|
||||
|
||||
var _optionAssertions = require("./option-assertions");
|
||||
|
||||
const ROOT_VALIDATORS = {
|
||||
cwd: _optionAssertions.assertString,
|
||||
root: _optionAssertions.assertString,
|
||||
rootMode: _optionAssertions.assertRootMode,
|
||||
configFile: _optionAssertions.assertConfigFileSearch,
|
||||
caller: _optionAssertions.assertCallerMetadata,
|
||||
filename: _optionAssertions.assertString,
|
||||
filenameRelative: _optionAssertions.assertString,
|
||||
code: _optionAssertions.assertBoolean,
|
||||
ast: _optionAssertions.assertBoolean,
|
||||
cloneInputAst: _optionAssertions.assertBoolean,
|
||||
envName: _optionAssertions.assertString
|
||||
};
|
||||
const BABELRC_VALIDATORS = {
|
||||
babelrc: _optionAssertions.assertBoolean,
|
||||
babelrcRoots: _optionAssertions.assertBabelrcSearch
|
||||
};
|
||||
const NONPRESET_VALIDATORS = {
|
||||
extends: _optionAssertions.assertString,
|
||||
ignore: _optionAssertions.assertIgnoreList,
|
||||
only: _optionAssertions.assertIgnoreList,
|
||||
targets: _optionAssertions.assertTargets,
|
||||
browserslistConfigFile: _optionAssertions.assertConfigFileSearch,
|
||||
browserslistEnv: _optionAssertions.assertString
|
||||
};
|
||||
const COMMON_VALIDATORS = {
|
||||
inputSourceMap: _optionAssertions.assertInputSourceMap,
|
||||
presets: _optionAssertions.assertPluginList,
|
||||
plugins: _optionAssertions.assertPluginList,
|
||||
passPerPreset: _optionAssertions.assertBoolean,
|
||||
assumptions: _optionAssertions.assertAssumptions,
|
||||
env: assertEnvSet,
|
||||
overrides: assertOverridesList,
|
||||
test: _optionAssertions.assertConfigApplicableTest,
|
||||
include: _optionAssertions.assertConfigApplicableTest,
|
||||
exclude: _optionAssertions.assertConfigApplicableTest,
|
||||
retainLines: _optionAssertions.assertBoolean,
|
||||
comments: _optionAssertions.assertBoolean,
|
||||
shouldPrintComment: _optionAssertions.assertFunction,
|
||||
compact: _optionAssertions.assertCompact,
|
||||
minified: _optionAssertions.assertBoolean,
|
||||
auxiliaryCommentBefore: _optionAssertions.assertString,
|
||||
auxiliaryCommentAfter: _optionAssertions.assertString,
|
||||
sourceType: _optionAssertions.assertSourceType,
|
||||
wrapPluginVisitorMethod: _optionAssertions.assertFunction,
|
||||
highlightCode: _optionAssertions.assertBoolean,
|
||||
sourceMaps: _optionAssertions.assertSourceMaps,
|
||||
sourceMap: _optionAssertions.assertSourceMaps,
|
||||
sourceFileName: _optionAssertions.assertString,
|
||||
sourceRoot: _optionAssertions.assertString,
|
||||
parserOpts: _optionAssertions.assertObject,
|
||||
generatorOpts: _optionAssertions.assertObject
|
||||
};
|
||||
{
|
||||
Object.assign(COMMON_VALIDATORS, {
|
||||
getModuleId: _optionAssertions.assertFunction,
|
||||
moduleRoot: _optionAssertions.assertString,
|
||||
moduleIds: _optionAssertions.assertBoolean,
|
||||
moduleId: _optionAssertions.assertString
|
||||
});
|
||||
}
|
||||
const knownAssumptions = ["arrayLikeIsIterable", "constantReexports", "constantSuper", "enumerableModuleMeta", "ignoreFunctionLength", "ignoreToPrimitiveHint", "iterableIsArray", "mutableTemplateObject", "noClassCalls", "noDocumentAll", "noIncompleteNsImportDetection", "noNewArrows", "objectRestNoSymbols", "privateFieldsAsProperties", "pureGetters", "setClassMethods", "setComputedProperties", "setPublicClassFields", "setSpreadProperties", "skipForOfIteratorClosing", "superIsCallableConstructor"];
|
||||
const assumptionsNames = new Set(knownAssumptions);
|
||||
exports.assumptionsNames = assumptionsNames;
|
||||
|
||||
function getSource(loc) {
|
||||
return loc.type === "root" ? loc.source : getSource(loc.parent);
|
||||
}
|
||||
|
||||
function validate(type, opts) {
|
||||
return validateNested({
|
||||
type: "root",
|
||||
source: type
|
||||
}, opts);
|
||||
}
|
||||
|
||||
function validateNested(loc, opts) {
|
||||
const type = getSource(loc);
|
||||
assertNoDuplicateSourcemap(opts);
|
||||
Object.keys(opts).forEach(key => {
|
||||
const optLoc = {
|
||||
type: "option",
|
||||
name: key,
|
||||
parent: loc
|
||||
};
|
||||
|
||||
if (type === "preset" && NONPRESET_VALIDATORS[key]) {
|
||||
throw new Error(`${(0, _optionAssertions.msg)(optLoc)} is not allowed in preset options`);
|
||||
}
|
||||
|
||||
if (type !== "arguments" && ROOT_VALIDATORS[key]) {
|
||||
throw new Error(`${(0, _optionAssertions.msg)(optLoc)} is only allowed in root programmatic options`);
|
||||
}
|
||||
|
||||
if (type !== "arguments" && type !== "configfile" && BABELRC_VALIDATORS[key]) {
|
||||
if (type === "babelrcfile" || type === "extendsfile") {
|
||||
throw new Error(`${(0, _optionAssertions.msg)(optLoc)} is not allowed in .babelrc or "extends"ed files, only in root programmatic options, ` + `or babel.config.js/config file options`);
|
||||
}
|
||||
|
||||
throw new Error(`${(0, _optionAssertions.msg)(optLoc)} is only allowed in root programmatic options, or babel.config.js/config file options`);
|
||||
}
|
||||
|
||||
const validator = COMMON_VALIDATORS[key] || NONPRESET_VALIDATORS[key] || BABELRC_VALIDATORS[key] || ROOT_VALIDATORS[key] || throwUnknownError;
|
||||
validator(optLoc, opts[key]);
|
||||
});
|
||||
return opts;
|
||||
}
|
||||
|
||||
function throwUnknownError(loc) {
|
||||
const key = loc.name;
|
||||
|
||||
if (_removed.default[key]) {
|
||||
const {
|
||||
message,
|
||||
version = 5
|
||||
} = _removed.default[key];
|
||||
throw new Error(`Using removed Babel ${version} option: ${(0, _optionAssertions.msg)(loc)} - ${message}`);
|
||||
} else {
|
||||
const unknownOptErr = new Error(`Unknown option: ${(0, _optionAssertions.msg)(loc)}. Check out https://babeljs.io/docs/en/babel-core/#options for more information about options.`);
|
||||
unknownOptErr.code = "BABEL_UNKNOWN_OPTION";
|
||||
throw unknownOptErr;
|
||||
}
|
||||
}
|
||||
|
||||
function has(obj, key) {
|
||||
return Object.prototype.hasOwnProperty.call(obj, key);
|
||||
}
|
||||
|
||||
function assertNoDuplicateSourcemap(opts) {
|
||||
if (has(opts, "sourceMap") && has(opts, "sourceMaps")) {
|
||||
throw new Error(".sourceMap is an alias for .sourceMaps, cannot use both");
|
||||
}
|
||||
}
|
||||
|
||||
function assertEnvSet(loc, value) {
|
||||
if (loc.parent.type === "env") {
|
||||
throw new Error(`${(0, _optionAssertions.msg)(loc)} is not allowed inside of another .env block`);
|
||||
}
|
||||
|
||||
const parent = loc.parent;
|
||||
const obj = (0, _optionAssertions.assertObject)(loc, value);
|
||||
|
||||
if (obj) {
|
||||
for (const envName of Object.keys(obj)) {
|
||||
const env = (0, _optionAssertions.assertObject)((0, _optionAssertions.access)(loc, envName), obj[envName]);
|
||||
if (!env) continue;
|
||||
const envLoc = {
|
||||
type: "env",
|
||||
name: envName,
|
||||
parent
|
||||
};
|
||||
validateNested(envLoc, env);
|
||||
}
|
||||
}
|
||||
|
||||
return obj;
|
||||
}
|
||||
|
||||
function assertOverridesList(loc, value) {
|
||||
if (loc.parent.type === "env") {
|
||||
throw new Error(`${(0, _optionAssertions.msg)(loc)} is not allowed inside an .env block`);
|
||||
}
|
||||
|
||||
if (loc.parent.type === "overrides") {
|
||||
throw new Error(`${(0, _optionAssertions.msg)(loc)} is not allowed inside an .overrides block`);
|
||||
}
|
||||
|
||||
const parent = loc.parent;
|
||||
const arr = (0, _optionAssertions.assertArray)(loc, value);
|
||||
|
||||
if (arr) {
|
||||
for (const [index, item] of arr.entries()) {
|
||||
const objLoc = (0, _optionAssertions.access)(loc, index);
|
||||
const env = (0, _optionAssertions.assertObject)(objLoc, item);
|
||||
if (!env) throw new Error(`${(0, _optionAssertions.msg)(objLoc)} must be an object`);
|
||||
const overridesLoc = {
|
||||
type: "overrides",
|
||||
index,
|
||||
parent
|
||||
};
|
||||
validateNested(overridesLoc, env);
|
||||
}
|
||||
}
|
||||
|
||||
return arr;
|
||||
}
|
||||
|
||||
function checkNoUnwrappedItemOptionPairs(items, index, type, e) {
|
||||
if (index === 0) return;
|
||||
const lastItem = items[index - 1];
|
||||
const thisItem = items[index];
|
||||
|
||||
if (lastItem.file && lastItem.options === undefined && typeof thisItem.value === "object") {
|
||||
e.message += `\n- Maybe you meant to use\n` + `"${type}s": [\n ["${lastItem.file.request}", ${JSON.stringify(thisItem.value, undefined, 2)}]\n]\n` + `To be a valid ${type}, its name and options should be wrapped in a pair of brackets`;
|
||||
}
|
||||
}
|
||||
|
||||
0 && 0;
|
73
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/validation/plugins.js
generated
vendored
Normal file
@ -0,0 +1,73 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.validatePluginObject = validatePluginObject;
|
||||
|
||||
var _optionAssertions = require("./option-assertions");
|
||||
|
||||
const VALIDATORS = {
|
||||
name: _optionAssertions.assertString,
|
||||
manipulateOptions: _optionAssertions.assertFunction,
|
||||
pre: _optionAssertions.assertFunction,
|
||||
post: _optionAssertions.assertFunction,
|
||||
inherits: _optionAssertions.assertFunction,
|
||||
visitor: assertVisitorMap,
|
||||
parserOverride: _optionAssertions.assertFunction,
|
||||
generatorOverride: _optionAssertions.assertFunction
|
||||
};
|
||||
|
||||
function assertVisitorMap(loc, value) {
|
||||
const obj = (0, _optionAssertions.assertObject)(loc, value);
|
||||
|
||||
if (obj) {
|
||||
Object.keys(obj).forEach(prop => assertVisitorHandler(prop, obj[prop]));
|
||||
|
||||
if (obj.enter || obj.exit) {
|
||||
throw new Error(`${(0, _optionAssertions.msg)(loc)} cannot contain catch-all "enter" or "exit" handlers. Please target individual nodes.`);
|
||||
}
|
||||
}
|
||||
|
||||
return obj;
|
||||
}
|
||||
|
||||
function assertVisitorHandler(key, value) {
|
||||
if (value && typeof value === "object") {
|
||||
Object.keys(value).forEach(handler => {
|
||||
if (handler !== "enter" && handler !== "exit") {
|
||||
throw new Error(`.visitor["${key}"] may only have .enter and/or .exit handlers.`);
|
||||
}
|
||||
});
|
||||
} else if (typeof value !== "function") {
|
||||
throw new Error(`.visitor["${key}"] must be a function`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function validatePluginObject(obj) {
|
||||
const rootPath = {
|
||||
type: "root",
|
||||
source: "plugin"
|
||||
};
|
||||
Object.keys(obj).forEach(key => {
|
||||
const validator = VALIDATORS[key];
|
||||
|
||||
if (validator) {
|
||||
const optLoc = {
|
||||
type: "option",
|
||||
name: key,
|
||||
parent: rootPath
|
||||
};
|
||||
validator(optLoc, obj[key]);
|
||||
} else {
|
||||
const invalidPluginPropertyError = new Error(`.${key} is not a valid Plugin property`);
|
||||
invalidPluginPropertyError.code = "BABEL_UNKNOWN_PLUGIN_PROPERTY";
|
||||
throw invalidPluginPropertyError;
|
||||
}
|
||||
});
|
||||
return obj;
|
||||
}
|
||||
|
||||
0 && 0;
|
Some files were not shown because too many files have changed in this diff