frontend
This commit is contained in:
parent
936931f047
commit
9fbd7ced9c
@ -1,140 +0,0 @@
|
||||
from typing import List, Optional
|
||||
from fastapi_jwt_auth import AuthJWT
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from fastapi import APIRouter, Depends, FastAPI, Form, HTTPException, status
|
||||
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
|
||||
from jose import JWTError, jwt
|
||||
from passlib.context import CryptContext
|
||||
from pydantic import BaseModel
|
||||
from api_old.schema.user import UserForm
|
||||
from config import Room_schema, User_schema, UserIn_Form, UserIn_schema
|
||||
from database.auth.crud import create_user_db, disable_user_db, delete_user_db, get_user_db, update_password_db, update_user_db
|
||||
from services.auth import PasswordSet, check_unique_user, create_access_token, authenticate_user, fresh_jwt_required, get_current_clientId, get_current_user, jwt_refresh_required, jwt_required, User, UserRegister, validate_passwords, validate_register_user
|
||||
from services.password import get_password_hash, validate_password
|
||||
from database.auth.models import UserModel
|
||||
from database.decorators import as_form
|
||||
|
||||
|
||||
|
||||
class Token(BaseModel):
    """JWT pair returned by the login / register endpoints."""
    # short-lived token sent in the Authorization header
    access_token: str
    # always "bearer" (see the login/register return values)
    token_type: str
    # long-lived token consumed by POST /refresh
    refresh_token: str
|
||||
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.post("/login", response_model=Token)
async def login_for_access_token(user: User = Depends(authenticate_user), Authorize: AuthJWT = Depends()):
    # Exchange credentials (validated by authenticate_user) for a JWT pair.
    # The access token is marked fresh so endpoints guarded by
    # fresh_jwt_required (e.g. password change) accept it directly.
    access_token = Authorize.create_access_token(
        subject=user.username, fresh=True)
    # NOTE(review): the refresh token subject is clientId while the access
    # token subject is username — /refresh will mint access tokens whose
    # subject is the clientId. Confirm downstream code expects this asymmetry.
    refresh_token = Authorize.create_refresh_token(subject=user.clientId)
    return {"access_token": access_token, "refresh_token": refresh_token, "token_type": "bearer"}
|
||||
|
||||
|
||||
class Room(BaseModel):
    """Room entry as embedded in the GET /user response."""
    name: str
    id_code: str
    # True when the requesting user owns this room (set in read_users_me)
    owner: bool
|
||||
|
||||
|
||||
class User(BaseModel):
    """Profile payload for GET /user.

    NOTE(review): this shadows the ``User`` imported from services.auth
    above (used by the /login route) — consider renaming to e.g. UserOut.
    """
    # Optional[...] makes the implicit pydantic-v1 "str = None" optionality explicit.
    username: Optional[str] = None
    firstname: Optional[str] = None
    name: Optional[str] = None
    email: Optional[str] = None
    rooms: list[Room] = []

    class Config:
        # allow population from ORM (tortoise) objects
        orm_mode = True
|
||||
|
||||
|
||||
@router.get("/user", response_model=User)
async def read_users_me(Authorize: AuthJWT = Depends(get_current_user)):
    """Return the authenticated user's profile, with each room flagged
    ``owner=True`` when the user owns it.

    Raises 401 when the token subject resolves to no user or a disabled one.
    """
    Authorize.jwt_required()
    clientId = Authorize.get_jwt_subject()
    user = await get_user_db(clientId)
    if user is None or user.disabled:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED, detail='User disabled')

    member_rooms = await Room_schema.from_queryset(user.rooms.all())
    owner_rows = await user.room_owners.all().values('room_id')
    owned_ids = {row['room_id'] for row in owner_rows}

    # BUG FIX: the owner flag was previously tested against the ids of *all*
    # of the user's rooms (always True); it must be tested against the ids
    # coming from the room_owners relation. Also removed the duplicate
    # User_schema round-trip and debug prints.
    sc = (await User_schema.from_tortoise_orm(user)).dict()
    sc['rooms'] = [{**room.dict(), "owner": room.id in owned_ids}
                   for room in member_rooms]
    return sc
|
||||
|
||||
|
||||
@router.delete('/user')
async def delete_user(user: UserModel = Depends(authenticate_user)):
    """Permanently delete the authenticated user's account.

    Re-authentication (credentials, not just a token) is required via
    authenticate_user.
    """
    username = user.username
    await delete_user_db(username)
    return 'success'
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@router.put('/user')
async def update_user(user: UserForm = Depends(UserForm.as_form), username: str = Depends(get_current_clientId), Authorize: AuthJWT = Depends()):
    # Update profile fields from the form, then issue a fresh token pair.
    # NOTE(review): `username` holds whatever subject get_current_clientId
    # extracts from the JWT — confirm update_user_db expects that identifier.
    user_obj = await update_user_db(username, **user.dict(exclude_unset=True))
    # NOTE(review): reads clientId from the *form* object rather than the
    # updated DB object (user_obj) — confirm UserForm actually exposes
    # clientId, otherwise this raises AttributeError at runtime.
    access_token = Authorize.create_access_token(
        subject=user.clientId)
    refresh_token = Authorize.create_refresh_token(subject=user.clientId)
    return {"access_token": access_token, "refresh_token": refresh_token, "token_type": "bearer"}
|
||||
|
||||
|
||||
@router.post('/register', response_model=Token)
async def register(user: UserRegister = Depends(UserRegister.as_form), Authorize: AuthJWT = Depends()):
    """Create a new account and immediately return a JWT pair for it."""
    # Validation call kept for its side effect (presumably raises on a
    # duplicate username — confirm); its return value was never used.
    await check_unique_user(user.username)

    user = await create_user_db(user.username, get_password_hash(user.password))

    access_token = Authorize.create_access_token(
        subject=user.username)
    refresh_token = Authorize.create_refresh_token(subject=user.username)
    return {"access_token": access_token, "refresh_token": refresh_token, "token_type": "bearer"}
|
||||
|
||||
|
||||
@router.post('/user/disable')
async def disable_user(user: UserModel = Depends(authenticate_user)):
    """Soft-disable the authenticated account (requires re-authentication)."""
    username = user.username
    await disable_user_db(username)
    return 'success'
|
||||
|
||||
|
||||
@router.put('/user/password')
async def update_password(passwords: PasswordSet = Depends(validate_passwords), Authorize: AuthJWT = Depends(fresh_jwt_required)):
    """Set a new password for the current user.

    Requires a *fresh* access token (fresh_jwt_required), i.e. one obtained
    directly from /login rather than /refresh.
    """
    subject = Authorize.get_jwt_subject()
    updated = await update_password_db(subject, passwords.password)
    return await User_schema.from_tortoise_orm(updated)
|
||||
|
||||
|
||||
@router.get('/users')
async def get_users():
    """List every user, serialized with the public User_schema fields."""
    all_users = UserModel.all()
    return await User_schema.from_queryset(all_users)
|
||||
|
||||
|
||||
@router.post('/refresh')
async def refresh(Authorize: AuthJWT = Depends(jwt_refresh_required)):
    """Mint a new (non-fresh) access token from a valid refresh token."""
    subject = Authorize.get_jwt_subject()
    return {"access_token": Authorize.create_access_token(subject=subject)}
|
||||
|
||||
|
||||
|
||||
@router.post('/check-access')
async def check_token(Authorize: AuthJWT = Depends(jwt_required)):
    # Token probe: jwt_required rejects invalid tokens, so a 200 response
    # with an empty body means the caller's access token is valid.
    return ""
|
@ -1,13 +0,0 @@
|
||||
from fastapi import APIRouter
|
||||
import apis.exercices.route_exercices
|
||||
import apis.auth.route_auth
|
||||
import apis.room.route_room
|
||||
import apis.room.websocket
|
||||
# Aggregate router: mounts every sub-API on one APIRouter so the app can
# include them all with a single include_router call.
api_router = APIRouter()

api_router.include_router(apis.exercices.route_exercices.router)
api_router.include_router(apis.auth.route_auth.router)
api_router.include_router(apis.room.route_room.router)
api_router.include_router(apis.room.websocket.router)
|
||||
|
||||
|
@ -1,165 +0,0 @@
|
||||
import csv
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
from typing import List
|
||||
from fastapi import APIRouter, Depends, Form, UploadFile, status
|
||||
from api.schemas.exercices import ExerciceSchema
|
||||
from database.exercices.validators import get_support_compatibility_for_exo_source_from_data
|
||||
from database.auth.models import UserModel
|
||||
from generateur.generateur_csv import Csv_generator
|
||||
from database.decorators import as_form
|
||||
from database.exercices.models import Exercice
|
||||
from fastapi.exceptions import HTTPException
|
||||
from database.exercices.crud import add_tag_db, create_exo_db, delete_tag_db, get_exo_source_path, update_exo_db, delete_exo_db, clone_exo_db
|
||||
from config import Exercice_schema, ExerciceIn_form, ExerciceIn_schema, Exo_schema, TagIn_schema, User_schema
|
||||
from services.auth import check_author_exo, get_current_clientId, get_current_user, jwt_optional, jwt_required
|
||||
from services.io import get_abs_path_from_relative_to_root, get_filename_from_path
|
||||
from fastapi.responses import FileResponse, Response, StreamingResponse
|
||||
from pydantic import BaseModel
|
||||
from fastapi_jwt_auth import AuthJWT
|
||||
from fastapi_pagination import paginate, Page
|
||||
router = APIRouter()
|
||||
|
||||
# Exercices
|
||||
|
||||
|
||||
@router.get("/exercices", response_model=Page[Exo_schema])
async def get_exercices():
    """Paginated list of every exercise."""
    return paginate(await Exo_schema.from_queryset(Exercice.all()))
|
||||
|
||||
|
||||
@router.get('/exercices/user', response_model=Page[Exo_schema])
async def get_exercices(Authorize: AuthJWT = Depends(jwt_required)):
    """Paginated list of the authenticated user's own exercises."""
    subject = Authorize.get_jwt_subject()
    author = await UserModel.get(username=subject)
    own_exos = await Exo_schema.from_queryset(Exercice.filter(author_id=author.id))
    return paginate(own_exos)
|
||||
|
||||
@router.get('/exercices/public', response_model=Page[Exo_schema])
async def get_exercices(Authorize: AuthJWT = Depends(jwt_optional)):
    """Paginated list of exercises, excluding the caller's own when a valid
    token is supplied; every exercise for anonymous callers."""
    username = Authorize.get_jwt_subject()
    if username is not None:
        user = await UserModel.get(username=username)
        # BUG FIX: the queryset was previously paginated raw (never awaited
        # nor serialized); convert through the schema like the other branch.
        exo_list = await Exo_schema.from_queryset(
            Exercice.filter(author_id__not=user.id))
        return paginate(exo_list)
    exo_list = await Exo_schema.from_queryset(Exercice.all())
    return paginate(exo_list)
|
||||
|
||||
|
||||
@router.get('/exercice/{id_code}', response_model=ExerciceSchema)
async def get_exercice(id_code: str, Authorize: AuthJWT = Depends(jwt_optional)):
    """Fetch one exercise; ``is_author`` is True only for an authenticated
    caller who authored it.

    Refactor: the two branches previously duplicated the schema-building
    code; the authorship check is now computed once.
    """
    username = Authorize.get_jwt_subject()
    exo = await Exercice.get(id_code=id_code)

    is_author = False
    if username is not None:
        user = await UserModel.get(username=username)
        author = await exo.author
        is_author = author.id == user.id

    exo_dict = (await Exercice_schema.from_tortoise_orm(exo)).dict()
    exo_dict['is_author'] = is_author
    return exo_dict
|
||||
|
||||
|
||||
async def validate_file(file: UploadFile):
    """FastAPI dependency: ensure an uploaded exo_source is compatible with
    at least one support (pdf/csv/web), then rewind the file for reuse.

    Raises 422 with an ``exo_source`` detail on any validation failure.
    """
    data = await file.read()
    try:
        exo_supports_compatibility = get_support_compatibility_for_exo_source_from_data(
            data)
    except Exception as e:
        # Validator blew up on the raw bytes — surface its message.
        msg = e.args[0] if e.args else str(e)
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail={"exo_source": msg})

    # BUG FIX: raised outside the try block. Previously this HTTPException
    # was immediately caught by the broad `except Exception` above and
    # re-wrapped with e.args[0] (the status code) as the detail.
    if not (exo_supports_compatibility['isPdf']
            or exo_supports_compatibility['isCsv']
            or exo_supports_compatibility['isWeb']):
        raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail={"exo_source":
            '[Error] : Exercice non valide (compatible avec aucun support)'})

    await file.seek(0)
    return file
|
||||
|
||||
@router.post("/exercices", response_model=Exercice_schema)
async def create_exercice(file: UploadFile = Depends(validate_file), exo: ExerciceIn_schema = Depends(ExerciceIn_form.as_form), current_user: UserModel = Depends(get_current_user)):
    # Create an exercise from a validated source file plus form metadata,
    # authored by the current user.
    # NOTE(review): reaches into the SpooledTemporaryFile's private `_file`
    # and grafts a `name` attribute on it — presumably create_exo_db needs a
    # named file object; confirm, this breaks if starlette changes internals.
    file_obj = file.file._file
    file_obj.name = file.filename
    exo_obj = await create_exo_db(**{**exo.dict(exclude_unset=True)}, exo_source=file_obj, author_id=current_user.id)
    return await Exercice_schema.from_tortoise_orm(exo_obj)
|
||||
|
||||
|
||||
@router.delete("/exercices/{id_code}", response_model=str)
async def delete_exercice(id_code: str, author: User_schema = Depends(check_author_exo)):
    """Delete an exercise; check_author_exo restricts this to its author."""
    await delete_exo_db(id_code)
    return "success"
|
||||
|
||||
|
||||
@router.put("/exercices/{id_code}", response_model=Exercice_schema)
async def update_exercice(id_code: str, file: UploadFile, exo: ExerciceIn_form = Depends(ExerciceIn_form.as_form), author: User_schema = Depends(check_author_exo)):
    # Replace an exercise's metadata and source file (author only).
    # NOTE(review): unlike create_exercice, `file` is NOT routed through
    # validate_file here — confirm whether update should skip validation.
    file_obj = file.file._file
    file_obj.name = file.filename
    exo_obj = await update_exo_db(id_code, **{**exo.dict(exclude_unset=True)}, exo_source=file_obj)
    return await Exercice_schema.from_tortoise_orm(exo_obj)
|
||||
|
||||
|
||||
@router.post('/exercices/{id_code}/clone', response_model=Exercice_schema)
async def clone_exercice(id_code: str, user: User_schema = Depends(get_current_user)):
    """Duplicate an exercise, making the current user the author of the copy."""
    cloned = await clone_exo_db(id_code, user.id)
    return await Exercice_schema.from_tortoise_orm(cloned)
|
||||
|
||||
|
||||
@router.get('/exercices/{id_code}/exo_source')
async def get_exo_source(id_code: str, author: User_schema = Depends(check_author_exo)):
    """Download an exercise's source file (author only)."""
    source_path = await get_exo_source_path(id_code)
    return FileResponse(source_path, filename=get_filename_from_path(source_path))
|
||||
|
||||
# Tags
|
||||
|
||||
|
||||
@router.post('/exercices/{id_code}/tags', response_model=Exercice_schema)
async def update_tag(id_code: str, tags_data: List[TagIn_schema], current_user: User_schema = Depends(get_current_user)):
    """Attach the given tags to an exercise on behalf of the current user."""
    tagged = await add_tag_db(id_code, tags_data, current_user.id)
    return await Exercice_schema.from_tortoise_orm(tagged)
|
||||
|
||||
|
||||
# BUG FIX: the path previously declared `{tags_id}` while the handler takes
# `tag_id`; FastAPI treats a path parameter with no matching function
# parameter as an error, so this route was broken.
@router.delete('/exercices/{exo_id}/tags/{tag_id}', response_model=Exercice_schema)
async def remove_tag(exo_id: str, tag_id: str, owner: User_schema = Depends(check_author_exo)):
    """Detach one tag from an exercise (author only)."""
    exo_obj = await delete_tag_db(exo_id, tag_id)
    return await Exercice_schema.from_tortoise_orm(exo_obj)
|
||||
|
||||
|
||||
@router.get('/generator/csv/{exo_id}')
async def generate_csv(exo_id: str, filename: str):
    """Generate an exercise as a CSV download.

    401 when the exercise does not support the csv medium.
    """
    exo = await Exercice.get(id_code=exo_id)
    if not exo.csvSupport:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
                            detail='Impossible de générer cet exercice sur le support csv')

    source_path = get_abs_path_from_relative_to_root(exo.exo_source)
    consigne = exo.consigne

    buffer = io.StringIO()
    # NOTE(review): quotechar equals the delimiter (',') — looks wrong;
    # original comment planned to switch the separator to '|' some day.
    writer = csv.writer(buffer, delimiter=',',
                        quotechar=',', quoting=csv.QUOTE_MINIMAL, dialect='excel')
    Csv_generator(source_path, 10, 10, 12, consigne, writer)

    # FIX: the Content-Disposition previously contained a garbled literal;
    # use the `filename` query parameter, which was accepted but never used.
    headers = {"Content-Disposition": f'attachment; filename="{filename}.csv"'}
    return StreamingResponse(iter([buffer.getvalue()]), headers=headers, media_type='text/csv')
|
||||
|
||||
|
||||
class ExoOption(BaseModel):
    """One entry of a PDF-generation request (see generate_pdf)."""
    # exercise id_code (shadows the builtin `id` — acceptable in a model)
    id: str
    # presumably: number of questions within one exercise — TODO confirm
    nbInExo: int
    # presumably: number of copies of the exercise — TODO confirm
    nbOfExo: int
|
||||
|
||||
|
||||
@router.post('/generator/pdf')
async def generate_pdf(exos_list: List[ExoOption]):
    # TODO: not implemented — accepts the request and returns an empty body.
    return
|
@ -1,64 +0,0 @@
|
||||
from fastapi import APIRouter, Depends, Request
|
||||
from config import AnonymousIn_schema, Room_schema, RoomIn_schema, User_schema
|
||||
from database.auth.models import UserModel
|
||||
from database.room.crud import create_room_anonymous_db, create_room_with_user_db, get_room_db
|
||||
from database.room.models import Room
|
||||
from services.auth import get_current_user_optional
|
||||
router = APIRouter()
|
||||
|
||||
@router.post('/rooms')
async def create_room(roomData: RoomIn_schema, anonymous: AnonymousIn_schema = None, user: User_schema = Depends(get_current_user_optional)):
    """Create a room, owned by the user when authenticated, else by the
    supplied anonymous identity."""
    if user is None:
        new_room = await create_room_anonymous_db(room=roomData, anonymous=anonymous)
    else:
        new_room = await create_room_with_user_db(room=roomData, user=user)
    return await Room_schema.from_tortoise_orm(new_room)
|
||||
|
||||
@router.get('/room/{room_id}')
async def get_room(room_id: str):
    """Fetch a room by id_code; returns null (None) when it does not exist."""
    room = await get_room_db(room_id)
    return None if room is None else await Room_schema.from_tortoise_orm(room)
|
||||
|
||||
@router.get('/room/check/{room_id}')
async def check_room(room_id: str):
    """Return True iff a room with this id exists."""
    return await get_room_db(room_id) is not None
|
||||
|
||||
@router.post('/room/{room_id}/join')
async def join_room(room_id: str, anonymous: AnonymousIn_schema = None, user: User_schema = Depends(get_current_user_optional)):
    """Join a room: authenticated users are queued (private room) or added
    (public room); anonymous callers currently get an empty response.

    BUG FIX: the original unconditionally called UserModel.get(id=user.id),
    which raised AttributeError whenever the caller was anonymous
    (user is None).
    """
    room = await Room.get(id_code=room_id)
    db_user = await UserModel.get(id=user.id) if user is not None else None
    if room.private:
        if db_user is not None:
            await room.users_waiters.add(db_user)
            return 'waiting'
        # anonymous joins of private rooms are handled over the websocket
        return
    if db_user is not None:
        await room.users.add(db_user)
        return 'logged in'
|
||||
|
||||
|
||||
@router.delete('/room/{room_id}')
async def delete_room(room_id):
    # TODO: not implemented — accepts the request and returns an empty body.
    return
|
||||
|
||||
|
||||
@router.get('/rooms')
async def get_rooms():
    """List every room."""
    return await Room_schema.from_queryset(Room.all())
|
||||
|
||||
|
||||
@router.get('/test/{room_id}')
async def test(room_id):
    # Debug endpoint: prints one anonymous member of the room.
    # NOTE(review): hard-coded id_code 'JTSGUC' and print() — development
    # leftovers; remove this route before release.
    room = await Room.get(id_code=room_id)
    ano = await room.anonymousmembers
    print(await ano.get(id_code='JTSGUC'))
    return "user"
|
@ -1,307 +0,0 @@
|
||||
import json
|
||||
from typing import Dict, List, Union
|
||||
from fastapi import Cookie, Depends, FastAPI, HTTPException, Query, WebSocket, status, APIRouter, WebSocketDisconnect, status
|
||||
from fastapi.responses import HTMLResponse
|
||||
from config import User_schema
|
||||
from database.auth.models import UserModel
|
||||
from database.exercices.crud import generate_unique_code
|
||||
from database.room.crud import check_anonymous_owner, check_user_in_room, check_user_owner, connect_room, create_waiter_anonymous, create_waiter_by_user, disconnect_room, get_member_by_code, validate_name_in_room
|
||||
from database.room.models import AnonymousMember, Room, RoomOwner, Waiter
|
||||
from services.auth import get_user_from_token
|
||||
from services.io import get_abs_path_from_relative_to_root
|
||||
import secrets
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
@router.get('/')
def index():
    """Serve the static index page.

    BUG FIX: the file handle was previously opened and never closed; a
    context manager guarantees it is released.
    """
    with open(get_abs_path_from_relative_to_root('/index.html'), 'r') as page:
        return HTMLResponse(page.read())
|
||||
|
||||
|
||||
class ConnectionManager:
    """Registry of open websockets, keyed by group name, with broadcast."""

    def __init__(self):
        # group name -> list of websockets subscribed to that group
        self.active_connections = {}

    async def add(self, group, ws):
        """Register *ws* under *group*, creating the group on first use."""
        members = self.active_connections.setdefault(group, [])
        if ws not in members:
            members.append(ws)

    def remove(self, ws, group):
        """Unregister *ws* from *group*; unknown pairs are ignored."""
        members = self.active_connections.get(group)
        if members is not None and ws in members:
            members.remove(ws)

    async def send_personal_message(self, message, websocket):
        """Send *message* as plain text to one websocket."""
        await websocket.send_text(message)

    async def broadcast(self, message, group):
        """Send *message* as JSON to every websocket in *group*."""
        for connection in self.active_connections.get(group, []):
            await connection.send_json(message)
|
||||
|
||||
|
||||
# Single shared connection registry for all room websockets.
manager = ConnectionManager()
|
||||
|
||||
class Consumer():
    """Minimal websocket consumer lifecycle: connect, receive loop, disconnect.

    Subclasses override connect/receive/disconnect; run() drives the loop
    until the client disconnects.
    """

    def __init__(self, ws: WebSocket):
        self.ws: WebSocket = ws

    async def connect(self):
        """Hook called once before the receive loop."""
        pass

    async def receive(self, data):
        """Hook called with each text frame.

        BUG FIX: run() invokes self.receive(data), but the base hook
        previously took no argument, so any subclass relying on the default
        raised TypeError on the first message.
        """
        pass

    async def disconnect(self):
        """Hook called after the client disconnects."""
        pass

    async def run(self):
        await self.connect()
        try:
            while True:
                data = await self.ws.receive_text()
                await self.receive(data)
        except WebSocketDisconnect:
            await self.disconnect()
|
||||
|
||||
|
||||
class RoomConsumer(Consumer):
    """Websocket state machine for one client inside a room.

    Handles message types: auth, login (private/public x user/anonymous),
    accept_waiter, refuse_waiter, log_waiter, ban, leave. Broadcast groups:
    `<room_id>` (members), `<room_id>__owner` (owners),
    `<room_id>__waiting__<id>` (one pending waiter).
    """

    def __init__(self, ws: WebSocket, room_id, manager: ConnectionManager):
        self.ws: WebSocket = ws
        self.room_id = room_id
        self.manager: ConnectionManager = manager
        # becomes True once this client is recognized as a room owner
        self.owner = False

    async def connect(self):
        # Accept the socket and register it, then load the room.
        await self.ws.accept()
        self.clientId = secrets.token_hex(32)
        # NOTE(review): ConnectionManager.add's signature is (group, ws) —
        # here the arguments look swapped (the websocket object becomes the
        # group key). Confirm; later calls use the (group, ws) order.
        await self.manager.add(self.ws, self.room_id)
        self.room: Room = await Room.get(id_code=self.room_id)
        self.status = None
        self.waiter = None
        self.user = None
        # NOTE(review): self.anonymous is only assigned in some login paths
        # but read unconditionally in disconnect() — may raise AttributeError.
        await self.ws.send_json({'type': 'accept'})

    async def receive(self, data):
        # Every frame is JSON: {"type": ..., "data": {...}}.
        json_data = json.loads(data)
        payload = json_data['data']
        # shadows the builtin `type` for the rest of the method
        type = json_data['type']

        if type == 'auth':
            # Bind a JWT-authenticated user to this socket.
            token = payload['token']
            self.user = await get_user_from_token(token)
            if self.user is not None:
                await self.ws.send_json({'type': 'auth_success'})
            else:
                await self.ws.send_json({'type': 'auth_failed'})

        # Private room, authenticated user: join directly if already a
        # member, otherwise become a waiter pending owner approval.
        if type == "login" and self.room.private == True and self.user is not None:
            if await check_user_in_room(self.room, self.user):
                if await check_user_owner(self.room, self.user):
                    self.owner = True
                    await self.manager.add(f'{self.room_id}__owner', self.ws)
                else:
                    await self.manager.add(self.room_id, self.ws)
                await connect_room(self.room, self.user.id)
                await self.manager.broadcast({'type': 'joined', 'data': {"name": self.user.username}}, self.room_id)
                await self.manager.broadcast({'type': 'joined', 'data': {"name": self.user.username}}, f'{self.room_id}__owner')
            else:
                self.waiter = await create_waiter_by_user(self.room, self.user)
                await self.ws.send_json({'data': "waiting"})
                await self.manager.add(f'{self.room_id}__waiting__{self.user.id}', self.ws)
                await self.manager.broadcast({'type': 'add_waiter', 'data': {"name": self.user.username, 'id': self.user.id}}, f'{self.room_id}__owner')

        # Private room, anonymous client: re-login with a relogin_code, or
        # queue as a waiter under a chosen display name.
        if type == "login" and self.room.private == True and self.user is None:
            if 'relogin_code' in payload:
                anonymous = await get_member_by_code(self.room, payload['relogin_code'])
                if anonymous is not None:
                    if await check_anonymous_owner(self.room, anonymous):
                        self.owner = True
                        await self.manager.add(f'{self.room_id}__owner', self.ws)
                    else:
                        await self.manager.add(self.room_id, self.ws)
                    self.anonymous = anonymous
                    await connect_room(self.room, self.anonymous.id_code)
                    await self.manager.broadcast(
                        {'type': 'joined', 'data': {"name": anonymous.name}}, self.room_id)
            else:
                # NOTE(review): called with one argument here but with
                # (room, name) in the public branch below — one of the two
                # call sites is wrong; confirm validate_name_in_room's
                # signature.
                valid_username = await validate_name_in_room(payload['name'])
                if valid_username == True:
                    self.waiter = await create_waiter_anonymous(self.room, payload['name'])
                    await self.ws.send_json({'type': "waiting"})
                    await self.manager.add(f'{self.room_id}__waiting__{self.waiter.id_code}', self.ws)
                    await self.manager.broadcast({'type': 'add_waiter', 'data': {
                        "name": self.waiter.name, 'id': self.waiter.id_code}}, f'{self.room_id}__owner')
                else:
                    await self.ws.send_json({"type": "error", 'data': {"user_input": valid_username}})

        # Public room, authenticated user: join immediately; add to the
        # room's member set on first join.
        if type == "login" and self.room.private == False and self.user is not None:
            if await check_user_in_room(self.room, self.user):
                if await check_user_owner(self.room, self.user):
                    self.owner = True
                    await self.manager.add(f'{self.room_id}__owner', self.ws)
                else:
                    await self.manager.add(self.room_id, self.ws)
                await connect_room(self.room, self.user.id)
                await self.manager.broadcast({'type': 'joined', 'data': {"name": self.user.username}}, self.room_id)
                await self.manager.broadcast({'type': 'joined', 'data': {"name": self.user.username}}, f'{self.room_id}__owner')
            else:
                await self.room.users.add(self.user)
                await self.manager.add(self.room_id, self.ws)
                await connect_room(self.room, self.user.id)
                await self.manager.broadcast(
                    {'type': 'joined', 'data': {"name": self.user.username}}, self.room_id)
                await self.manager.broadcast(
                    {'type': 'joined', 'data': {"name": self.user.username}}, f'{self.room_id}__owner')

        # Public room, anonymous client: relogin or create a fresh
        # AnonymousMember and join immediately (no waiting).
        if type == 'login' and self.room.private == False and self.user is None:
            if 'relogin_code' in payload:
                anonymous = await get_member_by_code(self.room, payload['relogin_code'])
                if anonymous is not None:
                    if await check_anonymous_owner(self.room, anonymous):
                        self.owner = True
                        await self.manager.add(f'{self.room_id}__owner', self.ws)
                    else:
                        await self.manager.add(self.room_id, self.ws)
                    self.anonymous = anonymous
                    await connect_room(self.room, self.anonymous.id_code)
                    await self.manager.broadcast(
                        {'type': 'joined', 'data': {"name": anonymous.name}}, self.room_id)
            else:
                valid_username = await validate_name_in_room(self.room, payload['name'])
                if valid_username == True:
                    code = await generate_unique_code(AnonymousMember)
                    self.anonymous = await AnonymousMember.create(name=payload['name'], id_code=code, room_id=self.room.id)
                    await self.manager.add(self.room_id, self.ws)
                    self.owner = False
                    await connect_room(self.room, self.anonymous.id_code)
                    await self.manager.broadcast({'type': 'joined', 'data': {'name': self.anonymous.name}}, self.room_id)
                    await self.manager.broadcast({'type': 'joined', 'data': {'name': self.anonymous.name, 'code': self.anonymous.id_code}}, f'{self.room_id}__owner')
                else:
                    # NOTE(review): `ws.send` — other branches use send_json;
                    # confirm this method exists on the websocket class.
                    await self.ws.send({'type': "error", "data": {"user_input": valid_username}})

        # Owner approves a waiter: tell that waiter's socket to log in.
        if type == 'accept_waiter':
            if self.owner == True:
                id = payload['id']
                await self.manager.broadcast({'type': 'log_waiter', 'data': {}}, f'{self.room_id}__waiting__{id}')

        # Owner rejects a waiter.
        if type == 'refuse_waiter':
            if self.owner == True:
                id = payload['id']
                await self.manager.broadcast({'type': 'reject_waiter', 'data': {}}, f'{self.room_id}__waiting__{id}')

        # A waiter (this socket) was approved: promote it to member.
        if type == 'log_waiter':
            if self.user is not None:
                await self.room.users.add(self.user)
                await self.waiter.delete()
                # NOTE(review): first argument is the literal string
                # 'f{...}' (missing f-prefix) and the (ws, group) order is
                # swapped — this remove() is a no-op; confirm and fix.
                self.manager.remove('f{self.room_id}__waiting__{self.user.id}', self.ws)
                await self.manager.add(self.room_id, self.ws)

                await connect_room(self.room, self.user.id)
                await self.manager.broadcast(
                    {'type': 'joined', 'data': {"name": self.user.username}}, self.room_id)
                await self.manager.broadcast(
                    {'type': 'joined', 'data': {"name": self.user.username}}, f'{self.room_id}__owner')
            else:
                code = await generate_unique_code(AnonymousMember)
                self.anonymous = await AnonymousMember.create(name=self.waiter.name, id_code=code, room_id=self.room.id)

                self.manager.remove(self.ws, f'{self.room_id}__waiting__{self.waiter.id_code}')
                await self.waiter.delete()
                self.waiter = None
                await self.manager.add(self.room_id, self.ws)
                self.owner = False

                await connect_room(self.room, self.anonymous.id_code)
                await self.manager.broadcast({'type': 'joined', 'data': {'name': self.anonymous.name}}, self.room_id)
                await self.manager.broadcast({'type': 'joined', 'data': {'name': self.anonymous.name, 'code': self.anonymous.id_code}}, f'{self.room_id}__owner')
        elif type == 'ban':
            # Owner removes a member (registered or anonymous) from the room.
            if self.owner == True:
                # shadows the imported fastapi `status` inside this method
                status = payload['status']
                name = ""
                if status == 'user':
                    user = await UserModel.get(id=payload['id'])
                    await disconnect_room(self.room, user.id)
                    name = user.username
                    await self.room.users.remove(user)
                elif status == 'anonymous':
                    anonymous = await AnonymousMember.get(id_code=payload['id'])
                    name = anonymous.name
                    await disconnect_room(self.room, anonymous.id_code)
                    await anonymous.delete()

                await self.manager.broadcast({'type': 'leave', 'data': {"name": name}}, self.room_id)
                await self.manager.broadcast({'type': 'leave', 'data': {"name": name}}, f'{self.room_id}__owner')

        elif type == "leave":
            # Voluntary leave: remove membership and notify everyone.
            name = ""
            if self.user is not None:
                name = self.user.username
                await self.room.users.remove(self.user)
            else:
                name = self.anonymous.name
                await self.anonymous.delete()

            await self.manager.broadcast({'type': 'leave', 'data': {"name": name}}, self.room_id)
            await self.manager.broadcast({'type': 'leave', 'data': {"name": name}}, f'{self.room_id}__owner')

    async def disconnect(self):
        # Clean up registry entries and notify remaining members.
        if self.waiter != None:
            self.manager.remove(self.ws, f'{self.room_id}__waiting__{self.waiter.id_code}')
            await self.manager.broadcast({'type': "disconnect_waiter", 'data': {'name': self.waiter.name}}, self.room_id)
            await self.manager.broadcast({'type': "disconnect_waiter", 'data': {'name': self.waiter.name}}, f'{self.room_id}__owner')
            await self.waiter.delete()
        if self.owner == True:
            # NOTE(review): disconnect_room is awaited everywhere else but
            # not here — these calls create un-awaited coroutines; confirm.
            if self.user is not None:
                disconnect_room(self.room, self.user.id)
            else:
                disconnect_room(self.room, self.anonymous.id_code)
            self.manager.remove(self.ws, f'{self.room_id}__owner')
        else:
            self.manager.remove(self.ws, self.room_id)
            if self.user is not None:
                disconnect_room(self.room, self.user.id)
                await self.manager.broadcast({'type': "disconnect", 'data': {'name': self.user.username}}, self.room_id)
                await self.manager.broadcast({'type': "disconnect", 'data': {'name': self.user.username}}, f'{self.room_id}__owner')
            elif self.anonymous is not None:
                # NOTE(review): self.anonymous is never initialized in
                # connect(); this branch can raise AttributeError.
                disconnect_room(self.room, self.anonymous.id_code)
                await self.manager.broadcast({'type': "disconnect_waiter", 'data': {'name': self.anonymous.name}}, self.room_id)
                await self.manager.broadcast({'type': "disconnect_waiter", 'data': {'name': self.anonymous.name}}, f'{self.room_id}__owner')
        # NOTE(review): broadcasting `self` (a RoomConsumer) in the payload
        # is not JSON-serializable — confirm intent.
        await self.manager.broadcast({'type': "disconnect", 'data': {'name': self}}, self.room_id)
        await self.manager.broadcast({'type': f"Client left the chat"}, f'{self.room_id}__owner')
|
||||
|
||||
|
||||
async def check_room(room_id):
    """Websocket dependency: 404 unless the room exists; yields the room id."""
    room = await Room.get_or_none(id_code=room_id)
    if room is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail='Room does not exist ')
    return room_id
|
||||
|
||||
@router.websocket('/ws/{room_id}')
async def room_ws(ws: WebSocket, room_id: str = Depends(check_room), ):
    # Websocket entry point: check_room 404s unknown rooms, then the shared
    # manager-backed RoomConsumer drives the whole session.
    consumer = RoomConsumer(ws, room_id, manager)
    await consumer.run()
|
@ -1,58 +0,0 @@
|
||||
from tortoise.contrib.pydantic import pydantic_model_creator
|
||||
from database.decorators import as_form
|
||||
from database.auth.models import UserModel
|
||||
from database.exercices.models import Exercice, Tag
|
||||
from database.room.models import Room, Parcours, AnonymousMember
|
||||
from tortoise import Tortoise
|
||||
|
||||
|
||||
# All model modules must be registered before pydantic_model_creator can
# resolve cross-module relations.
Tortoise.init_models(['database.exercices.models',
                      'database.auth.models', "database.room.models"], "models")

# Full read-only exercise representation returned by detail endpoints.
Exercice_schema = pydantic_model_creator(Exercice, name="exercice", include=[
    "name", "tags", 'id_code', "consigne", 'pdfSupport', "csvSupport", 'examples'])

# Input schema for creating/updating exercises; server-managed fields excluded.
ExerciceIn_schema = pydantic_model_creator(
    Exercice, name="exerciceIn", exclude_readonly=True, exclude=['id_code', 'exo_source', "tags_id", 'author_id', 'origin'])

# Slim exercise schema used by paginated list endpoints.
Exo_schema = pydantic_model_creator(Exercice, name="exerciceszzz", include=[
    'name', 'id_code', 'tags'])


@as_form
class ExerciceIn_form(ExerciceIn_schema):
    # Same fields as ExerciceIn_schema, receivable as multipart/form-data.
    pass


Tag_schema = pydantic_model_creator(Tag, name="tag", exclude=['owner', "id", ])
TagIn_schema = pydantic_model_creator(
    Tag, name="tagIn", exclude_readonly=True, exclude=['owner_id'])

# Public user fields only — never exposes hashed_password or clientId.
User_schema = pydantic_model_creator(UserModel, name='users', include=[
    'username', 'email', "name", "firstname"])
UserIn_schema = pydantic_model_creator(
    UserModel, name='usersIn', exclude_readonly=True)


@as_form
class UserIn_Form(UserIn_schema):
    # Form-data variant of UserIn_schema.
    pass


Room_schema = pydantic_model_creator(
    Room, name='room', include=["id", 'name', 'id_code'])

RoomIn_schema = pydantic_model_creator(Room, name='roomIn', exclude_readonly=True, exclude=[
    'created_at', 'online', 'id_code', 'users_waiters'])

Anonymous_schema = pydantic_model_creator(
    AnonymousMember, name='anonymousMember')
AnonymousIn_schema = pydantic_model_creator(
    AnonymousMember, name='anonymousMemberIn', exclude_readonly=True, exclude=['id_code', 'room_id'])

Parcours_schema = pydantic_model_creator(Parcours, name='parcours')
ParcoursIn_schema = pydantic_model_creator(
    Parcours, name='parcoursIn', exclude_readonly=True)


# SECURITY NOTE(review): the JWT signing secret is hard-coded and committed
# to version control — load it from the environment / a secret store and
# rotate this (now leaked) value.
SECRET_KEY = "6323081020d8939e6385dd688a26cbca0bb34ed91997959167637319ba4f6f3e"
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 30
|
@ -1,40 +0,0 @@
|
||||
from config import User_schema
|
||||
from database.exercices.crud import generate_unique_code
|
||||
from services.password import get_password_hash
|
||||
from .models import UserModel
|
||||
|
||||
|
||||
async def get_user_db(username):
    """Look up a user by username; returns None when no such user exists."""
    user = await UserModel.get_or_none(username=username)
    return user
|
||||
|
||||
async def get_user_from_clientId_db(clientId):
    """Resolve a user from the public clientId; None when unknown."""
    match = await UserModel.get_or_none(clientId=clientId)
    return match
|
||||
|
||||
|
||||
async def create_user_db(username, password):
    """Persist a new user row.

    NOTE(review): the *password* argument is stored directly as
    hashed_password — callers are expected to hash it beforehand; confirm
    against the register route.
    """
    new_user = await UserModel.create(username=username, hashed_password=password)
    return new_user
|
||||
|
||||
async def disable_user_db(username):
    """Mark the user *username* as disabled and return the model.

    Raises DoesNotExist when no such user exists.

    The previous version dumped the whole User_schema and fed it back through
    update_from_dict just to flip one flag; setting the field and saving only
    that column is equivalent and avoids rewriting unrelated data.
    """
    user = await UserModel.get(username=username)
    user.disabled = True
    await user.save(update_fields=["disabled"])
    return user
|
||||
|
||||
async def delete_user_db(username):
    """Permanently delete the user; raises DoesNotExist when absent."""
    user = await UserModel.get(username=username)
    await user.delete()
|
||||
|
||||
|
||||
async def update_user_db(username_id: str, **kwargs):
    """Apply arbitrary field updates to the user named *username_id*."""
    user = await UserModel.get(username=username_id)
    await user.update_from_dict(dict(kwargs)).save()
    return user
|
||||
|
||||
async def update_password_db(username, password):
    """Hash *password* and store it for *username*; returns the user.

    Fixed: a leftover debug ``print(username)`` was removed.
    """
    user = await UserModel.get(username=username)
    await user.update_from_dict(
        {'hashed_password': get_password_hash(password)}
    ).save(update_fields=["hashed_password"])
    return user
|
@ -1,22 +0,0 @@
|
||||
import uuid
|
||||
from tortoise.models import Model
|
||||
from tortoise import fields
|
||||
|
||||
|
||||
class UserModel(Model):
    """Application user account (table ``users``)."""

    id = fields.IntField(pk=True)
    # Public identifier handed to clients instead of the numeric pk.
    clientId = fields.UUIDField(unique=True, default=uuid.uuid4)

    username = fields.CharField(max_length=100, unique=True)
    hashed_password = fields.CharField(max_length=255)

    # Optional profile information.
    email = fields.CharField(null=True, max_length=255)
    name = fields.CharField(null=True, max_length=255)
    firstname = fields.CharField(null=True, max_length=255)

    # Account-deactivation flag (soft disable, row is kept).
    disabled = fields.BooleanField(default=False)

    class PydanticMeta:
        # Never expose the password hash through generated schemas.
        exclude = ['hashed_password']

    class Meta:
        table = "users"
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -1,32 +0,0 @@
|
||||
import inspect
|
||||
from typing import Type
|
||||
|
||||
from fastapi import Form
|
||||
from pydantic import BaseModel
|
||||
from pydantic.fields import ModelField
|
||||
|
||||
def as_form(cls: Type[BaseModel]):
    """Class decorator: attach an ``as_form`` dependency to a Pydantic model.

    FastAPI cannot read a Pydantic model from form data directly; this builds
    a callable whose signature mirrors the model's fields as ``Form(...)``
    parameters so it can be used with ``Depends(Model.as_form)``.
    """
    form_params = [
        inspect.Parameter(
            field.alias,
            inspect.Parameter.POSITIONAL_ONLY,
            default=Form(...) if field.required else Form(field.default),
            annotation=field.outer_type_,
        )
        for field in cls.__fields__.values()
    ]

    async def as_form_func(**data):
        return cls(**data)

    # Rewrite the wrapper's signature so FastAPI introspects the form fields.
    as_form_func.__signature__ = inspect.signature(as_form_func).replace(
        parameters=form_params)  # type: ignore
    setattr(cls, 'as_form', as_form_func)
    return cls
|
||||
|
@ -1,102 +0,0 @@
|
||||
import os
|
||||
import random
|
||||
import shutil
|
||||
import string
|
||||
from typing import List
|
||||
from config import Exercice_schema, TagIn_schema
|
||||
from services.io import get_abs_path_from_relative_to_root, get_ancestor, get_parent_dir, remove_fastapi_root, remove_if_exists, get_or_create_dir
|
||||
from tortoise import Model
|
||||
from generateur.generateur_main import Generateur
|
||||
from .models import Exercice, Tag
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
async def create_exo_db(*args, **kwargs) -> Exercice:
    """Create an exercice, injecting a freshly generated unique id_code."""
    new_code = await generate_unique_code(Exercice)
    return await Exercice.create(*args, **{**kwargs, 'id_code': new_code})
|
||||
|
||||
|
||||
async def delete_exo_db(id_code: str):
    """Delete the exercice *id_code* and its source directory on disk."""
    exo = await Exercice.get(id_code=id_code)
    source_path = get_abs_path_from_relative_to_root(exo.exo_source)
    # Each exo source lives in its own per-exercice directory: drop it whole.
    shutil.rmtree(get_parent_dir(source_path))
    return await exo.delete()
|
||||
|
||||
async def update_exo_db(id_code: str, **kwargs) -> Exercice:
    """Update exercice fields; a new exo_source replaces the old file."""
    exo = await Exercice.get(id_code=id_code)
    # Drop the previous source file so the FileField write does not leave
    # an orphan behind.
    remove_if_exists(get_abs_path_from_relative_to_root(exo.exo_source))
    await exo.update_from_dict(
        {**kwargs, 'origin_id': exo.origin_id, 'isOriginal': exo.isOriginal}
    ).save()
    return exo
|
||||
|
||||
|
||||
#flag
|
||||
async def get_or_create_tag(id_code: str, data: dict):
    """Return the tag *id_code*, creating it from *data* when absent.

    *data* is a plain dict of tag fields — the previous ``List[TagIn_schema]``
    annotation was wrong: every caller passes a dict that is splatted below.
    Fixed ``tag == None`` to the identity test ``tag is None``.
    """
    tag = await Tag.get_or_none(id_code=id_code)
    if tag is None:
        code = await generate_unique_code(Tag)
        return await Tag.create(**{**data, 'id_code': code})
    return tag
|
||||
|
||||
async def add_or_remove_tag(exo_id_code: str, tag: Tag):
    """Toggle *tag* on the exercice: add when absent, remove when present.

    Fixed ``== None`` to ``is None``.
    """
    exo = await Exercice.get(id_code=exo_id_code)
    is_present = await exo.tags.all().get_or_none(id_code=tag.id_code)
    if is_present is None:
        await exo.tags.add(tag)
    else:
        await exo.tags.remove(tag)
    return exo
|
||||
|
||||
async def add_tag_db(id_code: str, tags_data: List[TagIn_schema], user_id: int) -> Exercice:
    """Attach every tag in *tags_data* to the exercice, creating missing ones."""
    exo = await Exercice.get(id_code=id_code)
    for tag_in in tags_data:
        tag = await get_or_create_tag(
            tag_in.id_code,
            {**tag_in.dict(exclude_unset=True), 'owner_id': user_id})
        await exo.tags.add(tag)
    return exo
|
||||
|
||||
async def delete_tag_db(exo_id: str, tag_id: str) -> Exercice:
    """Detach tag *tag_id* from exercice *exo_id* (the tag itself survives)."""
    exo = await Exercice.get(id_code=exo_id)
    tag_to_remove = await exo.tags.all().get(id_code=tag_id)
    await exo.tags.remove(tag_to_remove)
    return exo
|
||||
|
||||
|
||||
def clone_exo_source(path, id_code):
    """Copy an exo source file into a fresh directory named *id_code*.

    Returns the copy's path relative to the FastAPI root.
    """
    upload_root = get_ancestor(path, 2)
    src = get_abs_path_from_relative_to_root(path)
    dest_dir = get_abs_path_from_relative_to_root(
        os.path.join(upload_root, id_code))
    get_or_create_dir(dest_dir)
    return remove_fastapi_root(shutil.copy(src, dest_dir))
|
||||
|
||||
|
||||
async def clone_exo_db(id_code: str, user_id):
    """Duplicate an exercice (and its source file) for user *user_id*.

    The clone gets a fresh id_code, is flagged non-original and keeps a
    link back to the exercice it came from.
    """
    original = await Exercice.get(id_code=id_code)
    new_id_code = await generate_unique_code(Exercice)

    fields_dict = (await Exercice_schema.from_tortoise_orm(original)).dict(
        exclude_unset=True)
    # Relations cannot be fed back into the model constructor.
    fields_dict.pop('tags')
    fields_dict.pop('exercices')

    new_source = clone_exo_source(original.exo_source, new_id_code)

    clone = Exercice(**{**fields_dict,
                        'id_code': new_id_code,
                        'exo_source': new_source,
                        'isOriginal': False,
                        'author_id': user_id},
                     origin_id=original.id)
    await clone.save()
    return clone
|
||||
|
||||
async def get_exo_source_path(id_code: str):
    """Absolute filesystem path of the exercice's source file."""
    exo = await Exercice.get(id_code=id_code)
    return get_abs_path_from_relative_to_root(exo.exo_source)
|
@ -1,62 +0,0 @@
|
||||
import os
|
||||
import typing
|
||||
import uuid
|
||||
from fastapi import UploadFile
|
||||
from tortoise.fields import TextField
|
||||
from tortoise import ConfigurationError
|
||||
import io
|
||||
from services.io import delete_root_slash, get_abs_path_from_relative_to_root, get_filename_from_path, get_or_create_dir, is_binary_file, get_filename, remove_fastapi_root
|
||||
|
||||
|
||||
|
||||
|
||||
class FileField(TextField):
    """Text column that stores a file path; assigning a file-like object
    writes it under ``upload_root`` and persists the relative path."""

    def __init__(self, *, upload_root: str, **kwargs):
        super().__init__(**kwargs)
        self.upload_root = delete_root_slash(upload_root)
        # NOTE(review): os.environ.get returns None when FASTAPI_ROOT_URL is
        # unset, which makes os.path.join raise — confirm the variable is
        # always defined in deployment.
        self.upload_root = get_or_create_dir(
            os.path.join(os.environ.get('FASTAPI_ROOT_URL'), self.upload_root))

    def _is_binary(self, file: UploadFile):
        # Binary uploads arrive as BytesIO; text uploads as StringIO.
        return isinstance(file, io.BytesIO)

    def to_db_value(self, value: typing.IO, instance):
        """Write a file-like *value* to disk and return its relative path;
        plain strings (already-stored paths) pass through untouched.

        (A leftover ``print(instance)`` debug statement was removed.)
        """
        if not isinstance(value, str):
            super().validate(value)
            value.seek(0)
            is_binary = self._is_binary(value)
            name = get_filename(value)

            parent = get_or_create_dir(
                os.path.join(self.upload_root, instance.id_code))

            mode = 'w+' if not is_binary else 'wb+'
            path = os.path.join(self.upload_root, parent, name)

            with open(path, mode) as f:
                f.write(value.read())

            return remove_fastapi_root(path)
        return value

    def to_python_value(self, value: str):
        """Load the stored path back into an in-memory buffer.

        BUG FIX: the original tested ``if not self._is_binary:`` — a bare
        method reference, which is always truthy — so the branch never ran
        and the raw path string was returned unchanged.  The method is now
        called with *value*: a path string is not a BytesIO, so the file is
        loaded as intended, while already-materialised buffers pass through.
        """
        if not self._is_binary(value):
            if is_binary_file(value):
                mode = 'rb'
                buffer = io.BytesIO()
            else:
                mode = 'r'
                buffer = io.StringIO()

            buffer.name = get_filename_from_path(value)

            with open(get_abs_path_from_relative_to_root(value), mode) as f:
                buffer.write(f.read())

            buffer.seek(0)
            return buffer
        return value
|
||||
|
@ -1,106 +0,0 @@
|
||||
import asyncio
|
||||
from io import BytesIO
|
||||
import io
|
||||
import os
|
||||
import random
|
||||
import string
|
||||
|
||||
from tortoise.models import Model
|
||||
from tortoise import fields
|
||||
from tortoise.contrib.pydantic import pydantic_model_creator
|
||||
import async_to_sync as sync
|
||||
from tortoise.manager import Manager
|
||||
from generateur.generateur_main import Generateur
|
||||
|
||||
from services.io import get_abs_path_from_relative_to_root
|
||||
|
||||
from .validators import ExoSourceValidator, get_support_compatibility_for_exo_source_from_path, get_support_compatibility_for_exo_source_from_data
|
||||
|
||||
from .customField import FileField
|
||||
|
||||
|
||||
class Tag(Model):
    """User-owned label that can be attached to exercices."""

    id = fields.IntField(pk=True)
    # Short public identifier used in URLs instead of the numeric pk.
    id_code = fields.CharField(unique=True, default="", max_length=15)

    name = fields.CharField(max_length=35)
    color = fields.CharField(max_length=100)

    owner = fields.ForeignKeyField('models.UserModel')
|
||||
|
||||
|
||||
class Exercice(Model):
    """An exercise generator: metadata plus a Python source file whose
    ``main()`` declares which render supports (pdf / csv / web) it offers."""

    id = fields.IntField(pk=True)
    id_code = fields.CharField(default="", max_length=10, unique=True)

    name = fields.CharField(max_length=50)
    consigne = fields.CharField(max_length=200, null=True, default="")

    exo_source = FileField(upload_root="/uploads",
                           validators=[ExoSourceValidator()])

    updated_at = fields.DatetimeField(auto_now=True)
    private = fields.BooleanField(default=False)

    tags = fields.ManyToManyField('models.Tag', null=True)

    # Cloning lineage: a copy points at its origin and drops isOriginal.
    origin = fields.ForeignKeyField('models.Exercice', null=True)
    isOriginal = fields.BooleanField(default=True)

    author = fields.ForeignKeyField('models.UserModel')

    def _support_flag(self, key: str) -> bool:
        """Shared body of pdfSupport/csvSupport/webSupport — the three
        originals were identical except for the dict key they returned."""
        if not isinstance(self.exo_source, io.BytesIO):
            path = get_abs_path_from_relative_to_root(self.exo_source)
            if os.path.exists(path):
                support = get_support_compatibility_for_exo_source_from_path(path)
                return support[key]
            return False
        self.exo_source.seek(0)
        support = get_support_compatibility_for_exo_source_from_data(
            self.exo_source.read())
        return support[key]

    def pdfSupport(self) -> bool:
        return self._support_flag('isPdf')

    def csvSupport(self) -> bool:
        return self._support_flag('isCsv')

    def webSupport(self) -> bool:
        return self._support_flag('isWeb')

    def examples(self) -> dict:
        """Three sample generations for preview, or {} for in-memory sources."""
        if isinstance(self.exo_source, io.BytesIO):
            return {}
        # Hoisted: each support check re-executes the source file.
        csv_ok = self.csvSupport()
        web_ok = self.webSupport()
        # BUG FIX: the generator-key expression used pdfSupport() where the
        # "type" field used webSupport(); both now agree on csv-then-web.
        support_key = "csv" if csv_ok else "web" if web_ok else None
        return {
            "type": "Csv" if csv_ok else "web" if web_ok else None,
            "data": Generateur(
                get_abs_path_from_relative_to_root(self.exo_source),
                3, support_key, True) if (csv_ok or web_ok) else None,
        }

    class PydanticMeta:
        computed = ["pdfSupport", "csvSupport", "webSupport", 'examples']
        exclude = ["exo_source", 'id', "exercices"]
|
||||
|
||||
|
@ -1,93 +0,0 @@
|
||||
from contextlib import contextmanager
|
||||
import os
|
||||
import typing
|
||||
from tortoise.validators import Validator
|
||||
from tortoise.exceptions import ValidationError
|
||||
import importlib.util
|
||||
import types
|
||||
from services.timeout import timeout
|
||||
|
||||
from .customField import is_binary_file
|
||||
|
||||
|
||||
def checkExoSupportCompatibility(obj):
    """Map a generator's declared supports to render-capability booleans.

    A support counts only when its key is present (not None) and either the
    dedicated key or the generic 'calcul' entry is set.  Replaces three
    copies of the ``False if ... else True`` anti-pattern with one helper.
    """
    def supported(key):
        value = obj[key]
        if value is None:
            return False
        # Falls back to 'calcul' when the dedicated key is False.
        return not (obj['calcul'] == False and value == False)

    return {
        'isPdf': supported('pdf'),
        'isCsv': supported('csv'),
        'isWeb': supported('web'),
    }
|
||||
|
||||
def get_module_from_string(value: str) -> types.ModuleType:
    """Execute *value* as Python source inside a fresh module object.

    SECURITY(review): ``exec`` runs arbitrary uploaded code in-process;
    this must only ever receive trusted or sandboxed input.

    Raises ValidationError (chained to the original error) when the source
    fails to execute.
    """
    module = types.ModuleType('exo')
    try:
        exec(value, module.__dict__)
    except Exception as err:
        raise ValidationError(f'[Error] : {err}') from err
    return module
|
||||
|
||||
|
||||
|
||||
def execute_main_if_present(spec):
    """Call ``spec.main()``; wrap any failure in a ValidationError.

    BUG FIX: the original caught AttributeError around the *call*, so an
    AttributeError raised inside main() was misreported as "main is
    missing"; the lookup and the call are now separated.  Also removed a
    placeholder-less f-string.
    """
    main = getattr(spec, 'main', None)
    if main is None:
        raise ValidationError("[Error] : function 'main' is missing")
    try:
        return main()
    except Exception as e:
        raise ValidationError(f'[Error] : {e}') from e
|
||||
|
||||
def get_spec_with_timeout(data, time):
    """Exec *data* as a module, aborting after *time* seconds."""
    too_long = ValidationError('[Error] : Script took too long')
    with timeout(time, too_long):
        return get_module_from_string(data)
|
||||
|
||||
def fill_empty_values(values):
    """Return *values* with every missing support key defaulted to False.

    (Parameter renamed from ``object``, which shadowed the builtin; all
    callers pass it positionally.)
    """
    defaults = {"calcul": False, 'pdf': False, 'csv': False,
                'web': False, 'correction': False}
    return {**defaults, **values}
|
||||
|
||||
|
||||
|
||||
def get_support_compatibility_for_exo_source_from_data(data: str):
    """Execute exercise source *data* and report its render supports."""
    module = get_spec_with_timeout(data, 5)
    declared = fill_empty_values(execute_main_if_present(module))
    return checkExoSupportCompatibility(declared)
|
||||
|
||||
|
||||
def get_support_compatibility_for_exo_source_from_path(path):
    """Like ``..._from_data`` but reads the source from *path* first."""
    if not os.path.exists(path):
        raise ValidationError('[Error] : No such file or directory')
    mode = 'rb' if is_binary_file(path) else 'r'
    with open(path, mode) as f:
        raw = f.read()
    data = raw if mode == "r" else raw.decode('utf8')
    return get_support_compatibility_for_exo_source_from_data(data)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
class ExoSourceValidator(Validator):
    """Reject exercise sources that are compatible with no render support.

    (The original carried a placeholder docstring "A validator to
    validate ...".)
    """

    def __call__(self, value: typing.IO):
        supports = get_support_compatibility_for_exo_source_from_data(
            value.read())
        # The dict holds exactly isPdf / isCsv / isWeb.
        if not any(supports.values()):
            raise ValidationError(
                '[Error] : Exercice non valide (compatible avec aucun support)')
|
@ -1,79 +0,0 @@
|
||||
import time
|
||||
from typing import List
|
||||
|
||||
from fastapi import Depends, FastAPI, HTTPException
|
||||
|
||||
import backend.api.database.exercices.crud as crud, database, backend.api.database.exercices.models as models, schemas
|
||||
from database import db_state_default
|
||||
|
||||
# One-off schema creation at import time (peewee demo pattern).
database.db.connect()
database.db.create_tables([models.User, models.Item])
database.db.close()

app = FastAPI()

# Artificial latency used by /slowusers/; shrinks one second per call.
sleep_time = 10
|
||||
|
||||
|
||||
async def reset_db_state():
    # Reset the context-local connection state so each request gets a
    # fresh, isolated database context (peewee-with-FastAPI recipe).
    database.db._state._state.set(db_state_default.copy())
    database.db._state.reset()
|
||||
|
||||
|
||||
def get_db(db_state=Depends(reset_db_state)):
    # Dependency that opens a connection for the request and always
    # closes it afterwards, even when the handler raises.
    try:
        database.db.connect()
        yield
    finally:
        if not database.db.is_closed():
            database.db.close()
|
||||
|
||||
|
||||
@app.post("/users/", response_model=schemas.User, dependencies=[Depends(get_db)])
def create_user(user: schemas.UserCreate):
    """Create a user; 400 when the email is already registered."""
    db_user = crud.get_user_by_email(email=user.email)
    if db_user:
        raise HTTPException(status_code=400, detail="Email already registered")
    return crud.create_user(user=user)
|
||||
|
||||
|
||||
@app.get("/users/", response_model=List[schemas.User], dependencies=[Depends(get_db)])
def read_users(skip: int = 0, limit: int = 100):
    """Paginated list of users."""
    users = crud.get_users(skip=skip, limit=limit)
    return users
|
||||
|
||||
|
||||
@app.get(
    "/users/{user_id}", response_model=schemas.User, dependencies=[Depends(get_db)]
)
def read_user(user_id: int):
    """Fetch one user by id; 404 when it does not exist."""
    db_user = crud.get_user(user_id=user_id)
    if db_user is None:
        raise HTTPException(status_code=404, detail="User not found")
    return db_user
|
||||
|
||||
|
||||
@app.post(
    "/users/{user_id}/items/",
    response_model=schemas.Item,
    dependencies=[Depends(get_db)],
)
def create_item_for_user(user_id: int, item: schemas.ItemCreate):
    """Create an item owned by *user_id*."""
    return crud.create_user_item(item=item, user_id=user_id)
|
||||
|
||||
|
||||
@app.get("/items/", response_model=List[schemas.Item], dependencies=[Depends(get_db)])
def read_items(skip: int = 0, limit: int = 100):
    """Paginated list of all items."""
    items = crud.get_items(skip=skip, limit=limit)
    return items
|
||||
|
||||
|
||||
@app.get(
    "/slowusers/", response_model=List[schemas.User], dependencies=[Depends(get_db)]
)
def read_slow_users(skip: int = 0, limit: int = 100):
    """Same as /users/ but sleeps to simulate a slow endpoint.

    The module-level delay shrinks by one second per call (never below 0).
    """
    global sleep_time
    sleep_time = max(0, sleep_time - 1)
    time.sleep(sleep_time)  # Fake long processing request
    users = crud.get_users(skip=skip, limit=limit)
    return users
|
@ -1,90 +0,0 @@
|
||||
from config import AnonymousIn_schema, RoomIn_schema, User_schema
|
||||
from database.auth.models import UserModel
|
||||
from .models import AnonymousMember, Room, RoomOwner, Waiter
|
||||
from database.exercices.crud import generate_unique_code
|
||||
|
||||
|
||||
|
||||
async def create_room_with_user_db(room: RoomIn_schema, user: User_schema):
    """Create a room owned by a registered user and add them as a member."""
    code = await generate_unique_code(Room)

    room_obj = await Room.create(**room.dict(exclude_unset=True), id_code=code)

    # Re-fetch the ORM instance: *user* here is a pydantic schema, not a model.
    user = await UserModel.get(id=user.id)
    await room_obj.users.add(user)
    await RoomOwner.create(room_id=room_obj.id, user_id=user.id)
    return room_obj
|
||||
|
||||
async def create_room_anonymous_db(room: RoomIn_schema, anonymous: AnonymousIn_schema):
    """Create a room owned by a freshly created anonymous member."""
    code = await generate_unique_code(Room)

    room_obj = await Room.create(**room.dict(exclude_unset=True), id_code=code)

    # The anonymous owner is created inside the new room, then linked as owner.
    anonymous_code = await generate_unique_code(AnonymousMember)
    anonymous = await AnonymousMember.create(**anonymous.dict(exclude_unset=True), id_code=anonymous_code, room_id=room_obj.id)

    await RoomOwner.create(room_id=room_obj.id, anonymous_id=anonymous.id)
    return room_obj
|
||||
|
||||
|
||||
async def get_room_db(room_id: str):
    """Fetch a room by its public id_code; None when absent."""
    return await Room.get_or_none(id_code=room_id)
|
||||
|
||||
|
||||
|
||||
async def check_user_in_room(room: Room, user: UserModel):
    """True when *user* is already a member of *room*."""
    member_count = await room.users.filter(id=user.id).count()
    return member_count != 0
|
||||
|
||||
async def get_member_by_code(room: Room, code: str):
    """Anonymous member of *room* with id_code *code*, or None."""
    members = await room.anonymousmembers
    return next((m for m in members if m.id_code == code), None)
|
||||
|
||||
async def check_user_owner(room: Room, user: UserModel):
    """True when *room* is owned by the registered user *user*.

    Fixed ``== None`` to ``is None``.
    """
    room_owner = await room.room_owner
    user_owner = await room_owner.user
    if user_owner is None:
        return False
    return user_owner.id == user.id
|
||||
|
||||
async def check_anonymous_owner(room: Room, anonymous: AnonymousMember):
    """True when *room* is owned by the anonymous member *anonymous*.

    Fixed ``== None`` to ``is None``.
    """
    room_owner = await room.room_owner
    anonymous_owner = await room_owner.anonymous
    if anonymous_owner is None:
        return False
    return anonymous_owner.id_code == anonymous.id_code
|
||||
|
||||
|
||||
|
||||
async def create_waiter_by_user(room: Room, user: UserModel ):
    """Queue a registered user as waiting to join *room*."""
    code = await generate_unique_code(Waiter)
    return await Waiter.create(room_id=room.id, user_id=user.id, name=user.username, id_code=code)
|
||||
|
||||
async def create_waiter_anonymous(room: Room, name: str):
    """Queue an anonymous visitor (no user row) as waiting to join *room*."""
    code = await generate_unique_code(Waiter)
    return await Waiter.create(name=name, id_code=code, room_id=room.id)
|
||||
|
||||
async def connect_room(room: Room, code):
    """Append *code* to the room's online list and persist only that field."""
    updated = {'online': [*room.online, code]}
    await room.update_from_dict(updated).save(update_fields=['online'])
    return
|
||||
async def disconnect_room(room: Room, code):
    """Remove every occurrence of *code* from the room's online list."""
    remaining = [member for member in room.online if member != code]
    await room.update_from_dict({'online': remaining}).save(update_fields=['online'])
    return
|
||||
|
||||
async def validate_name_in_room(room: Room, name):
    """Validate a proposed anonymous pseudo for *room*.

    Returns True when valid, otherwise a French error message.

    BUG FIX: the original compared the member *objects* to the string
    (``a == name``), so the duplicate-pseudo check could never fire; the
    members' ``name`` attribute is compared instead.
    """
    anonymous = await room.anonymousmembers
    if any(a.name == name for a in anonymous):
        return "Pseudo déjà utilisé"
    if len(name) < 3:
        return "Pseudo trop court"
    if len(name) > 30:
        return "Pseudo trop long"
    return True
|
||||
|
||||
|
@ -1,72 +0,0 @@
|
||||
from email.policy import default
|
||||
from tortoise.models import Model
|
||||
from tortoise import fields
|
||||
|
||||
|
||||
class Room(Model):
    """A live session that users (and anonymous members) can join."""

    id = fields.IntField(pk=True)
    id_code = fields.CharField(max_length=30, unique=True)

    name = fields.CharField(max_length=255)

    created_at = fields.DatetimeField(auto_now_add=True)

    public_result = fields.BooleanField(default=False)
    private = fields.BooleanField(default=True)

    # id_codes of currently connected members (see connect_room/disconnect_room).
    online = fields.JSONField(default=list)

    users = fields.ManyToManyField('models.UserModel')

    class PydanticMeta:
        # BUG FIX: was `exlude=('users__email')` — a misspelled key holding a
        # bare string (the parentheses did not make a tuple), so the setting
        # was silently ignored by pydantic_model_creator.
        exclude = ('users__email',)
|
||||
|
||||
class AnonymousMember(Model):
    """A room participant who joined without a registered account."""
    id = fields.IntField(pk=True)
    # Public identifier, also used as a re-login code.
    id_code = fields.CharField(max_length=30, unique=True)

    name = fields.CharField(max_length=255)

    room = fields.ForeignKeyField('models.Room')

    class PydanticMeta:
        exclude=['room_owner', "id_code", 'id', 'challenger']
|
||||
|
||||
|
||||
class Waiter(Model):
    """Someone waiting for the room owner to accept them into a room."""
    id = fields.IntField(pk=True)
    name = fields.CharField(max_length=255)
    id_code = fields.CharField(max_length=30, unique=True)
    room = fields.ForeignKeyField('models.Room')
    # Null when the waiter is anonymous.
    user = fields.ForeignKeyField("models.UserModel", null=True)
|
||||
|
||||
class RoomOwner(Model):
    """Ownership link for a room: exactly one of user / anonymous is set."""
    user = fields.ForeignKeyField('models.UserModel', null = True)
    anonymous = fields.OneToOneField('models.AnonymousMember', null=True)
    room = fields.OneToOneField('models.Room', null=True)
    class Meta:
        table='room_owner'
|
||||
|
||||
|
||||
class Parcours(Model):
    """A timed exercise run configured inside a room."""
    id = fields.IntField(pk=True)
    id_code = fields.CharField(max_length=30, unique=True)

    name = fields.CharField(max_length=255)
    created_at = fields.DateField(auto_now_add=True)

    room = fields.ForeignKeyField('models.Room')

    # Time limit — presumably seconds per challenge; TODO confirm unit.
    timer = fields.IntField(default=10)

    # List of exercice references taking part in the run.
    exercices = fields.JSONField(default=list)

    success_condition = fields.IntField(default = 10)
|
||||
|
||||
|
||||
class Challenger(Model):
    """A participant's progress through a parcours."""
    parcours = fields.ForeignKeyField('models.Parcours')
    # Exactly one of anonymous / user identifies the participant.
    anonymous = fields.OneToOneField('models.AnonymousMember', null=True)
    user = fields.OneToOneField("models.UserModel", null=True)

    challenges = fields.JSONField(default=list)
|
Binary file not shown.
@ -1,72 +0,0 @@
|
||||
from .generateur_main import Generateur
|
||||
# Rows that fit on one printed page, keyed by font size.
PAGE_LINES = {
    10: 53,
    12: 49,
    14: 39,
    16: 34,
    18: 31
}
# Maximum characters per cell, keyed by font size.
MAX_LENGTH = {
    10: 38,
    12: 32,
    14: 25,
    16: 23,
    18: 20
}
|
||||
|
||||
|
||||
def Csv_generator(path, nb_in_serie, nb_page, police, consigne, writer):
    """Write *nb_page* printable pages of generated exercises to *writer*.

    path        -- exercise source file fed to Generateur
    nb_in_serie -- rows of calculs per series
    nb_page     -- number of pages to emit
    police      -- font size (a key of PAGE_LINES / MAX_LENGTH)
    consigne    -- instruction text repeated above each series
    writer      -- csv.writer-like object receiving the rows
    """
    # One sample generation, used only for sizing below.
    # NOTE(review): exo_exemple is the *list* returned by Generateur, so
    # len(exo_exemple) counts items, not characters — confirm intended.
    exo_exemple = Generateur(path, 1, 'csv')
    # Column width: the wider of consigne/example, capped at the font's max.
    if len(consigne) < MAX_LENGTH[police] and len(consigne) > len(exo_exemple):
        longueur_max = len(consigne) + 5
    elif len(consigne) > MAX_LENGTH[police] and len(consigne) > len(exo_exemple):
        longueur_max = MAX_LENGTH[police]
    elif len(consigne) > MAX_LENGTH[police] and len(consigne) < len(exo_exemple):
        longueur_max = len(exo_exemple)
    elif len(consigne) < MAX_LENGTH[police] and len(consigne) < len(exo_exemple):
        longueur_max = len(exo_exemple)
    else:
        longueur_max = len(exo_exemple)

    # Word-wrap the consigne onto lines no longer than longueur_max.
    consigne_lines = []
    if len(consigne) > 30:
        cons = consigne.replace(',', ' ').split(' ')
        text_longueur = ''
        for i in cons:
            text_longueur = text_longueur + i + ' '
            if len(text_longueur) > longueur_max:
                consigne_lines.append(text_longueur)
                text_longueur = ''
        # print(text_longueur)
    else:
        consigne_lines.append(consigne)
    # Series that fit vertically on one page (+1 blank separator row each).
    serie_page_vertical = int(PAGE_LINES[police] /
                              (nb_in_serie + 1 + len(consigne_lines)))

    # Blank rows needed to pad the page to a full PAGE_LINES height.
    rest_line = PAGE_LINES[police] - (serie_page_vertical * nb_in_serie +
                                      serie_page_vertical * len(consigne_lines) + serie_page_vertical)

    max_length = len(exo_exemple) if len(
        exo_exemple) > longueur_max else longueur_max
    max_in_line = 2 * MAX_LENGTH[police]
    space = max_in_line / 8

    # Number of calcul columns that fit side by side.
    nb_in_line = int(max_in_line / (max_length + space)) + 1

    for p in range(nb_page):
        for c in range(serie_page_vertical):

            # Consigne header, repeated across the columns.
            for w in consigne_lines:
                writer.writerow([*[w, ""] * nb_in_line])

            for k in range(nb_in_serie):
                calcul_list = list(
                    map(lambda calc: calc['calcul'], Generateur(path, nb_in_line, 'csv')))
                # Interleave empty cells between calculs for spacing.
                n = 1
                for i in range(n, len(calcul_list) + n + 1, n+1):
                    calcul_list.insert(i, '')
                writer.writerow(calcul_list)
            writer.writerow([''])

        # Pad the remainder of the page with blank rows.
        for r in range(rest_line):
            writer.writerow([''])
|
@ -1,49 +0,0 @@
|
||||
import re
|
||||
import importlib.util
|
||||
|
||||
|
||||
def getObjectKey(obj, key):
    """Key under which *obj* holds the calcul text for render support *key*.

    None when the support is explicitly absent; falls back to the generic
    'calcul' entry when the dedicated key is False.
    Fixed ``== None`` to ``is None``.
    """
    if obj[key] is None:
        return None
    return key if obj[key] != False else 'calcul' if obj['calcul'] != False else None
|
||||
|
||||
|
||||
def getCorrectionKey(obj, key):
    """Key holding the correction text for *key*.

    An explicit 'correction' entry wins; otherwise the dedicated key, then
    the generic 'calcul', else None.
    """
    if obj['correction'] != False:
        return 'correction'
    if obj[key] != False:
        return key
    if obj['calcul'] != False:
        return 'calcul'
    return None
|
||||
|
||||
|
||||
def parseCorrection(calc, replacer='...'):
    """Blank out every bracketed answer token ``[xyz]`` in *calc*.

    Each token is replaced by *replacer* (a lambda keeps any backslashes in
    *replacer* literal).
    """
    return re.sub(r"\[([A-Za-z0-9_]+)\]", lambda _match: replacer, calc)
|
||||
|
||||
|
||||
def Generateur(path, quantity, key, forcedCorrection=False):
    """Load the exercise module at *path* and generate *quantity* items.

    Each item is ``{'calcul': ..., 'correction': ...}``; the calcul side has
    its bracketed answers blanked out unless the module provides an explicit
    correction (or *forcedCorrection* is set).  Returns None when the module
    has no ``main()``.

    Fixes: the two bare ``except:`` clauses (which hid real errors) are
    narrowed to AttributeError / replaced by getattr, and a leftover debug
    ``print(main_result)`` was removed.
    """
    spec = importlib.util.spec_from_file_location("tmp", path)
    tmp = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(tmp)
    try:
        main_func = tmp.main
    except AttributeError:
        return None
    main_result = main_func()
    # Keys the author did not set default to False.
    default_object = {"calcul": False, 'pdf': False, 'csv': False,
                      'web': False, 'correction': False}
    result_object = {**default_object, **main_result}
    object_key = getObjectKey(result_object, key)
    correction_key = getCorrectionKey(result_object, key)
    replacer = getattr(tmp, 'CORRECTION_REPLACER', '...')
    op_list = []
    for _ in range(quantity):
        main = {**default_object, **main_func()}
        # Blank the answers unless the module supplies its own correction.
        blank = forcedCorrection or (key != 'web' and main['correction'] == False)
        op_list.append({
            'calcul': parseCorrection(main[object_key], replacer)
            if blank else main[object_key],
            'correction': main[correction_key],
        })
    return op_list
|
@ -1,135 +0,0 @@
|
||||
<!DOCTYPE html>
<html>
  <head>
    <title>Chat</title>
  </head>
  <body>
    <h1>WebSocket Room</h1>

    <section id="connection">
      <h2>Connection</h2>
      <!-- NOTE(review): the password input uses type="text", so typed
           passwords are visible on screen — should presumably be
           type="password". -->
      <form action="" onsubmit="login(event)">
        <input type="text" placeholder="Username..." id="username" />
        <input type="text" placeholder="Password..." id="password" />
        <button>Se connecter</button>
      </form>
    </section>

    <section>
      <h2>Room</h2>

      <section>
        <h3>Create or Join</h3>

        <form action="" onsubmit="join(event)">
          <input type="text" id="room_code" />
          <input type="text" id="name" />
          <input type="text" id="reco" />
          <button>join</button>
        </form>
      </section>
    </section>

    <p>Members</p>
    <u id="members"></u>

    <p>Waiters</p>
    <u id="waiters"></u>

    <script>
      let ws = null;
      let token = null;

      // Log in over HTTP and keep the JWT for the websocket handshake.
      async function login(e) {
        e.preventDefault();
        var username = document.getElementById("username").value;
        var password = document.getElementById("password").value;
        var form = new FormData();
        form.append("username", username);
        form.append("password", password);
        var data = new URLSearchParams(form);

        token = await fetch("http://localhost:8001/login", {
          method: "post",
          body: data,
          headers: { "Content-Type": "application/x-www-form-urlencoded" },
        })
          .then((res) => {
            return res.json();
          })
          .then((r) => {
            var token = r["access_token"];
            var connect_section = document.getElementById("connection");
            var p = document.createElement("p");
            // The JWT payload's "sub" claim carries the username.
            var name = JSON.parse(atob(r["access_token"].split(".")[1]))["sub"];
            name = document.createTextNode(`Connected as ${name}`);

            connect_section.appendChild(p);
            connect_section.appendChild(name);
            return token;
          });
      }

      // Open the websocket and drive the join/login handshake.
      function join(e) {
        e.preventDefault();
        var room_code = document.getElementById("room_code").value;
        // NOTE(review): the browser WebSocket constructor ignores this third
        // "headers" argument — the token is actually sent later in the
        // "auth" message below.
        ws = new WebSocket(`ws://localhost:8001/ws/${room_code}`, [], {
          headers: { Authorization: "Bearer " + token },
        });

        ws.onmessage = (msg) => {
          var type = JSON.parse(msg.data)["type"];
          var data = JSON.parse(msg.data)["data"];
          console.log("TYPE", type, type == "add_waiter");
          if (type == "accept") {
            if (token == null) {
              // Anonymous flow: re-login code when no name is given.
              var name = document.getElementById("name").value;
              var reco = document.getElementById("reco").value;
              if (name == "") {
                ws.send(
                  JSON.stringify({
                    type: "login",
                    data: { relogin_code: reco },
                  })
                );
              } else {
                ws.send(
                  JSON.stringify({ type: "login", data: { name: name } })
                );
              }
            } else {
              ws.send(JSON.stringify({ type: "auth", data: { token: token } }));
            }
          }
          if (type == "auth_success") {
            ws.send(JSON.stringify({ type: "login", data: {} }));
          }
          if (type == "auth_failed") {
            ws.send(
              JSON.stringify({ type: "login", data: { name: "test_name" } })
            );
          }
          if (type == "add_waiter") {
            var name = data["name"];
            var id = data["id"];
            name = document.createTextNode(name);

            var waiter = document.getElementById("waiters");
            var li = document.createElement("li");
            // NOTE(review): btn is created but never appended or used.
            var btn = document.createElement("button");
            waiter.appendChild(li);
            li.appendChild(name);
            // Clicking a waiter entry accepts them into the room.
            li.addEventListener("click", () => {
              console.log("TET");
              ws.send(
                JSON.stringify({ type: "accept_waiter", data: { id: id } })
              );
            });
          }
          if (type == "log_waiter") {
            ws.send(JSON.stringify({ type: "log_waiter", data: {} }));
          }
        };
      }
    </script>
  </body>
</html>
|
@ -1,127 +0,0 @@
|
||||
from fastapi_pagination import add_pagination
|
||||
from fastapi.responses import PlainTextResponse
|
||||
from fastapi.exceptions import RequestValidationError, ValidationError
|
||||
from datetime import timedelta
|
||||
from fastapi import FastAPI, HTTPException, Depends, Request, status
|
||||
from fastapi_jwt_auth import AuthJWT
|
||||
from fastapi_jwt_auth.exceptions import AuthJWTException
|
||||
from fastapi.responses import JSONResponse
|
||||
from typing import List
|
||||
from tortoise.contrib.pydantic import pydantic_model_creator
|
||||
from fastapi import FastAPI, HTTPException
|
||||
from tortoise import Tortoise
|
||||
from database.exercices.models import Exercice
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from tortoise.contrib.fastapi import register_tortoise
|
||||
from pydantic import BaseModel
|
||||
import apis.base
|
||||
import config
|
||||
from redis import Redis
|
||||
from fastapi.encoders import jsonable_encoder
|
||||
|
||||
app = FastAPI(title="Tortoise ORM FastAPI example")
|
||||
origins = [
|
||||
"http://localhost:8000",
|
||||
"https://localhost:8001",
|
||||
"http://localhost",
|
||||
"http://localhost:8080",
|
||||
]
|
||||
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=['*'],
|
||||
allow_credentials=True,
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
@app.exception_handler(RequestValidationError)
@app.exception_handler(ValidationError)
async def validation_exception_handler(request, exc: RequestValidationError):
    """Flatten pydantic validation errors into {"<field>_error": message} pairs.

    Returns a 422 JSON response whose `detail` maps each failing field
    (last element of the error location) to its message.
    """
    errors = {f"{err['loc'][-1]}_error": err['msg'] for err in exc.errors()}
    return JSONResponse(
        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
        content=jsonable_encoder({"detail": errors}),
    )
|
||||
|
||||
|
||||
class Settings(BaseModel):
    """fastapi-jwt-auth configuration plus token-lifetime constants.

    `access_expires` / `refresh_expires` are used as redis TTLs for
    revoked-token entries. They hold `timedelta` values, so they are
    annotated as such — the previous `int` annotation did not match the
    defaults actually assigned.
    """
    authjwt_secret_key: str = config.SECRET_KEY
    # Enable the denylist so revoked access/refresh tokens are rejected.
    authjwt_denylist_enabled: bool = True
    authjwt_denylist_token_checks: set = {"access", "refresh"}
    access_expires: timedelta = timedelta(minutes=15)
    refresh_expires: timedelta = timedelta(days=30)
|
||||
|
||||
# callback to get your configuration
|
||||
|
||||
settings = Settings()
|
||||
@AuthJWT.load_config
def get_config():
    """Hand the module-level Settings instance to fastapi-jwt-auth."""
    return settings
|
||||
|
||||
|
||||
# exception handler for authjwt
|
||||
# in production, you can tweak performance using orjson response
|
||||
@app.exception_handler(AuthJWTException)
def authjwt_exception_handler(request: Request, exc: AuthJWTException):
    """Translate fastapi-jwt-auth errors into a JSON {"detail": message} body."""
    return JSONResponse(status_code=exc.status_code,
                        content={"detail": exc.message})
|
||||
|
||||
|
||||
redis_conn = Redis(host='localhost', port=6379, db=0, decode_responses=True)
|
||||
|
||||
|
||||
@AuthJWT.token_in_denylist_loader
def check_if_token_in_denylist(decrypted_token):
    """Return True when this token's jti has been revoked.

    Revoked jtis are stored in redis with the value 'true' (see the
    /access-revoke and /refresh-revoke endpoints). Returns a real bool:
    the previous `entry and entry == 'true'` leaked None/'' for unknown
    jtis (same truthiness, but a dishonest return type).
    """
    jti = decrypted_token['jti']
    entry = redis_conn.get(jti)
    # redis returns None for unknown keys; `== 'true'` normalizes to bool.
    return entry == 'true'
|
||||
|
||||
app.include_router(apis.base.api_router)
|
||||
|
||||
|
||||
@app.delete('/access-revoke')
def access_revoke(Authorize: AuthJWT = Depends()):
    """Revoke the caller's access token by denylisting its jti in redis.

    The redis entry carries a TTL equal to the token lifetime, so it is
    purged automatically once the token would have expired anyway.
    """
    Authorize.jwt_required()
    jti = Authorize.get_raw_jwt()['jti']
    redis_conn.setex(jti, settings.access_expires, 'true')
    return {"detail": "Access token has been revoke"}
|
||||
|
||||
|
||||
@app.delete('/refresh-revoke')
def refresh_revoke(Authorize: AuthJWT = Depends()):
    """Revoke the caller's refresh token by denylisting its jti in redis."""
    Authorize.jwt_refresh_token_required()
    jti = Authorize.get_raw_jwt()['jti']
    # TTL = refresh lifetime: the entry disappears once the token expires.
    redis_conn.setex(jti, settings.refresh_expires, 'true')
    return {"detail": "Refresh token has been revoke"}
|
||||
add_pagination(app)
|
||||
|
||||
TORTOISE_ORM = {
|
||||
"connections": {"default": "sqlite://database/db.sqlite3"},
|
||||
"apps": {
|
||||
"models": {
|
||||
"models": ["database.exercices.models", 'database.auth.models', "database.room.models","aerich.models"],
|
||||
"default_connection": "default",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
register_tortoise(
|
||||
app,
|
||||
config=TORTOISE_ORM,
|
||||
#db_url="sqlite://database/db.sqlite3",
|
||||
modules={"models": ["database.exercices.models", 'database.auth.models']},
|
||||
generate_schemas=True,
|
||||
add_exception_handlers=True,
|
||||
)
|
@ -1,4 +0,0 @@
|
||||
[tool.aerich]
|
||||
tortoise_orm = "main.TORTOISE_ORM"
|
||||
location = "./migrations"
|
||||
src_folder = "./."
|
@ -1,47 +0,0 @@
|
||||
from typing import Optional
|
||||
from pydantic import BaseModel, validator
|
||||
from services.password import validate_password
|
||||
|
||||
from database.decorators import as_form
|
||||
|
||||
|
||||
@as_form
class UserForm(BaseModel):
    """Editable profile fields, submitted as an HTML form via `as_form`."""
    username: str
    # Optional fields default to None (pydantic v1 Optional behavior).
    firstname: Optional[str] = None
    name: Optional[str] = None
    email: Optional[str] = None
|
||||
|
||||
@as_form
class User(BaseModel):
    """Login credentials submitted as a form."""
    username: str
    password: str
|
||||
|
||||
|
||||
@as_form
class UserRegister(User):
    """Registration payload: credentials plus a confirmation field.

    Field validators enforce an alphanumeric username, password strength
    (via services.password.validate_password), and matching passwords.
    """
    password_confirm: str

    @validator('username')
    def username_alphanumeric(cls, v):
        # pydantic reports a failed assert as the validation error message.
        assert v.isalnum(), 'must be alphanumeric'
        return v

    @validator('password')
    def password_validation(cls, v):
        verdict = validate_password(v)
        if verdict is not True:
            # validate_password returns the failure reason as a string.
            raise ValueError(verdict)
        return v

    @validator('password_confirm')
    def password_match(cls, v, values):
        # `values` only contains fields that already validated; when
        # `password` is absent the default makes the comparison a no-op.
        if v != values.get('password', v):
            raise ValueError('Les mots de passe ne correspondent pas')
        return v
|
||||
|
||||
|
||||
@as_form
class PasswordSet(BaseModel):
    """Password-change payload: the new password typed twice."""
    password: str
    password_confirm: str
|
@ -1,138 +0,0 @@
|
||||
from uuid import UUID
|
||||
from database.decorators import as_form
|
||||
from services.password import get_password_hash, validate_password
|
||||
from fastapi_jwt_auth import AuthJWT
|
||||
from datetime import datetime, timedelta
|
||||
from jose import jwt, JWTError
|
||||
from fastapi import Depends, HTTPException, Request, status
|
||||
from config import SECRET_KEY, ALGORITHM, ExerciceIn_schema, User_schema
|
||||
from database.auth.crud import get_user_db
|
||||
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
|
||||
from pydantic import BaseModel
|
||||
from fastapi.exceptions import RequestValidationError
|
||||
|
||||
from database.auth.models import UserModel
|
||||
from .jwt import create_access_token
|
||||
from passlib.context import CryptContext
|
||||
from .password import verify_password
|
||||
from database.exercices.models import Exercice, Tag
|
||||
from fastapi.security.utils import get_authorization_scheme_param
|
||||
from schema.user import User, UserRegister, PasswordSet
|
||||
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="login", auto_error=False)
|
||||
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||
|
||||
|
||||
class TokenData(BaseModel):
|
||||
clientId: str | None = None
|
||||
|
||||
|
||||
|
||||
async def authenticate_user(user: User = Depends(User.as_form)):
    """FastAPI dependency: resolve submitted credentials to a user row.

    Raises 401 with a field-specific `detail` when the username is
    unknown or the password does not match the stored bcrypt hash.
    """
    def _unauthorized(detail):
        # Shared 401 shape; the frontend reads the *_error keys.
        return HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=detail,
            headers={"WWW-Authenticate": "Bearer"},
        )

    user_db = await get_user_db(user.username)
    if not user_db:
        raise _unauthorized({"username_error": "Utilisateur introuvable"})
    if not verify_password(user.password, user_db.hashed_password):
        raise _unauthorized({"password_error": "Mot de passe invalide"})
    return user_db
|
||||
|
||||
|
||||
async def get_user(username):
    """Fetch a user by username and serialize it; None when absent."""
    user = await get_user_db(username)
    return await User_schema.from_tortoise_orm(user) if user else None
|
||||
|
||||
|
||||
async def get_user_from_token(token: str):
    """Decode a JWT and load the user its `sub` claim points at.

    Returns None when the token carries no subject; jose raises JWTError
    for invalid signatures (propagated to the caller).
    """
    payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
    client_id = payload.get("sub")
    if client_id is None:
        return None
    return await UserModel.get_or_none(clientId=client_id)
|
||||
|
||||
|
||||
|
||||
''' async def check_tag_owner(exo_id: str, user: User_schema = Depends(get_current_active_user), ):
|
||||
tag = await Tag.get(id_code=exo_id)
|
||||
if tag.owner_id != user.id:
|
||||
raise HTTPException(status_code=401, detail="Non autorisé")
|
||||
return user '''
|
||||
|
||||
|
||||
def jwt_required(Authorize: AuthJWT = Depends()):
    """Dependency: require a valid (possibly non-fresh) access token."""
    Authorize.jwt_required()
    return Authorize


def jwt_optional(Authorize: AuthJWT = Depends()):
    """Dependency: load JWT claims when present, without requiring them."""
    Authorize.jwt_optional()
    return Authorize


def jwt_refresh_required(Authorize: AuthJWT = Depends()):
    """Dependency: require a valid refresh token."""
    Authorize.jwt_refresh_token_required()
    return Authorize


def fresh_jwt_required(Authorize: AuthJWT = Depends()):
    """Dependency: require a *fresh* access token (one issued at login)."""
    Authorize.fresh_jwt_required()
    return Authorize
|
||||
|
||||
|
||||
|
||||
|
||||
def get_current_clientId(Authorize: AuthJWT = Depends(jwt_required)):
    """Return the `sub` claim (clientId) of the authenticated token."""
    return Authorize.get_jwt_subject()


async def get_current_user(clientId: str = Depends(get_current_clientId)):
    """Resolve the authenticated clientId to a user row, or raise 401."""
    user = await UserModel.get_or_none(clientId=clientId)
    if user is None:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
                            detail='User not found')
    return user
|
||||
|
||||
async def get_current_user_optional(Authorize: AuthJWT = Depends(jwt_optional)):
    """Like get_current_user, but yields None for anonymous requests."""
    client_id = Authorize.get_jwt_subject()
    if not client_id:
        return None
    return await UserModel.get_or_none(clientId=client_id)
|
||||
|
||||
async def check_unique_user(username: str):
    """Raise 422 when the username is already taken; return it otherwise."""
    existing = await UserModel.get_or_none(username=username)
    if existing is not None:
        raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                            detail={"username_error": "Déjà pris "})
    return username
|
||||
|
||||
|
||||
def validate_passwords(passwords: PasswordSet = Depends(PasswordSet.as_form)):
    """Check that both submitted passwords match and satisfy strength rules.

    Returns the validated plain-text password; raises 400 otherwise.
    """
    if passwords.password != passwords.password_confirm:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
                            detail='Les mots de passe ne correspondent pas !')
    # validate_password returns True, or a string explaining the failure.
    strength = validate_password(passwords.password)
    if strength is not True:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
                            detail=f'Invalid password : {strength}')
    return passwords.password
|
||||
|
||||
|
||||
async def validate_register_user(user: UserRegister = Depends(UserRegister.as_form)):
    """Full registration validation: unique username + password rules."""
    await check_unique_user(user.username)
    validate_passwords(PasswordSet(password=user.password,
                                   password_confirm=user.password_confirm))
    return user
|
||||
|
||||
|
||||
async def check_author_exo(id_code: str, user: UserModel = Depends(get_current_user)):
    """Ensure the current user authored the exercise; raise 401 otherwise."""
    exo = await Exercice.get(id_code=id_code)
    if exo.author_id != user.id:
        raise HTTPException(status_code=401, detail="Non autorisé")
    return user
|
@ -1,56 +0,0 @@
|
||||
|
||||
|
||||
import os
|
||||
import typing
|
||||
import uuid
|
||||
|
||||
TEXTCHARS = bytearray({7, 8, 9, 10, 12, 13, 27} |
|
||||
set(range(0x20, 0x100)) - {0x7f})
|
||||
def delete_root_slash(path: str) -> str:
    """Strip a single leading '/' so the path can be joined onto a root URL.

    Uses str.removeprefix (3.9+), which removes at most one occurrence —
    identical to the previous startswith/slice dance.
    """
    return path.removeprefix('/')
|
||||
|
||||
|
||||
def get_abs_path_from_relative_to_root(path):
    """Join *path* (with any single leading '/' stripped) onto FASTAPI_ROOT_URL.

    Assumes the FASTAPI_ROOT_URL environment variable is set — os.path.join
    raises TypeError on None otherwise (same as the original).
    """
    root = os.environ.get('FASTAPI_ROOT_URL')
    relative = path[1:] if path.startswith('/') else path
    return os.path.join(root, relative)
|
||||
|
||||
|
||||
def remove_fastapi_root(path):
    """Strip every occurrence of the FASTAPI_ROOT_URL prefix from *path*."""
    root = os.environ.get('FASTAPI_ROOT_URL')
    return path.replace(root, "")
|
||||
|
||||
def is_binary_file(file_path: str):
    """Heuristic: True when the first KiB contains bytes outside TEXTCHARS.

    translate(None, TEXTCHARS) deletes all text-like bytes; anything left
    over marks the file as binary.
    """
    with open(file_path, 'rb') as handle:
        head = handle.read(1024)
    return bool(head.translate(None, TEXTCHARS))
|
||||
def get_or_create_dir(path: str) -> str:
    """Ensure *path* exists as a directory and return it.

    makedirs(exist_ok=True) also creates missing parent directories and
    removes the exists()/mkdir() TOCTOU race of the previous version.
    """
    os.makedirs(path, exist_ok=True)
    return path
|
||||
|
||||
|
||||
def get_filename(file: typing.IO, default: str = uuid.uuid4()) -> str:
|
||||
if hasattr(file, 'name'):
|
||||
return file.name
|
||||
elif hasattr(file, 'filename'):
|
||||
return file.filename
|
||||
else:
|
||||
return f"{default.id_code}.py"
|
||||
|
||||
def remove_if_exists(path):
    """Delete *path* when it is an existing regular file; otherwise no-op."""
    # isfile() already implies existence, matching the original's
    # exists()-and-isfile() check.
    if os.path.isfile(path):
        os.remove(path)
|
||||
|
||||
|
||||
def get_parent_dir(path):
    """Absolute path of *path*'s parent directory (lexical, no symlink resolution)."""
    return os.path.dirname(os.path.abspath(path))
|
||||
|
||||
|
||||
def get_ancestor(path: str, levels: int = 1):
    """Climb *levels* parent directories up from *path* (absolute result)."""
    for _ in range(levels):
        path = os.path.abspath(os.path.join(path, os.pardir))
    return path
|
||||
|
||||
|
||||
def get_filename_from_path(path):
    """Final component of *path* (the file name)."""
    # basename is exactly os.path.split(path)[-1].
    return os.path.basename(path)
|
@ -1,14 +0,0 @@
|
||||
from datetime import datetime, timedelta
|
||||
from jose import jwt, JWTError
|
||||
from config import SECRET_KEY, ALGORITHM
|
||||
|
||||
|
||||
def create_access_token(data: dict, expires_delta: timedelta | None = None):
    """Encode *data* as a signed JWT with an `exp` claim appended.

    Defaults to a 15-minute lifetime when no delta is supplied.
    """
    lifetime = expires_delta if expires_delta else timedelta(minutes=15)
    claims = {**data, "exp": datetime.utcnow() + lifetime}
    return jwt.encode(claims, SECRET_KEY, algorithm=ALGORITHM)
|
@ -1,24 +0,0 @@
|
||||
import re
|
||||
from passlib.context import CryptContext
|
||||
|
||||
|
||||
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||
|
||||
|
||||
def verify_password(plain_password, hashed_password):
    """True when *plain_password* matches the stored bcrypt hash."""
    return pwd_context.verify(plain_password, hashed_password)


def get_password_hash(password):
    """Hash *password* with the module's bcrypt CryptContext."""
    return pwd_context.hash(password)
|
||||
|
||||
|
||||
def validate_password(password):
    """Check password strength.

    Returns True when valid, otherwise a French message describing the
    first failed rule (length >= 8, at least one digit, one uppercase).
    """
    rules = (
        (lambda p: len(p) >= 8,
         "Le mot de passe est trop court (8 caractères minimum)"),
        (lambda p: re.search('[0-9]', p) is not None,
         'Le mot de passe doit contenir au moins un chiffre'),
        (lambda p: re.search('[A-Z]', p) is not None,
         "Le mot de passe doit contenir au moins une majuscule"),
    )
    for passes, message in rules:
        if not passes(password):
            return message
    return True
|
||||
|
@ -1,21 +0,0 @@
|
||||
from contextlib import contextmanager
|
||||
import signal
|
||||
def raise_timeout(signum, frame):
    """SIGALRM handler: convert the alarm signal into a TimeoutError."""
    raise TimeoutError
|
||||
|
||||
@contextmanager
def timeout(time: int, exception=TimeoutError):
    """Limit the `with` body to *time* whole seconds via SIGALRM.

    Raises *exception* when the deadline fires. Unix-only and main-thread
    only (signal handlers cannot be installed elsewhere).
    """
    def _on_alarm(signum, frame):
        raise TimeoutError

    signal.signal(signal.SIGALRM, _on_alarm)
    signal.alarm(time)
    try:
        yield
    except TimeoutError:
        print('TIMED OUT')
        raise exception
    finally:
        # Fix: cancel any still-pending alarm — the original left it armed
        # after a fast body, leaving a live timer behind for later code.
        signal.alarm(0)
        signal.signal(signal.SIGALRM, signal.SIG_IGN)
|
@ -1,49 +0,0 @@
|
||||
from typing import Any
|
||||
from typing import Generator
|
||||
|
||||
import pytest
|
||||
from fastapi import FastAPI
|
||||
from fastapi.testclient import TestClient
|
||||
from apis.base import api_router
|
||||
from tortoise.contrib.fastapi import register_tortoise
|
||||
|
||||
|
||||
TORTOISE_ORM = {
|
||||
"connections": {"default": "sqlite://database/db.sqlite3"},
|
||||
"apps": {
|
||||
"models": {
|
||||
"models": ["database.models", "aerich.models"],
|
||||
"default_connection": "default",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
def start_application():
    """Build a fresh FastAPI app wired with the project's API router."""
    application = FastAPI()
    application.include_router(api_router)
    return application
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
def app() -> Generator[FastAPI, Any, None]:
    """Per-test FastAPI app with Tortoise ORM registered.

    Fix: register_tortoise previously received the module-level name
    `app` (this fixture function itself), so the freshly built `_app`
    was never wired to the ORM.
    """
    _app = start_application()
    register_tortoise(
        _app,
        config=TORTOISE_ORM,
        modules={"models": ["database.models"]},
        generate_schemas=True,
        add_exception_handlers=True,
    )
    yield _app
|
||||
|
||||
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
def client(
    app: FastAPI
) -> Generator[TestClient, Any, None]:
    """TestClient bound to the per-test app, running startup/shutdown events.

    Fix: the fixture was module-scoped while depending on the
    function-scoped `app` fixture, which pytest rejects (ScopeMismatch).
    """
    with TestClient(app) as client:
        yield client
|
@ -1,105 +0,0 @@
|
||||
|
||||
from tortoise.contrib.fastapi import register_tortoise
|
||||
import datetime
|
||||
from fastapi.testclient import TestClient
|
||||
import sys, os
|
||||
|
||||
import requests
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
from faker import Faker
|
||||
from main import app
|
||||
|
||||
fake = Faker()
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
fake_exercices = []
|
||||
|
||||
token = ''
|
||||
|
||||
def _post_register(payload):
    """POST the registration form to the running dev server."""
    return requests.post('http://localhost:8001/register', data=payload)


def test_register():
    """Drive /register through every validation failure, then a success."""
    global token

    r = _post_register({'username': "", 'password': 't', "password_confirm": "t"})
    assert r.status_code == 422
    assert r.json()['detail'][0]['msg'] == 'field required'

    r = _post_register({'username': "test", 'password': 'tt', "password_confirm": "t"})
    assert r.status_code == 400
    assert r.json()['detail'] == 'Les mots de passe ne correspondent pas !'

    r = _post_register({'username': "test", 'password': 'tt', "password_confirm": "tt"})
    assert r.status_code == 400
    assert r.json()['detail'] == 'Invalid password : Password too short'

    r = _post_register({'username': "test", 'password': 'testtest', "password_confirm": "testtest"})
    assert r.status_code == 400
    assert r.json()['detail'] == 'Invalid password : Password must have a figure'

    r = _post_register({'username': "test", 'password': 'testtest1', "password_confirm": "testtest1"})
    assert r.status_code == 400
    assert r.json()['detail'] == 'Invalid password : Password must have capital letter'

    ok = _post_register({'username': "test", 'password': 'Testtest1', "password_confirm": "Testtest1"})
    print(ok.json())
    assert ok.status_code == 200
    assert 'access_token' in ok.json()
    assert ok.json()['token_type'] == 'bearer'

    # Re-registering the same username must trip the unique constraint.
    r = _post_register({'username': "test", 'password': 'Testtest1', "password_confirm": "Testtest1"})
    assert r.status_code == 422
    assert 'UNIQUE constraint failed' in r.json()['detail'][0]['msg']
    token = ok.json()['access_token']


def test_login():
    """Wrong username, wrong password, then a successful login."""
    global token
    for creds in ({"username": "teste", 'password': 'Testtest1'},
                  {"username": "test", 'password': 'Testtest'}):
        r = requests.post('http://localhost:8001/login', data=creds)
        assert r.status_code == 401
        assert r.json()['detail'] == 'Incorrect username or password'

    r = requests.post('http://localhost:8001/login',
                      data={"username": "test", 'password': 'Testtest1'})
    assert r.status_code == 200
    assert 'access_token' in r.json()
    assert r.json()['token_type'] == 'bearer'
    token = r.json()['access_token']


def test_delete_user():
    """The authenticated user can delete its own account."""
    r = requests.delete('http://localhost:8001/user',
                        headers={'Authorization': f"Bearer {token}"})
    print(r.json())
    assert r.status_code == 200


def test_create_exo():
    """Upload an exercise model and check the serialized response."""
    print('TOKEN', token)
    # Fix: the header was built with f'Bearer ${token}' — a JavaScript
    # template leftover that sent a literal '$' and broke authentication.
    headers = {'Authorization': f'Bearer {token}'}
    response = requests.post(
        'http://localhost:8001/exercices/',
        params={"name": "test", "consigne": "test", 'private': False},
        files={'file': ('1test_model.py',
                        open("/home/lilian/1test_model.py", 'rb'),
                        "text/x-python")},
        headers=headers,
    )
    data = response.json()
    fake_exercices.append(data)
    assert 'id_code' in data
    assert 'updated_at' in data
    data.pop('id_code')
    data.pop('updated_at')
    assert response.status_code == 200
    assert data == {'name': 'test', 'consigne': 'test', 'private': False,
                    'tags': [], 'origin': None, 'isOriginal': True,
                    'pdfSupport': True, 'csvSupport': False, 'webSupport': True}


def delete_exo():
    """Helper (not collected as a test): remove the fixture exercise."""
    response = requests.delete('http://localhost:8001/exercices/DATEMY')
    assert response.status_code == 200


def test_ws():
    """Placeholder for websocket coverage."""
    pass
|
||||
|
||||
|
||||
|
@ -1,10 +0,0 @@
|
||||
import random
|
||||
|
||||
"""
|
||||
Fonction main() qui doit renvoyer un objet avec:
|
||||
calcul: le calcul a afficher
|
||||
result: la correction du calcul (pas de correction -> mettre None)
|
||||
"""
|
||||
|
||||
def main():
    """Exercise generator entry point: return the render payload."""
    return {"calcul": "1+1=2", "pdf": "", "csv": "None"}
|
@ -1,10 +0,0 @@
|
||||
import random
|
||||
|
||||
"""
|
||||
Fonction main() qui doit renvoyer un objet avec:
|
||||
calcul: le calcul a afficher
|
||||
result: la correction du calcul (pas de correction -> mettre None)
|
||||
"""
|
||||
|
||||
def main():
    """Exercise generator entry point: return the render payload."""
    return {"calcul": "1+1=2", "pdf": "", "csv": "1+1"}
|
@ -1,10 +0,0 @@
|
||||
import random
|
||||
|
||||
"""
|
||||
Fonction main() qui doit renvoyer un objet avec:
|
||||
calcul: le calcul a afficher
|
||||
result: la correction du calcul (pas de correction -> mettre None)
|
||||
"""
|
||||
|
||||
def main():
    """Exercise generator entry point: csv output disabled (None)."""
    return {"calcul": "1+1=2", "pdf": "", "csv": None}
|
@ -1,10 +0,0 @@
|
||||
import random
|
||||
|
||||
"""
|
||||
Fonction main() qui doit renvoyer un objet avec:
|
||||
calcul: le calcul a afficher
|
||||
result: la correction du calcul (pas de correction -> mettre None)
|
||||
"""
|
||||
|
||||
def main():
    """Exercise generator entry point: return the render payload."""
    return {"calcul": "1+1=2", "pdf": "", "csv": "None"}
|
@ -1,10 +0,0 @@
|
||||
import random
|
||||
|
||||
"""
|
||||
Fonction main() qui doit renvoyer un objet avec:
|
||||
calcul: le calcul a afficher
|
||||
result: la correction du calcul (pas de correction -> mettre None)
|
||||
"""
|
||||
|
||||
def main():
    """Exercise generator entry point: return the render payload."""
    return {"calcul": "1+1=2", "pdf": "", "csv": "None"}
|
@ -1,10 +0,0 @@
|
||||
import random
|
||||
|
||||
"""
|
||||
Fonction main() qui doit renvoyer un objet avec:
|
||||
calcul: le calcul a afficher
|
||||
result: la correction du calcul (pas de correction -> mettre None)
|
||||
"""
|
||||
|
||||
def main():
    """Exercise generator entry point: csv and web outputs disabled."""
    return {"calcul": "1+1=2", "web": None, "csv": None}
|
2
frontend/.gitignore
vendored
Normal file
2
frontend/.gitignore
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
node_modules
|
||||
dist
|
@ -5,6 +5,7 @@
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<meta name="theme-color" content="#000000" />
|
||||
<link rel="shortcut icon" type="image/ico" href="/src/assets/favicon.ico" />
|
||||
<title>Solid App</title>
|
||||
</head>
|
||||
<body>
|
||||
<noscript>You need to enable JavaScript to run this app.</noscript>
|
||||
|
@ -1,5 +1,5 @@
|
||||
{
|
||||
"name": "vite-template-solid",
|
||||
"name": "generateur",
|
||||
"version": "0.0.0",
|
||||
"description": "",
|
||||
"scripts": {
|
||||
@ -10,24 +10,14 @@
|
||||
},
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"sass": "^1.54.3",
|
||||
"typescript": "^4.7.4",
|
||||
"vite": "^3.0.0",
|
||||
"autoprefixer": "^10.4.12",
|
||||
"postcss": "^8.4.18",
|
||||
"tailwindcss": "^3.1.8",
|
||||
"typescript": "^4.8.2",
|
||||
"vite": "^3.0.9",
|
||||
"vite-plugin-solid": "^2.3.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@solidjs/meta": "^0.28.0",
|
||||
"@solidjs/router": "^0.4.2",
|
||||
"axios": "^0.27.2",
|
||||
"chroma-js": "^2.4.2",
|
||||
"emotion-solid": "^1.1.1",
|
||||
"jwt-decode": "^3.1.2",
|
||||
"solid-forms": "^0.4.5",
|
||||
"solid-icons": "^1.0.1",
|
||||
"solid-js": "^1.4.7",
|
||||
"solid-styled-components": "^0.28.4",
|
||||
"solid-styled-jsx": "^0.27.1",
|
||||
"solid-toast": "^0.3.4",
|
||||
"styled-jsx": "^3.4.4"
|
||||
"solid-js": "^1.5.1"
|
||||
}
|
||||
}
|
||||
|
827
frontend/pnpm-lock.yaml
generated
827
frontend/pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
6
frontend/postcss.config.js
Normal file
6
frontend/postcss.config.js
Normal file
@ -0,0 +1,6 @@
|
||||
module.exports = {
|
||||
plugins: {
|
||||
tailwindcss: {},
|
||||
autoprefixer: {},
|
||||
},
|
||||
}
|
@ -1,65 +1,27 @@
|
||||
import { Component, createEffect, createSignal } from "solid-js";
|
||||
import type { Component } from 'solid-js';
|
||||
|
||||
import logo from './logo.svg';
|
||||
import styles from './App.module.css';
|
||||
|
||||
import logo from "./logo.svg";
|
||||
import styles from "./App.module.css";
|
||||
import { MetaProvider } from "@solidjs/meta";
|
||||
import Layout from "./components/Layout";
|
||||
import { Route, Routes } from "@solidjs/router";
|
||||
import Test from "./components/test";
|
||||
import Home from "./components/Home";
|
||||
import Routing from "./components/Routing";
|
||||
import { AuthProvider } from "./context/auth.context.jsx";
|
||||
import { LoginPopUpProvider } from "./context/loginPopUp.context.jsx";
|
||||
import LoginPopup from "./components/LoginPopup";
|
||||
import { Toaster } from "solid-toast";
|
||||
import { NotificationProvider } from "./context/notification.context.jsx";
|
||||
import { NavigateProvider } from "./context/navigate.context.jsx";
|
||||
const App: Component = () => {
|
||||
const [count, setCount] = createSignal(0);
|
||||
createEffect(() => {
|
||||
setInterval(() => setCount((c) => c + 1), 1000);
|
||||
});
|
||||
const [popup, setPopup] = createSignal({ active: false, next: () => {} });
|
||||
return (
|
||||
<MetaProvider>
|
||||
<NotificationProvider>
|
||||
<NavigateProvider>
|
||||
<LoginPopUpProvider
|
||||
popup={(next: () => void) => {
|
||||
setPopup({ active: true, next: next });
|
||||
}}
|
||||
active={popup().active}
|
||||
next={() => {
|
||||
popup().next();
|
||||
setPopup({ active: false, next: () => {} });
|
||||
}}
|
||||
>
|
||||
<AuthProvider>
|
||||
<Routing />
|
||||
|
||||
<LoginPopup
|
||||
active={popup().active}
|
||||
close={() => {
|
||||
setPopup({ active: false, next: () => {} });
|
||||
}}
|
||||
/>
|
||||
|
||||
<Toaster />
|
||||
</AuthProvider>
|
||||
</LoginPopUpProvider>{" "}
|
||||
</NavigateProvider>
|
||||
</NotificationProvider>
|
||||
</MetaProvider>
|
||||
<div class={styles.App}>
|
||||
<header class={styles.header}>
|
||||
<img src={logo} class={styles.logo} alt="logo" />
|
||||
<p>
|
||||
Edit <code>src/App.tsx</code> and save to reload.
|
||||
</p>
|
||||
<a
|
||||
class={styles.link}
|
||||
href="https://github.com/solidjs/solid"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
Learn Solid
|
||||
</a>
|
||||
</header>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
type countModel = {
|
||||
count: number;
|
||||
};
|
||||
|
||||
const Counter: Component<countModel> = (props: countModel) => {
|
||||
var c = props.count;
|
||||
return <p>{props.count}</p>;
|
||||
};
|
||||
|
||||
export default App;
|
||||
|
@ -1,72 +1,7 @@
|
||||
/* @refresh reload */
|
||||
import { render } from "solid-js/web";
|
||||
import { render } from 'solid-js/web';
|
||||
|
||||
import "./index.css";
|
||||
import "./styles/index.scss";
|
||||
import App from "./App";
|
||||
import { Router } from "@solidjs/router";
|
||||
import { exoInstance } from "./apis/exoInstance.instance.js";
|
||||
import { refresh_request } from "./requests/auth.requests.js";
|
||||
import jwtDecode from "jwt-decode";
|
||||
const jwt_expire_check = (token) => {
|
||||
var { exp } = jwtDecode<any>(token);
|
||||
return Date.now() >= exp * 1000;
|
||||
}; /*
|
||||
exoInstance.interceptors.request.use(
|
||||
(config) => {
|
||||
import './index.css';
|
||||
import App from './App';
|
||||
|
||||
if ("Authorization" in config.headers) {
|
||||
var token = localStorage.getItem("token");
|
||||
var refresh = localStorage.getItem("refresh_token");
|
||||
var originalRequest = config;
|
||||
if (token != null && refresh != null) {
|
||||
if (jwt_expire_check(token)) {
|
||||
refresh_request(refresh).then((r) => {
|
||||
localStorage.setItem("token", r.access_token);
|
||||
|
||||
originalRequest.headers.Authorization = "Bearer " + r.access_token;
|
||||
return Promise.resolve(originalRequest);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
return config;
|
||||
},
|
||||
(err) => {
|
||||
return Promise.reject(err);
|
||||
}
|
||||
); */
|
||||
exoInstance.interceptors.response.use(
|
||||
(response) => response,
|
||||
(error) => {
|
||||
console.log(error, "errrrrrrrrrrrrrrr")
|
||||
const status = error.response ? error.response.status : null;
|
||||
console.log(status)
|
||||
if (error.response.data.detail === "Signature has expired") {
|
||||
var token = localStorage.getItem("token");
|
||||
var refresh = localStorage.getItem("refresh_token");
|
||||
console.log("testtetetet", token, refresh );
|
||||
if (token != null && refresh != null) {
|
||||
console.log('tets')
|
||||
refresh_request(refresh).then((r) => {
|
||||
error.config.headers["Authorization"] = "Bearer " + r.access_token;
|
||||
localStorage.setItem('token', r.access_token)
|
||||
error.config.baseURL = undefined;
|
||||
|
||||
return exoInstance.request(error.config);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return Promise.reject(error);
|
||||
}
|
||||
);
|
||||
|
||||
render(
|
||||
() => (
|
||||
<Router>
|
||||
<App />
|
||||
</Router>
|
||||
),
|
||||
document.getElementById("root") as HTMLElement
|
||||
);
|
||||
render(() => <App />, document.getElementById('root') as HTMLElement);
|
||||
|
8
frontend/tailwind.config.js
Normal file
8
frontend/tailwind.config.js
Normal file
@ -0,0 +1,8 @@
|
||||
/** @type {import('tailwindcss').Config} */
|
||||
module.exports = {
|
||||
content: [],
|
||||
theme: {
|
||||
extend: {},
|
||||
},
|
||||
plugins: [],
|
||||
}
|
@ -10,8 +10,6 @@
|
||||
"jsxImportSource": "solid-js",
|
||||
"types": ["vite/client"],
|
||||
"noEmit": true,
|
||||
"isolatedModules": true,
|
||||
"plugins": [{ "name": "typescript-plugin-css-modules" }],
|
||||
"noImplicitAny": false
|
||||
"isolatedModules": true
|
||||
}
|
||||
}
|
||||
|
34
frontend_old/README.md
Normal file
34
frontend_old/README.md
Normal file
@ -0,0 +1,34 @@
|
||||
## Usage
|
||||
|
||||
Those templates dependencies are maintained via [pnpm](https://pnpm.io) via `pnpm up -Lri`.
|
||||
|
||||
This is the reason you see a `pnpm-lock.yaml`. That being said, any package manager will work. This file can be safely be removed once you clone a template.
|
||||
|
||||
```bash
|
||||
$ npm install # or pnpm install or yarn install
|
||||
```
|
||||
|
||||
### Learn more on the [Solid Website](https://solidjs.com) and come chat with us on our [Discord](https://discord.com/invite/solidjs)
|
||||
|
||||
## Available Scripts
|
||||
|
||||
In the project directory, you can run:
|
||||
|
||||
### `npm dev` or `npm start`
|
||||
|
||||
Runs the app in the development mode.<br>
|
||||
Open [http://localhost:3000](http://localhost:3000) to view it in the browser.
|
||||
|
||||
The page will reload if you make edits.<br>
|
||||
|
||||
### `npm run build`
|
||||
|
||||
Builds the app for production to the `dist` folder.<br>
|
||||
It correctly bundles Solid in production mode and optimizes the build for the best performance.
|
||||
|
||||
The build is minified and the filenames include the hashes.<br>
|
||||
Your app is ready to be deployed!
|
||||
|
||||
## Deployment
|
||||
|
||||
You can deploy the `dist` folder to any static host provider (netlify, surge, now, etc.)
|
15
frontend_old/index.html
Normal file
15
frontend_old/index.html
Normal file
@ -0,0 +1,15 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<meta name="theme-color" content="#000000" />
|
||||
<link rel="shortcut icon" type="image/ico" href="/src/assets/favicon.ico" />
|
||||
</head>
|
||||
<body>
|
||||
<noscript>You need to enable JavaScript to run this app.</noscript>
|
||||
<div id="root"></div>
|
||||
|
||||
<script src="/src/index.tsx" type="module"></script>
|
||||
</body>
|
||||
</html>
|
17
frontend_old/node_modules/.bin/sass
generated
vendored
Executable file
17
frontend_old/node_modules/.bin/sass
generated
vendored
Executable file
@ -0,0 +1,17 @@
|
||||
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
|
||||
esac
|
||||
|
||||
if [ -z "$NODE_PATH" ]; then
|
||||
export NODE_PATH="/home/lilian/Dev/project/fastapi_gen/frontend/node_modules/.pnpm/node_modules"
|
||||
else
|
||||
export NODE_PATH="$NODE_PATH:/home/lilian/Dev/project/fastapi_gen/frontend/node_modules/.pnpm/node_modules"
|
||||
fi
|
||||
if [ -x "$basedir/node" ]; then
|
||||
exec "$basedir/node" "$basedir/../sass/sass.js" "$@"
|
||||
else
|
||||
exec node "$basedir/../sass/sass.js" "$@"
|
||||
fi
|
17
frontend_old/node_modules/.bin/tsc
generated
vendored
Executable file
17
frontend_old/node_modules/.bin/tsc
generated
vendored
Executable file
@ -0,0 +1,17 @@
|
||||
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
|
||||
esac
|
||||
|
||||
if [ -z "$NODE_PATH" ]; then
|
||||
export NODE_PATH="/home/lilian/Dev/project/fastapi_gen/frontend/node_modules/.pnpm/node_modules"
|
||||
else
|
||||
export NODE_PATH="$NODE_PATH:/home/lilian/Dev/project/fastapi_gen/frontend/node_modules/.pnpm/node_modules"
|
||||
fi
|
||||
if [ -x "$basedir/node" ]; then
|
||||
exec "$basedir/node" "$basedir/../typescript/bin/tsc" "$@"
|
||||
else
|
||||
exec node "$basedir/../typescript/bin/tsc" "$@"
|
||||
fi
|
17
frontend_old/node_modules/.bin/tsserver
generated
vendored
Executable file
17
frontend_old/node_modules/.bin/tsserver
generated
vendored
Executable file
@ -0,0 +1,17 @@
|
||||
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
|
||||
esac
|
||||
|
||||
if [ -z "$NODE_PATH" ]; then
|
||||
export NODE_PATH="/home/lilian/Dev/project/fastapi_gen/frontend/node_modules/.pnpm/node_modules"
|
||||
else
|
||||
export NODE_PATH="$NODE_PATH:/home/lilian/Dev/project/fastapi_gen/frontend/node_modules/.pnpm/node_modules"
|
||||
fi
|
||||
if [ -x "$basedir/node" ]; then
|
||||
exec "$basedir/node" "$basedir/../typescript/bin/tsserver" "$@"
|
||||
else
|
||||
exec node "$basedir/../typescript/bin/tsserver" "$@"
|
||||
fi
|
17
frontend_old/node_modules/.bin/vite
generated
vendored
Executable file
17
frontend_old/node_modules/.bin/vite
generated
vendored
Executable file
@ -0,0 +1,17 @@
|
||||
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
|
||||
esac
|
||||
|
||||
if [ -z "$NODE_PATH" ]; then
|
||||
export NODE_PATH="/home/lilian/Dev/project/fastapi_gen/frontend/node_modules/.pnpm/node_modules"
|
||||
else
|
||||
export NODE_PATH="$NODE_PATH:/home/lilian/Dev/project/fastapi_gen/frontend/node_modules/.pnpm/node_modules"
|
||||
fi
|
||||
if [ -x "$basedir/node" ]; then
|
||||
exec "$basedir/node" "$basedir/../vite/bin/vite.js" "$@"
|
||||
else
|
||||
exec node "$basedir/../vite/bin/vite.js" "$@"
|
||||
fi
|
333
frontend_old/node_modules/.modules.yaml
generated
vendored
Normal file
333
frontend_old/node_modules/.modules.yaml
generated
vendored
Normal file
@ -0,0 +1,333 @@
|
||||
hoistPattern:
|
||||
- '*'
|
||||
hoistedDependencies:
|
||||
/@ampproject/remapping/2.2.0:
|
||||
'@ampproject/remapping': private
|
||||
/@babel/code-frame/7.18.6:
|
||||
'@babel/code-frame': private
|
||||
/@babel/compat-data/7.18.8:
|
||||
'@babel/compat-data': private
|
||||
/@babel/core/7.18.6:
|
||||
'@babel/core': private
|
||||
/@babel/generator/7.18.7:
|
||||
'@babel/generator': private
|
||||
/@babel/helper-annotate-as-pure/7.18.6:
|
||||
'@babel/helper-annotate-as-pure': private
|
||||
/@babel/helper-compilation-targets/7.18.6_@babel+core@7.18.6:
|
||||
'@babel/helper-compilation-targets': private
|
||||
/@babel/helper-create-class-features-plugin/7.18.6_@babel+core@7.18.6:
|
||||
'@babel/helper-create-class-features-plugin': private
|
||||
/@babel/helper-environment-visitor/7.18.6:
|
||||
'@babel/helper-environment-visitor': private
|
||||
/@babel/helper-function-name/7.18.6:
|
||||
'@babel/helper-function-name': private
|
||||
/@babel/helper-hoist-variables/7.18.6:
|
||||
'@babel/helper-hoist-variables': private
|
||||
/@babel/helper-member-expression-to-functions/7.18.6:
|
||||
'@babel/helper-member-expression-to-functions': private
|
||||
/@babel/helper-module-imports/7.18.6:
|
||||
'@babel/helper-module-imports': private
|
||||
/@babel/helper-module-transforms/7.18.8:
|
||||
'@babel/helper-module-transforms': private
|
||||
/@babel/helper-optimise-call-expression/7.18.6:
|
||||
'@babel/helper-optimise-call-expression': private
|
||||
/@babel/helper-plugin-utils/7.18.6:
|
||||
'@babel/helper-plugin-utils': private
|
||||
/@babel/helper-replace-supers/7.18.6:
|
||||
'@babel/helper-replace-supers': private
|
||||
/@babel/helper-simple-access/7.18.6:
|
||||
'@babel/helper-simple-access': private
|
||||
/@babel/helper-split-export-declaration/7.18.6:
|
||||
'@babel/helper-split-export-declaration': private
|
||||
/@babel/helper-validator-identifier/7.18.6:
|
||||
'@babel/helper-validator-identifier': private
|
||||
/@babel/helper-validator-option/7.18.6:
|
||||
'@babel/helper-validator-option': private
|
||||
/@babel/helpers/7.18.6:
|
||||
'@babel/helpers': private
|
||||
/@babel/highlight/7.18.6:
|
||||
'@babel/highlight': private
|
||||
/@babel/parser/7.18.8:
|
||||
'@babel/parser': private
|
||||
/@babel/plugin-syntax-jsx/7.18.6_@babel+core@7.18.6:
|
||||
'@babel/plugin-syntax-jsx': private
|
||||
/@babel/plugin-syntax-typescript/7.18.6_@babel+core@7.18.6:
|
||||
'@babel/plugin-syntax-typescript': private
|
||||
/@babel/plugin-transform-typescript/7.18.8_@babel+core@7.18.6:
|
||||
'@babel/plugin-transform-typescript': private
|
||||
/@babel/preset-typescript/7.18.6_@babel+core@7.18.6:
|
||||
'@babel/preset-typescript': private
|
||||
/@babel/template/7.18.6:
|
||||
'@babel/template': private
|
||||
/@babel/traverse/7.18.8:
|
||||
'@babel/traverse': private
|
||||
/@babel/types/7.8.3:
|
||||
'@babel/types': private
|
||||
/@emotion/cache/11.10.3:
|
||||
'@emotion/cache': private
|
||||
/@emotion/hash/0.9.0:
|
||||
'@emotion/hash': private
|
||||
/@emotion/is-prop-valid/1.2.0:
|
||||
'@emotion/is-prop-valid': private
|
||||
/@emotion/memoize/0.8.0:
|
||||
'@emotion/memoize': private
|
||||
/@emotion/serialize/1.1.0:
|
||||
'@emotion/serialize': private
|
||||
/@emotion/sheet/1.2.0:
|
||||
'@emotion/sheet': private
|
||||
/@emotion/unitless/0.8.0:
|
||||
'@emotion/unitless': private
|
||||
/@emotion/utils/1.2.0:
|
||||
'@emotion/utils': private
|
||||
/@emotion/weak-memoize/0.3.0:
|
||||
'@emotion/weak-memoize': private
|
||||
/@jridgewell/gen-mapping/0.1.1:
|
||||
'@jridgewell/gen-mapping': private
|
||||
/@jridgewell/resolve-uri/3.1.0:
|
||||
'@jridgewell/resolve-uri': private
|
||||
/@jridgewell/set-array/1.1.2:
|
||||
'@jridgewell/set-array': private
|
||||
/@jridgewell/sourcemap-codec/1.4.14:
|
||||
'@jridgewell/sourcemap-codec': private
|
||||
/@jridgewell/trace-mapping/0.3.14:
|
||||
'@jridgewell/trace-mapping': private
|
||||
/ansi-styles/3.2.1:
|
||||
ansi-styles: private
|
||||
/anymatch/3.1.2:
|
||||
anymatch: private
|
||||
/asynckit/0.4.0:
|
||||
asynckit: private
|
||||
/babel-plugin-jsx-dom-expressions/0.33.12_@babel+core@7.18.6:
|
||||
babel-plugin-jsx-dom-expressions: private
|
||||
/babel-plugin-syntax-jsx/6.18.0:
|
||||
babel-plugin-syntax-jsx: private
|
||||
/babel-plugin-transform-rename-import/2.3.0:
|
||||
babel-plugin-transform-rename-import: private
|
||||
/babel-preset-solid/1.4.6_@babel+core@7.18.6:
|
||||
babel-preset-solid: private
|
||||
/big.js/5.2.2:
|
||||
big.js: private
|
||||
/binary-extensions/2.2.0:
|
||||
binary-extensions: private
|
||||
/braces/3.0.2:
|
||||
braces: private
|
||||
/browserslist/4.21.2:
|
||||
browserslist: private
|
||||
/caniuse-lite/1.0.30001366:
|
||||
caniuse-lite: private
|
||||
/chalk/2.4.2:
|
||||
chalk: private
|
||||
/chokidar/3.5.3:
|
||||
chokidar: private
|
||||
/color-convert/1.9.3:
|
||||
color-convert: private
|
||||
/color-name/1.1.3:
|
||||
color-name: private
|
||||
/combined-stream/1.0.8:
|
||||
combined-stream: private
|
||||
/convert-source-map/1.7.0:
|
||||
convert-source-map: private
|
||||
/csstype/3.1.0:
|
||||
csstype: private
|
||||
/debug/4.3.4:
|
||||
debug: private
|
||||
/delayed-stream/1.0.0:
|
||||
delayed-stream: private
|
||||
/electron-to-chromium/1.4.189:
|
||||
electron-to-chromium: private
|
||||
/emojis-list/2.1.0:
|
||||
emojis-list: private
|
||||
/esbuild-android-64/0.14.49:
|
||||
esbuild-android-64: private
|
||||
/esbuild-android-arm64/0.14.49:
|
||||
esbuild-android-arm64: private
|
||||
/esbuild-darwin-64/0.14.49:
|
||||
esbuild-darwin-64: private
|
||||
/esbuild-darwin-arm64/0.14.49:
|
||||
esbuild-darwin-arm64: private
|
||||
/esbuild-freebsd-64/0.14.49:
|
||||
esbuild-freebsd-64: private
|
||||
/esbuild-freebsd-arm64/0.14.49:
|
||||
esbuild-freebsd-arm64: private
|
||||
/esbuild-linux-32/0.14.49:
|
||||
esbuild-linux-32: private
|
||||
/esbuild-linux-64/0.14.49:
|
||||
esbuild-linux-64: private
|
||||
/esbuild-linux-arm/0.14.49:
|
||||
esbuild-linux-arm: private
|
||||
/esbuild-linux-arm64/0.14.49:
|
||||
esbuild-linux-arm64: private
|
||||
/esbuild-linux-mips64le/0.14.49:
|
||||
esbuild-linux-mips64le: private
|
||||
/esbuild-linux-ppc64le/0.14.49:
|
||||
esbuild-linux-ppc64le: private
|
||||
/esbuild-linux-riscv64/0.14.49:
|
||||
esbuild-linux-riscv64: private
|
||||
/esbuild-linux-s390x/0.14.49:
|
||||
esbuild-linux-s390x: private
|
||||
/esbuild-netbsd-64/0.14.49:
|
||||
esbuild-netbsd-64: private
|
||||
/esbuild-openbsd-64/0.14.49:
|
||||
esbuild-openbsd-64: private
|
||||
/esbuild-sunos-64/0.14.49:
|
||||
esbuild-sunos-64: private
|
||||
/esbuild-windows-32/0.14.49:
|
||||
esbuild-windows-32: private
|
||||
/esbuild-windows-64/0.14.49:
|
||||
esbuild-windows-64: private
|
||||
/esbuild-windows-arm64/0.14.49:
|
||||
esbuild-windows-arm64: private
|
||||
/esbuild/0.14.49:
|
||||
esbuild: private
|
||||
/escalade/3.1.1:
|
||||
escalade: private
|
||||
/escape-string-regexp/1.0.5:
|
||||
escape-string-regexp: private
|
||||
/esutils/2.0.3:
|
||||
esutils: private
|
||||
/fast-deep-equal/3.1.3:
|
||||
fast-deep-equal: private
|
||||
/fill-range/7.0.1:
|
||||
fill-range: private
|
||||
/follow-redirects/1.15.1:
|
||||
follow-redirects: private
|
||||
/form-data/4.0.0:
|
||||
form-data: private
|
||||
/fsevents/2.3.2:
|
||||
fsevents: private
|
||||
/function-bind/1.1.1:
|
||||
function-bind: private
|
||||
/gensync/1.0.0-beta.2:
|
||||
gensync: private
|
||||
/glob-parent/5.1.2:
|
||||
glob-parent: private
|
||||
/globals/11.12.0:
|
||||
globals: private
|
||||
/goober/2.1.11_csstype@3.1.0:
|
||||
goober: private
|
||||
/has-flag/3.0.0:
|
||||
has-flag: private
|
||||
/has/1.0.3:
|
||||
has: private
|
||||
/html-entities/2.3.2:
|
||||
html-entities: private
|
||||
/immutable/4.1.0:
|
||||
immutable: private
|
||||
/is-binary-path/2.1.0:
|
||||
is-binary-path: private
|
||||
/is-core-module/2.9.0:
|
||||
is-core-module: private
|
||||
/is-extglob/2.1.1:
|
||||
is-extglob: private
|
||||
/is-glob/4.0.3:
|
||||
is-glob: private
|
||||
/is-number/7.0.0:
|
||||
is-number: private
|
||||
/is-what/4.1.7:
|
||||
is-what: private
|
||||
/js-tokens/4.0.0:
|
||||
js-tokens: private
|
||||
/jsesc/2.5.2:
|
||||
jsesc: private
|
||||
/json5/2.2.1:
|
||||
json5: private
|
||||
/loader-utils/1.2.3:
|
||||
loader-utils: private
|
||||
/lodash/4.17.21:
|
||||
lodash: private
|
||||
/merge-anything/5.0.2:
|
||||
merge-anything: private
|
||||
/mime-db/1.52.0:
|
||||
mime-db: private
|
||||
/mime-types/2.1.35:
|
||||
mime-types: private
|
||||
/minimist/1.2.6:
|
||||
minimist: private
|
||||
/ms/2.1.2:
|
||||
ms: private
|
||||
/nanoid/3.3.4:
|
||||
nanoid: private
|
||||
/node-releases/2.0.6:
|
||||
node-releases: private
|
||||
/normalize-path/3.0.0:
|
||||
normalize-path: private
|
||||
/path-parse/1.0.7:
|
||||
path-parse: private
|
||||
/picocolors/1.0.0:
|
||||
picocolors: private
|
||||
/picomatch/2.3.1:
|
||||
picomatch: private
|
||||
/postcss/8.4.14:
|
||||
postcss: private
|
||||
/readdirp/3.6.0:
|
||||
readdirp: private
|
||||
/resolve/1.22.1:
|
||||
resolve: private
|
||||
/rollup/2.76.0:
|
||||
rollup: private
|
||||
/safe-buffer/5.1.2:
|
||||
safe-buffer: private
|
||||
/semver/6.3.0:
|
||||
semver: private
|
||||
/solid-refresh/0.4.1_solid-js@1.4.7:
|
||||
solid-refresh: private
|
||||
/source-map-js/1.0.2:
|
||||
source-map-js: private
|
||||
/source-map/0.7.3:
|
||||
source-map: private
|
||||
/string-hash/1.1.3:
|
||||
string-hash: private
|
||||
/stylis-rule-sheet/0.0.10_stylis@3.5.4:
|
||||
stylis-rule-sheet: private
|
||||
/stylis/3.5.4:
|
||||
stylis: private
|
||||
/supports-color/5.5.0:
|
||||
supports-color: private
|
||||
/supports-preserve-symlinks-flag/1.0.0:
|
||||
supports-preserve-symlinks-flag: private
|
||||
/to-fast-properties/2.0.0:
|
||||
to-fast-properties: private
|
||||
/to-regex-range/5.0.1:
|
||||
to-regex-range: private
|
||||
/ts-toolbelt/9.6.0:
|
||||
ts-toolbelt: private
|
||||
/update-browserslist-db/1.0.4_browserslist@4.21.2:
|
||||
update-browserslist-db: private
|
||||
included:
|
||||
dependencies: true
|
||||
devDependencies: true
|
||||
optionalDependencies: true
|
||||
injectedDeps: {}
|
||||
layoutVersion: 5
|
||||
nodeLinker: isolated
|
||||
packageManager: pnpm@7.9.5
|
||||
pendingBuilds: []
|
||||
prunedAt: Wed, 31 Aug 2022 09:00:44 GMT
|
||||
publicHoistPattern:
|
||||
- '*eslint*'
|
||||
- '*prettier*'
|
||||
registries:
|
||||
default: https://registry.npmjs.org/
|
||||
skipped:
|
||||
- /esbuild-android-64/0.14.49
|
||||
- /esbuild-android-arm64/0.14.49
|
||||
- /esbuild-darwin-64/0.14.49
|
||||
- /esbuild-darwin-arm64/0.14.49
|
||||
- /esbuild-freebsd-64/0.14.49
|
||||
- /esbuild-freebsd-arm64/0.14.49
|
||||
- /esbuild-linux-32/0.14.49
|
||||
- /esbuild-linux-arm/0.14.49
|
||||
- /esbuild-linux-arm64/0.14.49
|
||||
- /esbuild-linux-mips64le/0.14.49
|
||||
- /esbuild-linux-ppc64le/0.14.49
|
||||
- /esbuild-linux-riscv64/0.14.49
|
||||
- /esbuild-linux-s390x/0.14.49
|
||||
- /esbuild-netbsd-64/0.14.49
|
||||
- /esbuild-openbsd-64/0.14.49
|
||||
- /esbuild-sunos-64/0.14.49
|
||||
- /esbuild-windows-32/0.14.49
|
||||
- /esbuild-windows-64/0.14.49
|
||||
- /esbuild-windows-arm64/0.14.49
|
||||
- /fsevents/2.3.2
|
||||
storeDir: /home/lilian/.local/share/pnpm/store/v3
|
||||
virtualStoreDir: .pnpm
|
202
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/LICENSE
generated
vendored
Normal file
202
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/LICENSE
generated
vendored
Normal file
@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2019 Google LLC
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
218
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/README.md
generated
vendored
Normal file
218
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/README.md
generated
vendored
Normal file
@ -0,0 +1,218 @@
|
||||
# @ampproject/remapping
|
||||
|
||||
> Remap sequential sourcemaps through transformations to point at the original source code
|
||||
|
||||
Remapping allows you to take the sourcemaps generated through transforming your code and "remap"
|
||||
them to the original source locations. Think "my minified code, transformed with babel and bundled
|
||||
with webpack", all pointing to the correct location in your original source code.
|
||||
|
||||
With remapping, none of your source code transformations need to be aware of the input's sourcemap,
|
||||
they only need to generate an output sourcemap. This greatly simplifies building custom
|
||||
transformations (think a find-and-replace).
|
||||
|
||||
## Installation
|
||||
|
||||
```sh
|
||||
npm install @ampproject/remapping
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```typescript
|
||||
function remapping(
|
||||
map: SourceMap | SourceMap[],
|
||||
loader: (file: string, ctx: LoaderContext) => (SourceMap | null | undefined),
|
||||
options?: { excludeContent: boolean, decodedMappings: boolean }
|
||||
): SourceMap;
|
||||
|
||||
// LoaderContext gives the loader the importing sourcemap, tree depth, the ability to override the
|
||||
// "source" location (where child sources are resolved relative to, or the location of original
|
||||
// source), and the ability to override the "content" of an original source for inclusion in the
|
||||
// output sourcemap.
|
||||
type LoaderContext = {
|
||||
readonly importer: string;
|
||||
readonly depth: number;
|
||||
source: string;
|
||||
content: string | null | undefined;
|
||||
}
|
||||
```
|
||||
|
||||
`remapping` takes the final output sourcemap, and a `loader` function. For every source file pointer
|
||||
in the sourcemap, the `loader` will be called with the resolved path. If the path itself represents
|
||||
a transformed file (it has a sourcmap associated with it), then the `loader` should return that
|
||||
sourcemap. If not, the path will be treated as an original, untransformed source code.
|
||||
|
||||
```js
|
||||
// Babel transformed "helloworld.js" into "transformed.js"
|
||||
const transformedMap = JSON.stringify({
|
||||
file: 'transformed.js',
|
||||
// 1st column of 2nd line of output file translates into the 1st source
|
||||
// file, line 3, column 2
|
||||
mappings: ';CAEE',
|
||||
sources: ['helloworld.js'],
|
||||
version: 3,
|
||||
});
|
||||
|
||||
// Uglify minified "transformed.js" into "transformed.min.js"
|
||||
const minifiedTransformedMap = JSON.stringify({
|
||||
file: 'transformed.min.js',
|
||||
// 0th column of 1st line of output file translates into the 1st source
|
||||
// file, line 2, column 1.
|
||||
mappings: 'AACC',
|
||||
names: [],
|
||||
sources: ['transformed.js'],
|
||||
version: 3,
|
||||
});
|
||||
|
||||
const remapped = remapping(
|
||||
minifiedTransformedMap,
|
||||
(file, ctx) => {
|
||||
|
||||
// The "transformed.js" file is an transformed file.
|
||||
if (file === 'transformed.js') {
|
||||
// The root importer is empty.
|
||||
console.assert(ctx.importer === '');
|
||||
// The depth in the sourcemap tree we're currently loading.
|
||||
// The root `minifiedTransformedMap` is depth 0, and its source children are depth 1, etc.
|
||||
console.assert(ctx.depth === 1);
|
||||
|
||||
return transformedMap;
|
||||
}
|
||||
|
||||
// Loader will be called to load transformedMap's source file pointers as well.
|
||||
console.assert(file === 'helloworld.js');
|
||||
// `transformed.js`'s sourcemap points into `helloworld.js`.
|
||||
console.assert(ctx.importer === 'transformed.js');
|
||||
// This is a source child of `transformed`, which is a source child of `minifiedTransformedMap`.
|
||||
console.assert(ctx.depth === 2);
|
||||
return null;
|
||||
}
|
||||
);
|
||||
|
||||
console.log(remapped);
|
||||
// {
|
||||
// file: 'transpiled.min.js',
|
||||
// mappings: 'AAEE',
|
||||
// sources: ['helloworld.js'],
|
||||
// version: 3,
|
||||
// };
|
||||
```
|
||||
|
||||
In this example, `loader` will be called twice:
|
||||
|
||||
1. `"transformed.js"`, the first source file pointer in the `minifiedTransformedMap`. We return the
|
||||
associated sourcemap for it (its a transformed file, after all) so that sourcemap locations can
|
||||
be traced through it into the source files it represents.
|
||||
2. `"helloworld.js"`, our original, unmodified source code. This file does not have a sourcemap, so
|
||||
we return `null`.
|
||||
|
||||
The `remapped` sourcemap now points from `transformed.min.js` into locations in `helloworld.js`. If
|
||||
you were to read the `mappings`, it says "0th column of the first line output line points to the 1st
|
||||
column of the 2nd line of the file `helloworld.js`".
|
||||
|
||||
### Multiple transformations of a file
|
||||
|
||||
As a convenience, if you have multiple single-source transformations of a file, you may pass an
|
||||
array of sourcemap files in the order of most-recent transformation sourcemap first. Note that this
|
||||
changes the `importer` and `depth` of each call to our loader. So our above example could have been
|
||||
written as:
|
||||
|
||||
```js
|
||||
const remapped = remapping(
|
||||
[minifiedTransformedMap, transformedMap],
|
||||
() => null
|
||||
);
|
||||
|
||||
console.log(remapped);
|
||||
// {
|
||||
// file: 'transpiled.min.js',
|
||||
// mappings: 'AAEE',
|
||||
// sources: ['helloworld.js'],
|
||||
// version: 3,
|
||||
// };
|
||||
```
|
||||
|
||||
### Advanced control of the loading graph
|
||||
|
||||
#### `source`
|
||||
|
||||
The `source` property can overridden to any value to change the location of the current load. Eg,
|
||||
for an original source file, it allows us to change the location to the original source regardless
|
||||
of what the sourcemap source entry says. And for transformed files, it allows us to change the
|
||||
relative resolving location for child sources of the loaded sourcemap.
|
||||
|
||||
```js
|
||||
const remapped = remapping(
|
||||
minifiedTransformedMap,
|
||||
(file, ctx) => {
|
||||
|
||||
if (file === 'transformed.js') {
|
||||
// We pretend the transformed.js file actually exists in the 'src/' directory. When the nested
|
||||
// source files are loaded, they will now be relative to `src/`.
|
||||
ctx.source = 'src/transformed.js';
|
||||
return transformedMap;
|
||||
}
|
||||
|
||||
console.assert(file === 'src/helloworld.js');
|
||||
// We could futher change the source of this original file, eg, to be inside a nested directory
|
||||
// itself. This will be reflected in the remapped sourcemap.
|
||||
ctx.source = 'src/nested/transformed.js';
|
||||
return null;
|
||||
}
|
||||
);
|
||||
|
||||
console.log(remapped);
|
||||
// {
|
||||
// …,
|
||||
// sources: ['src/nested/helloworld.js'],
|
||||
// };
|
||||
```
|
||||
|
||||
|
||||
#### `content`
|
||||
|
||||
The `content` property can be overridden when we encounter an original source file. Eg, this allows
|
||||
you to manually provide the source content of the original file regardless of whether the
|
||||
`sourcesContent` field is present in the parent sourcemap. It can also be set to `null` to remove
|
||||
the source content.
|
||||
|
||||
```js
|
||||
const remapped = remapping(
|
||||
minifiedTransformedMap,
|
||||
(file, ctx) => {
|
||||
|
||||
if (file === 'transformed.js') {
|
||||
// transformedMap does not include a `sourcesContent` field, so usually the remapped sourcemap
|
||||
// would not include any `sourcesContent` values.
|
||||
return transformedMap;
|
||||
}
|
||||
|
||||
console.assert(file === 'helloworld.js');
|
||||
// We can read the file to provide the source content.
|
||||
ctx.content = fs.readFileSync(file, 'utf8');
|
||||
return null;
|
||||
}
|
||||
);
|
||||
|
||||
console.log(remapped);
|
||||
// {
|
||||
// …,
|
||||
// sourcesContent: [
|
||||
// 'console.log("Hello world!")',
|
||||
// ],
|
||||
// };
|
||||
```
|
||||
|
||||
### Options
|
||||
|
||||
#### excludeContent
|
||||
|
||||
By default, `excludeContent` is `false`. Passing `{ excludeContent: true }` will exclude the
|
||||
`sourcesContent` field from the returned sourcemap. This is mainly useful when you want to reduce
|
||||
the size out the sourcemap.
|
||||
|
||||
#### decodedMappings
|
||||
|
||||
By default, `decodedMappings` is `false`. Passing `{ decodedMappings: true }` will leave the
|
||||
`mappings` field in a [decoded state](https://github.com/rich-harris/sourcemap-codec) instead of
|
||||
encoding into a VLQ string.
|
204
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/remapping.mjs
generated
vendored
Normal file
204
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/remapping.mjs
generated
vendored
Normal file
@ -0,0 +1,204 @@
|
||||
import { decodedMappings, traceSegment, TraceMap } from '@jridgewell/trace-mapping';
|
||||
import { GenMapping, addSegment, setSourceContent, decodedMap, encodedMap } from '@jridgewell/gen-mapping';
|
||||
|
||||
const SOURCELESS_MAPPING = {
|
||||
source: null,
|
||||
column: null,
|
||||
line: null,
|
||||
name: null,
|
||||
content: null,
|
||||
};
|
||||
const EMPTY_SOURCES = [];
|
||||
function Source(map, sources, source, content) {
|
||||
return {
|
||||
map,
|
||||
sources,
|
||||
source,
|
||||
content,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
|
||||
* (which may themselves be SourceMapTrees).
|
||||
*/
|
||||
function MapSource(map, sources) {
|
||||
return Source(map, sources, '', null);
|
||||
}
|
||||
/**
|
||||
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
|
||||
* segment tracing ends at the `OriginalSource`.
|
||||
*/
|
||||
function OriginalSource(source, content) {
|
||||
return Source(null, EMPTY_SOURCES, source, content);
|
||||
}
|
||||
/**
|
||||
* traceMappings is only called on the root level SourceMapTree, and begins the process of
|
||||
* resolving each mapping in terms of the original source files.
|
||||
*/
|
||||
function traceMappings(tree) {
|
||||
const gen = new GenMapping({ file: tree.map.file });
|
||||
const { sources: rootSources, map } = tree;
|
||||
const rootNames = map.names;
|
||||
const rootMappings = decodedMappings(map);
|
||||
for (let i = 0; i < rootMappings.length; i++) {
|
||||
const segments = rootMappings[i];
|
||||
let lastSource = null;
|
||||
let lastSourceLine = null;
|
||||
let lastSourceColumn = null;
|
||||
for (let j = 0; j < segments.length; j++) {
|
||||
const segment = segments[j];
|
||||
const genCol = segment[0];
|
||||
let traced = SOURCELESS_MAPPING;
|
||||
// 1-length segments only move the current generated column, there's no source information
|
||||
// to gather from it.
|
||||
if (segment.length !== 1) {
|
||||
const source = rootSources[segment[1]];
|
||||
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
|
||||
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
|
||||
// respective segment into an original source.
|
||||
if (traced == null)
|
||||
continue;
|
||||
}
|
||||
// So we traced a segment down into its original source file. Now push a
|
||||
// new segment pointing to this location.
|
||||
const { column, line, name, content, source } = traced;
|
||||
if (line === lastSourceLine && column === lastSourceColumn && source === lastSource) {
|
||||
continue;
|
||||
}
|
||||
lastSourceLine = line;
|
||||
lastSourceColumn = column;
|
||||
lastSource = source;
|
||||
// Sigh, TypeScript can't figure out source/line/column are either all null, or all non-null...
|
||||
addSegment(gen, i, genCol, source, line, column, name);
|
||||
if (content != null)
|
||||
setSourceContent(gen, source, content);
|
||||
}
|
||||
}
|
||||
return gen;
|
||||
}
|
||||
/**
|
||||
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
|
||||
* child SourceMapTrees, until we find the original source map.
|
||||
*/
|
||||
function originalPositionFor(source, line, column, name) {
|
||||
if (!source.map) {
|
||||
return { column, line, name, source: source.source, content: source.content };
|
||||
}
|
||||
const segment = traceSegment(source.map, line, column);
|
||||
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
|
||||
if (segment == null)
|
||||
return null;
|
||||
// 1-length segments only move the current generated column, there's no source information
|
||||
// to gather from it.
|
||||
if (segment.length === 1)
|
||||
return SOURCELESS_MAPPING;
|
||||
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
|
||||
}
|
||||
|
||||
function asArray(value) {
|
||||
if (Array.isArray(value))
|
||||
return value;
|
||||
return [value];
|
||||
}
|
||||
/**
|
||||
* Recursively builds a tree structure out of sourcemap files, with each node
|
||||
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
|
||||
* `OriginalSource`s and `SourceMapTree`s.
|
||||
*
|
||||
* Every sourcemap is composed of a collection of source files and mappings
|
||||
* into locations of those source files. When we generate a `SourceMapTree` for
|
||||
* the sourcemap, we attempt to load each source file's own sourcemap. If it
|
||||
* does not have an associated sourcemap, it is considered an original,
|
||||
* unmodified source file.
|
||||
*/
|
||||
function buildSourceMapTree(input, loader) {
|
||||
const maps = asArray(input).map((m) => new TraceMap(m, ''));
|
||||
const map = maps.pop();
|
||||
for (let i = 0; i < maps.length; i++) {
|
||||
if (maps[i].sources.length > 1) {
|
||||
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
|
||||
'Did you specify these with the most recent transformation maps first?');
|
||||
}
|
||||
}
|
||||
let tree = build(map, loader, '', 0);
|
||||
for (let i = maps.length - 1; i >= 0; i--) {
|
||||
tree = MapSource(maps[i], [tree]);
|
||||
}
|
||||
return tree;
|
||||
}
|
||||
function build(map, loader, importer, importerDepth) {
|
||||
const { resolvedSources, sourcesContent } = map;
|
||||
const depth = importerDepth + 1;
|
||||
const children = resolvedSources.map((sourceFile, i) => {
|
||||
// The loading context gives the loader more information about why this file is being loaded
|
||||
// (eg, from which importer). It also allows the loader to override the location of the loaded
|
||||
// sourcemap/original source, or to override the content in the sourcesContent field if it's
|
||||
// an unmodified source file.
|
||||
const ctx = {
|
||||
importer,
|
||||
depth,
|
||||
source: sourceFile || '',
|
||||
content: undefined,
|
||||
};
|
||||
// Use the provided loader callback to retrieve the file's sourcemap.
|
||||
// TODO: We should eventually support async loading of sourcemap files.
|
||||
const sourceMap = loader(ctx.source, ctx);
|
||||
const { source, content } = ctx;
|
||||
// If there is a sourcemap, then we need to recurse into it to load its source files.
|
||||
if (sourceMap)
|
||||
return build(new TraceMap(sourceMap, source), loader, source, depth);
|
||||
// Else, it's an an unmodified source file.
|
||||
// The contents of this unmodified source file can be overridden via the loader context,
|
||||
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
|
||||
// the importing sourcemap's `sourcesContent` field.
|
||||
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
|
||||
return OriginalSource(source, sourceContent);
|
||||
});
|
||||
return MapSource(map, children);
|
||||
}
|
||||
|
||||
/**
|
||||
* A SourceMap v3 compatible sourcemap, which only includes fields that were
|
||||
* provided to it.
|
||||
*/
|
||||
class SourceMap {
|
||||
constructor(map, options) {
|
||||
const out = options.decodedMappings ? decodedMap(map) : encodedMap(map);
|
||||
this.version = out.version; // SourceMap spec says this should be first.
|
||||
this.file = out.file;
|
||||
this.mappings = out.mappings;
|
||||
this.names = out.names;
|
||||
this.sourceRoot = out.sourceRoot;
|
||||
this.sources = out.sources;
|
||||
if (!options.excludeContent) {
|
||||
this.sourcesContent = out.sourcesContent;
|
||||
}
|
||||
}
|
||||
toString() {
|
||||
return JSON.stringify(this);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Traces through all the mappings in the root sourcemap, through the sources
|
||||
* (and their sourcemaps), all the way back to the original source location.
|
||||
*
|
||||
* `loader` will be called every time we encounter a source file. If it returns
|
||||
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
|
||||
* it returns a falsey value, that source file is treated as an original,
|
||||
* unmodified source file.
|
||||
*
|
||||
* Pass `excludeContent` to exclude any self-containing source file content
|
||||
* from the output sourcemap.
|
||||
*
|
||||
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
|
||||
* VLQ encoded) mappings.
|
||||
*/
|
||||
function remapping(input, loader, options) {
|
||||
const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
|
||||
const tree = buildSourceMapTree(input, loader);
|
||||
return new SourceMap(traceMappings(tree), opts);
|
||||
}
|
||||
|
||||
export { remapping as default };
|
||||
//# sourceMappingURL=remapping.mjs.map
|
1
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/remapping.mjs.map
generated
vendored
Normal file
1
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/remapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
209
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/remapping.umd.js
generated
vendored
Normal file
209
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/remapping.umd.js
generated
vendored
Normal file
@ -0,0 +1,209 @@
|
||||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('@jridgewell/trace-mapping'), require('@jridgewell/gen-mapping')) :
|
||||
typeof define === 'function' && define.amd ? define(['@jridgewell/trace-mapping', '@jridgewell/gen-mapping'], factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.remapping = factory(global.traceMapping, global.genMapping));
|
||||
})(this, (function (traceMapping, genMapping) { 'use strict';
|
||||
|
||||
const SOURCELESS_MAPPING = {
|
||||
source: null,
|
||||
column: null,
|
||||
line: null,
|
||||
name: null,
|
||||
content: null,
|
||||
};
|
||||
const EMPTY_SOURCES = [];
|
||||
function Source(map, sources, source, content) {
|
||||
return {
|
||||
map,
|
||||
sources,
|
||||
source,
|
||||
content,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
|
||||
* (which may themselves be SourceMapTrees).
|
||||
*/
|
||||
function MapSource(map, sources) {
|
||||
return Source(map, sources, '', null);
|
||||
}
|
||||
/**
|
||||
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
|
||||
* segment tracing ends at the `OriginalSource`.
|
||||
*/
|
||||
function OriginalSource(source, content) {
|
||||
return Source(null, EMPTY_SOURCES, source, content);
|
||||
}
|
||||
/**
|
||||
* traceMappings is only called on the root level SourceMapTree, and begins the process of
|
||||
* resolving each mapping in terms of the original source files.
|
||||
*/
|
||||
function traceMappings(tree) {
|
||||
const gen = new genMapping.GenMapping({ file: tree.map.file });
|
||||
const { sources: rootSources, map } = tree;
|
||||
const rootNames = map.names;
|
||||
const rootMappings = traceMapping.decodedMappings(map);
|
||||
for (let i = 0; i < rootMappings.length; i++) {
|
||||
const segments = rootMappings[i];
|
||||
let lastSource = null;
|
||||
let lastSourceLine = null;
|
||||
let lastSourceColumn = null;
|
||||
for (let j = 0; j < segments.length; j++) {
|
||||
const segment = segments[j];
|
||||
const genCol = segment[0];
|
||||
let traced = SOURCELESS_MAPPING;
|
||||
// 1-length segments only move the current generated column, there's no source information
|
||||
// to gather from it.
|
||||
if (segment.length !== 1) {
|
||||
const source = rootSources[segment[1]];
|
||||
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
|
||||
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
|
||||
// respective segment into an original source.
|
||||
if (traced == null)
|
||||
continue;
|
||||
}
|
||||
// So we traced a segment down into its original source file. Now push a
|
||||
// new segment pointing to this location.
|
||||
const { column, line, name, content, source } = traced;
|
||||
if (line === lastSourceLine && column === lastSourceColumn && source === lastSource) {
|
||||
continue;
|
||||
}
|
||||
lastSourceLine = line;
|
||||
lastSourceColumn = column;
|
||||
lastSource = source;
|
||||
// Sigh, TypeScript can't figure out source/line/column are either all null, or all non-null...
|
||||
genMapping.addSegment(gen, i, genCol, source, line, column, name);
|
||||
if (content != null)
|
||||
genMapping.setSourceContent(gen, source, content);
|
||||
}
|
||||
}
|
||||
return gen;
|
||||
}
|
||||
/**
|
||||
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
|
||||
* child SourceMapTrees, until we find the original source map.
|
||||
*/
|
||||
function originalPositionFor(source, line, column, name) {
|
||||
if (!source.map) {
|
||||
return { column, line, name, source: source.source, content: source.content };
|
||||
}
|
||||
const segment = traceMapping.traceSegment(source.map, line, column);
|
||||
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
|
||||
if (segment == null)
|
||||
return null;
|
||||
// 1-length segments only move the current generated column, there's no source information
|
||||
// to gather from it.
|
||||
if (segment.length === 1)
|
||||
return SOURCELESS_MAPPING;
|
||||
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
|
||||
}
|
||||
|
||||
function asArray(value) {
|
||||
if (Array.isArray(value))
|
||||
return value;
|
||||
return [value];
|
||||
}
|
||||
/**
|
||||
* Recursively builds a tree structure out of sourcemap files, with each node
|
||||
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
|
||||
* `OriginalSource`s and `SourceMapTree`s.
|
||||
*
|
||||
* Every sourcemap is composed of a collection of source files and mappings
|
||||
* into locations of those source files. When we generate a `SourceMapTree` for
|
||||
* the sourcemap, we attempt to load each source file's own sourcemap. If it
|
||||
* does not have an associated sourcemap, it is considered an original,
|
||||
* unmodified source file.
|
||||
*/
|
||||
function buildSourceMapTree(input, loader) {
|
||||
const maps = asArray(input).map((m) => new traceMapping.TraceMap(m, ''));
|
||||
const map = maps.pop();
|
||||
for (let i = 0; i < maps.length; i++) {
|
||||
if (maps[i].sources.length > 1) {
|
||||
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
|
||||
'Did you specify these with the most recent transformation maps first?');
|
||||
}
|
||||
}
|
||||
let tree = build(map, loader, '', 0);
|
||||
for (let i = maps.length - 1; i >= 0; i--) {
|
||||
tree = MapSource(maps[i], [tree]);
|
||||
}
|
||||
return tree;
|
||||
}
|
||||
function build(map, loader, importer, importerDepth) {
|
||||
const { resolvedSources, sourcesContent } = map;
|
||||
const depth = importerDepth + 1;
|
||||
const children = resolvedSources.map((sourceFile, i) => {
|
||||
// The loading context gives the loader more information about why this file is being loaded
|
||||
// (eg, from which importer). It also allows the loader to override the location of the loaded
|
||||
// sourcemap/original source, or to override the content in the sourcesContent field if it's
|
||||
// an unmodified source file.
|
||||
const ctx = {
|
||||
importer,
|
||||
depth,
|
||||
source: sourceFile || '',
|
||||
content: undefined,
|
||||
};
|
||||
// Use the provided loader callback to retrieve the file's sourcemap.
|
||||
// TODO: We should eventually support async loading of sourcemap files.
|
||||
const sourceMap = loader(ctx.source, ctx);
|
||||
const { source, content } = ctx;
|
||||
// If there is a sourcemap, then we need to recurse into it to load its source files.
|
||||
if (sourceMap)
|
||||
return build(new traceMapping.TraceMap(sourceMap, source), loader, source, depth);
|
||||
// Else, it's an an unmodified source file.
|
||||
// The contents of this unmodified source file can be overridden via the loader context,
|
||||
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
|
||||
// the importing sourcemap's `sourcesContent` field.
|
||||
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
|
||||
return OriginalSource(source, sourceContent);
|
||||
});
|
||||
return MapSource(map, children);
|
||||
}
|
||||
|
||||
/**
|
||||
* A SourceMap v3 compatible sourcemap, which only includes fields that were
|
||||
* provided to it.
|
||||
*/
|
||||
class SourceMap {
|
||||
constructor(map, options) {
|
||||
const out = options.decodedMappings ? genMapping.decodedMap(map) : genMapping.encodedMap(map);
|
||||
this.version = out.version; // SourceMap spec says this should be first.
|
||||
this.file = out.file;
|
||||
this.mappings = out.mappings;
|
||||
this.names = out.names;
|
||||
this.sourceRoot = out.sourceRoot;
|
||||
this.sources = out.sources;
|
||||
if (!options.excludeContent) {
|
||||
this.sourcesContent = out.sourcesContent;
|
||||
}
|
||||
}
|
||||
toString() {
|
||||
return JSON.stringify(this);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Traces through all the mappings in the root sourcemap, through the sources
|
||||
* (and their sourcemaps), all the way back to the original source location.
|
||||
*
|
||||
* `loader` will be called every time we encounter a source file. If it returns
|
||||
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
|
||||
* it returns a falsey value, that source file is treated as an original,
|
||||
* unmodified source file.
|
||||
*
|
||||
* Pass `excludeContent` to exclude any self-containing source file content
|
||||
* from the output sourcemap.
|
||||
*
|
||||
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
|
||||
* VLQ encoded) mappings.
|
||||
*/
|
||||
function remapping(input, loader, options) {
|
||||
const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
|
||||
const tree = buildSourceMapTree(input, loader);
|
||||
return new SourceMap(traceMappings(tree), opts);
|
||||
}
|
||||
|
||||
return remapping;
|
||||
|
||||
}));
|
||||
//# sourceMappingURL=remapping.umd.js.map
|
1
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/remapping.umd.js.map
generated
vendored
Normal file
1
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/remapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
14
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/types/build-source-map-tree.d.ts
generated
vendored
Normal file
14
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/types/build-source-map-tree.d.ts
generated
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
import type { MapSource as MapSourceType } from './source-map-tree';
|
||||
import type { SourceMapInput, SourceMapLoader } from './types';
|
||||
/**
|
||||
* Recursively builds a tree structure out of sourcemap files, with each node
|
||||
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
|
||||
* `OriginalSource`s and `SourceMapTree`s.
|
||||
*
|
||||
* Every sourcemap is composed of a collection of source files and mappings
|
||||
* into locations of those source files. When we generate a `SourceMapTree` for
|
||||
* the sourcemap, we attempt to load each source file's own sourcemap. If it
|
||||
* does not have an associated sourcemap, it is considered an original,
|
||||
* unmodified source file.
|
||||
*/
|
||||
export default function buildSourceMapTree(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader): MapSourceType;
|
19
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/types/remapping.d.ts
generated
vendored
Normal file
19
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/types/remapping.d.ts
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
import SourceMap from './source-map';
|
||||
import type { SourceMapInput, SourceMapLoader, Options } from './types';
|
||||
export type { SourceMapSegment, EncodedSourceMap, EncodedSourceMap as RawSourceMap, DecodedSourceMap, SourceMapInput, SourceMapLoader, LoaderContext, Options, } from './types';
|
||||
/**
|
||||
* Traces through all the mappings in the root sourcemap, through the sources
|
||||
* (and their sourcemaps), all the way back to the original source location.
|
||||
*
|
||||
* `loader` will be called every time we encounter a source file. If it returns
|
||||
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
|
||||
* it returns a falsey value, that source file is treated as an original,
|
||||
* unmodified source file.
|
||||
*
|
||||
* Pass `excludeContent` to exclude any self-containing source file content
|
||||
* from the output sourcemap.
|
||||
*
|
||||
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
|
||||
* VLQ encoded) mappings.
|
||||
*/
|
||||
export default function remapping(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader, options?: boolean | Options): SourceMap;
|
48
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/types/source-map-tree.d.ts
generated
vendored
Normal file
48
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/types/source-map-tree.d.ts
generated
vendored
Normal file
@ -0,0 +1,48 @@
|
||||
import { GenMapping } from '@jridgewell/gen-mapping';
|
||||
import type { TraceMap } from '@jridgewell/trace-mapping';
|
||||
export declare type SourceMapSegmentObject = {
|
||||
column: number;
|
||||
line: number;
|
||||
name: string;
|
||||
source: string;
|
||||
content: string | null;
|
||||
} | {
|
||||
column: null;
|
||||
line: null;
|
||||
name: null;
|
||||
source: null;
|
||||
content: null;
|
||||
};
|
||||
export declare type OriginalSource = {
|
||||
map: TraceMap;
|
||||
sources: Sources[];
|
||||
source: string;
|
||||
content: string | null;
|
||||
};
|
||||
export declare type MapSource = {
|
||||
map: TraceMap;
|
||||
sources: Sources[];
|
||||
source: string;
|
||||
content: string | null;
|
||||
};
|
||||
export declare type Sources = OriginalSource | MapSource;
|
||||
/**
|
||||
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
|
||||
* (which may themselves be SourceMapTrees).
|
||||
*/
|
||||
export declare function MapSource(map: TraceMap, sources: Sources[]): MapSource;
|
||||
/**
|
||||
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
|
||||
* segment tracing ends at the `OriginalSource`.
|
||||
*/
|
||||
export declare function OriginalSource(source: string, content: string | null): OriginalSource;
|
||||
/**
|
||||
* traceMappings is only called on the root level SourceMapTree, and begins the process of
|
||||
* resolving each mapping in terms of the original source files.
|
||||
*/
|
||||
export declare function traceMappings(tree: MapSource): GenMapping;
|
||||
/**
|
||||
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
|
||||
* child SourceMapTrees, until we find the original source map.
|
||||
*/
|
||||
export declare function originalPositionFor(source: Sources, line: number, column: number, name: string): SourceMapSegmentObject | null;
|
17
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/types/source-map.d.ts
generated
vendored
Normal file
17
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/types/source-map.d.ts
generated
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
import type { GenMapping } from '@jridgewell/gen-mapping';
|
||||
import type { DecodedSourceMap, EncodedSourceMap, Options } from './types';
|
||||
/**
|
||||
* A SourceMap v3 compatible sourcemap, which only includes fields that were
|
||||
* provided to it.
|
||||
*/
|
||||
export default class SourceMap {
|
||||
file?: string | null;
|
||||
mappings: EncodedSourceMap['mappings'] | DecodedSourceMap['mappings'];
|
||||
sourceRoot?: string;
|
||||
names: string[];
|
||||
sources: (string | null)[];
|
||||
sourcesContent?: (string | null)[];
|
||||
version: 3;
|
||||
constructor(map: GenMapping, options: Options);
|
||||
toString(): string;
|
||||
}
|
14
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/types/types.d.ts
generated
vendored
Normal file
14
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/dist/types/types.d.ts
generated
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
import type { SourceMapInput } from '@jridgewell/trace-mapping';
|
||||
export type { SourceMapSegment, DecodedSourceMap, EncodedSourceMap, } from '@jridgewell/trace-mapping';
|
||||
export type { SourceMapInput };
|
||||
export declare type LoaderContext = {
|
||||
readonly importer: string;
|
||||
readonly depth: number;
|
||||
source: string;
|
||||
content: string | null | undefined;
|
||||
};
|
||||
export declare type SourceMapLoader = (file: string, ctx: LoaderContext) => SourceMapInput | null | undefined | void;
|
||||
export declare type Options = {
|
||||
excludeContent?: boolean;
|
||||
decodedMappings?: boolean;
|
||||
};
|
63
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/package.json
generated
vendored
Normal file
63
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping/package.json
generated
vendored
Normal file
@ -0,0 +1,63 @@
|
||||
{
|
||||
"name": "@ampproject/remapping",
|
||||
"version": "2.2.0",
|
||||
"description": "Remap sequential sourcemaps through transformations to point at the original source code",
|
||||
"keywords": [
|
||||
"source",
|
||||
"map",
|
||||
"remap"
|
||||
],
|
||||
"main": "dist/remapping.umd.js",
|
||||
"module": "dist/remapping.mjs",
|
||||
"typings": "dist/types/remapping.d.ts",
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"author": "Justin Ridgewell <jridgewell@google.com>",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/ampproject/remapping.git"
|
||||
},
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">=6.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "run-s -n build:*",
|
||||
"build:rollup": "rollup -c rollup.config.js",
|
||||
"build:ts": "tsc --project tsconfig.build.json",
|
||||
"lint": "run-s -n lint:*",
|
||||
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||
"prebuild": "rm -rf dist",
|
||||
"prepublishOnly": "npm run preversion",
|
||||
"preversion": "run-s test build",
|
||||
"test": "run-s -n test:lint test:only",
|
||||
"test:debug": "node --inspect-brk node_modules/.bin/jest --runInBand",
|
||||
"test:lint": "run-s -n test:lint:*",
|
||||
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
||||
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||
"test:only": "jest --coverage",
|
||||
"test:watch": "jest --coverage --watch"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@rollup/plugin-typescript": "8.3.2",
|
||||
"@types/jest": "27.4.1",
|
||||
"@typescript-eslint/eslint-plugin": "5.20.0",
|
||||
"@typescript-eslint/parser": "5.20.0",
|
||||
"eslint": "8.14.0",
|
||||
"eslint-config-prettier": "8.5.0",
|
||||
"jest": "27.5.1",
|
||||
"jest-config": "27.5.1",
|
||||
"npm-run-all": "4.1.5",
|
||||
"prettier": "2.6.2",
|
||||
"rollup": "2.70.2",
|
||||
"ts-jest": "27.1.4",
|
||||
"tslib": "2.4.0",
|
||||
"typescript": "4.6.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"@jridgewell/gen-mapping": "^0.1.0",
|
||||
"@jridgewell/trace-mapping": "^0.3.9"
|
||||
}
|
||||
}
|
1
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@jridgewell/gen-mapping
generated
vendored
Symbolic link
1
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@jridgewell/gen-mapping
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
||||
../../../@jridgewell+gen-mapping@0.1.1/node_modules/@jridgewell/gen-mapping
|
1
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@jridgewell/trace-mapping
generated
vendored
Symbolic link
1
frontend_old/node_modules/.pnpm/@ampproject+remapping@2.2.0/node_modules/@jridgewell/trace-mapping
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
||||
../../../@jridgewell+trace-mapping@0.3.14/node_modules/@jridgewell/trace-mapping
|
22
frontend_old/node_modules/.pnpm/@babel+code-frame@7.18.6/node_modules/@babel/code-frame/LICENSE
generated
vendored
Normal file
22
frontend_old/node_modules/.pnpm/@babel+code-frame@7.18.6/node_modules/@babel/code-frame/LICENSE
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2014-present Sebastian McKenzie and other contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
19
frontend_old/node_modules/.pnpm/@babel+code-frame@7.18.6/node_modules/@babel/code-frame/README.md
generated
vendored
Normal file
19
frontend_old/node_modules/.pnpm/@babel+code-frame@7.18.6/node_modules/@babel/code-frame/README.md
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
# @babel/code-frame
|
||||
|
||||
> Generate errors that contain a code frame that point to source locations.
|
||||
|
||||
See our website [@babel/code-frame](https://babeljs.io/docs/en/babel-code-frame) for more information.
|
||||
|
||||
## Install
|
||||
|
||||
Using npm:
|
||||
|
||||
```sh
|
||||
npm install --save-dev @babel/code-frame
|
||||
```
|
||||
|
||||
or using yarn:
|
||||
|
||||
```sh
|
||||
yarn add @babel/code-frame --dev
|
||||
```
|
163
frontend_old/node_modules/.pnpm/@babel+code-frame@7.18.6/node_modules/@babel/code-frame/lib/index.js
generated
vendored
Normal file
163
frontend_old/node_modules/.pnpm/@babel+code-frame@7.18.6/node_modules/@babel/code-frame/lib/index.js
generated
vendored
Normal file
@ -0,0 +1,163 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.codeFrameColumns = codeFrameColumns;
|
||||
exports.default = _default;
|
||||
|
||||
var _highlight = require("@babel/highlight");
|
||||
|
||||
let deprecationWarningShown = false;
|
||||
|
||||
function getDefs(chalk) {
|
||||
return {
|
||||
gutter: chalk.grey,
|
||||
marker: chalk.red.bold,
|
||||
message: chalk.red.bold
|
||||
};
|
||||
}
|
||||
|
||||
const NEWLINE = /\r\n|[\n\r\u2028\u2029]/;
|
||||
|
||||
function getMarkerLines(loc, source, opts) {
|
||||
const startLoc = Object.assign({
|
||||
column: 0,
|
||||
line: -1
|
||||
}, loc.start);
|
||||
const endLoc = Object.assign({}, startLoc, loc.end);
|
||||
const {
|
||||
linesAbove = 2,
|
||||
linesBelow = 3
|
||||
} = opts || {};
|
||||
const startLine = startLoc.line;
|
||||
const startColumn = startLoc.column;
|
||||
const endLine = endLoc.line;
|
||||
const endColumn = endLoc.column;
|
||||
let start = Math.max(startLine - (linesAbove + 1), 0);
|
||||
let end = Math.min(source.length, endLine + linesBelow);
|
||||
|
||||
if (startLine === -1) {
|
||||
start = 0;
|
||||
}
|
||||
|
||||
if (endLine === -1) {
|
||||
end = source.length;
|
||||
}
|
||||
|
||||
const lineDiff = endLine - startLine;
|
||||
const markerLines = {};
|
||||
|
||||
if (lineDiff) {
|
||||
for (let i = 0; i <= lineDiff; i++) {
|
||||
const lineNumber = i + startLine;
|
||||
|
||||
if (!startColumn) {
|
||||
markerLines[lineNumber] = true;
|
||||
} else if (i === 0) {
|
||||
const sourceLength = source[lineNumber - 1].length;
|
||||
markerLines[lineNumber] = [startColumn, sourceLength - startColumn + 1];
|
||||
} else if (i === lineDiff) {
|
||||
markerLines[lineNumber] = [0, endColumn];
|
||||
} else {
|
||||
const sourceLength = source[lineNumber - i].length;
|
||||
markerLines[lineNumber] = [0, sourceLength];
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (startColumn === endColumn) {
|
||||
if (startColumn) {
|
||||
markerLines[startLine] = [startColumn, 0];
|
||||
} else {
|
||||
markerLines[startLine] = true;
|
||||
}
|
||||
} else {
|
||||
markerLines[startLine] = [startColumn, endColumn - startColumn];
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
start,
|
||||
end,
|
||||
markerLines
|
||||
};
|
||||
}
|
||||
|
||||
function codeFrameColumns(rawLines, loc, opts = {}) {
|
||||
const highlighted = (opts.highlightCode || opts.forceColor) && (0, _highlight.shouldHighlight)(opts);
|
||||
const chalk = (0, _highlight.getChalk)(opts);
|
||||
const defs = getDefs(chalk);
|
||||
|
||||
const maybeHighlight = (chalkFn, string) => {
|
||||
return highlighted ? chalkFn(string) : string;
|
||||
};
|
||||
|
||||
const lines = rawLines.split(NEWLINE);
|
||||
const {
|
||||
start,
|
||||
end,
|
||||
markerLines
|
||||
} = getMarkerLines(loc, lines, opts);
|
||||
const hasColumns = loc.start && typeof loc.start.column === "number";
|
||||
const numberMaxWidth = String(end).length;
|
||||
const highlightedLines = highlighted ? (0, _highlight.default)(rawLines, opts) : rawLines;
|
||||
let frame = highlightedLines.split(NEWLINE, end).slice(start, end).map((line, index) => {
|
||||
const number = start + 1 + index;
|
||||
const paddedNumber = ` ${number}`.slice(-numberMaxWidth);
|
||||
const gutter = ` ${paddedNumber} |`;
|
||||
const hasMarker = markerLines[number];
|
||||
const lastMarkerLine = !markerLines[number + 1];
|
||||
|
||||
if (hasMarker) {
|
||||
let markerLine = "";
|
||||
|
||||
if (Array.isArray(hasMarker)) {
|
||||
const markerSpacing = line.slice(0, Math.max(hasMarker[0] - 1, 0)).replace(/[^\t]/g, " ");
|
||||
const numberOfMarkers = hasMarker[1] || 1;
|
||||
markerLine = ["\n ", maybeHighlight(defs.gutter, gutter.replace(/\d/g, " ")), " ", markerSpacing, maybeHighlight(defs.marker, "^").repeat(numberOfMarkers)].join("");
|
||||
|
||||
if (lastMarkerLine && opts.message) {
|
||||
markerLine += " " + maybeHighlight(defs.message, opts.message);
|
||||
}
|
||||
}
|
||||
|
||||
return [maybeHighlight(defs.marker, ">"), maybeHighlight(defs.gutter, gutter), line.length > 0 ? ` ${line}` : "", markerLine].join("");
|
||||
} else {
|
||||
return ` ${maybeHighlight(defs.gutter, gutter)}${line.length > 0 ? ` ${line}` : ""}`;
|
||||
}
|
||||
}).join("\n");
|
||||
|
||||
if (opts.message && !hasColumns) {
|
||||
frame = `${" ".repeat(numberMaxWidth + 1)}${opts.message}\n${frame}`;
|
||||
}
|
||||
|
||||
if (highlighted) {
|
||||
return chalk.reset(frame);
|
||||
} else {
|
||||
return frame;
|
||||
}
|
||||
}
|
||||
|
||||
function _default(rawLines, lineNumber, colNumber, opts = {}) {
|
||||
if (!deprecationWarningShown) {
|
||||
deprecationWarningShown = true;
|
||||
const message = "Passing lineNumber and colNumber is deprecated to @babel/code-frame. Please use `codeFrameColumns`.";
|
||||
|
||||
if (process.emitWarning) {
|
||||
process.emitWarning(message, "DeprecationWarning");
|
||||
} else {
|
||||
const deprecationError = new Error(message);
|
||||
deprecationError.name = "DeprecationWarning";
|
||||
console.warn(new Error(message));
|
||||
}
|
||||
}
|
||||
|
||||
colNumber = Math.max(colNumber, 0);
|
||||
const location = {
|
||||
start: {
|
||||
column: colNumber,
|
||||
line: lineNumber
|
||||
}
|
||||
};
|
||||
return codeFrameColumns(rawLines, location, opts);
|
||||
}
|
30
frontend_old/node_modules/.pnpm/@babel+code-frame@7.18.6/node_modules/@babel/code-frame/package.json
generated
vendored
Normal file
30
frontend_old/node_modules/.pnpm/@babel+code-frame@7.18.6/node_modules/@babel/code-frame/package.json
generated
vendored
Normal file
@ -0,0 +1,30 @@
|
||||
{
|
||||
"name": "@babel/code-frame",
|
||||
"version": "7.18.6",
|
||||
"description": "Generate errors that contain a code frame that point to source locations.",
|
||||
"author": "The Babel Team (https://babel.dev/team)",
|
||||
"homepage": "https://babel.dev/docs/en/next/babel-code-frame",
|
||||
"bugs": "https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen",
|
||||
"license": "MIT",
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/babel/babel.git",
|
||||
"directory": "packages/babel-code-frame"
|
||||
},
|
||||
"main": "./lib/index.js",
|
||||
"dependencies": {
|
||||
"@babel/highlight": "^7.18.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/chalk": "^2.0.0",
|
||||
"chalk": "^2.0.0",
|
||||
"strip-ansi": "^4.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.9.0"
|
||||
},
|
||||
"type": "commonjs"
|
||||
}
|
1
frontend_old/node_modules/.pnpm/@babel+code-frame@7.18.6/node_modules/@babel/highlight
generated
vendored
Symbolic link
1
frontend_old/node_modules/.pnpm/@babel+code-frame@7.18.6/node_modules/@babel/highlight
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
||||
../../../@babel+highlight@7.18.6/node_modules/@babel/highlight
|
22
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/LICENSE
generated
vendored
Normal file
22
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/LICENSE
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2014-present Sebastian McKenzie and other contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
19
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/README.md
generated
vendored
Normal file
19
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/README.md
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
# @babel/compat-data
|
||||
|
||||
>
|
||||
|
||||
See our website [@babel/compat-data](https://babeljs.io/docs/en/babel-compat-data) for more information.
|
||||
|
||||
## Install
|
||||
|
||||
Using npm:
|
||||
|
||||
```sh
|
||||
npm install --save @babel/compat-data
|
||||
```
|
||||
|
||||
or using yarn:
|
||||
|
||||
```sh
|
||||
yarn add @babel/compat-data
|
||||
```
|
1
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/corejs2-built-ins.js
generated
vendored
Normal file
1
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/corejs2-built-ins.js
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
module.exports = require("./data/corejs2-built-ins.json");
|
1
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/corejs3-shipped-proposals.js
generated
vendored
Normal file
1
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/corejs3-shipped-proposals.js
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
module.exports = require("./data/corejs3-shipped-proposals.json");
|
1789
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/data/corejs2-built-ins.json
generated
vendored
Normal file
1789
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/data/corejs2-built-ins.json
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
5
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/data/corejs3-shipped-proposals.json
generated
vendored
Normal file
5
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/data/corejs3-shipped-proposals.json
generated
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
[
|
||||
"esnext.global-this",
|
||||
"esnext.promise.all-settled",
|
||||
"esnext.string.match-all"
|
||||
]
|
18
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/data/native-modules.json
generated
vendored
Normal file
18
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/data/native-modules.json
generated
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
{
|
||||
"es6.module": {
|
||||
"chrome": "61",
|
||||
"and_chr": "61",
|
||||
"edge": "16",
|
||||
"firefox": "60",
|
||||
"and_ff": "60",
|
||||
"node": "13.2.0",
|
||||
"opera": "48",
|
||||
"op_mob": "48",
|
||||
"safari": "10.1",
|
||||
"ios": "10.3",
|
||||
"samsung": "8.2",
|
||||
"android": "61",
|
||||
"electron": "2.0",
|
||||
"ios_saf": "10.3"
|
||||
}
|
||||
}
|
22
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/data/overlapping-plugins.json
generated
vendored
Normal file
22
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/data/overlapping-plugins.json
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
{
|
||||
"transform-async-to-generator": [
|
||||
"bugfix/transform-async-arrows-in-class"
|
||||
],
|
||||
"transform-parameters": [
|
||||
"bugfix/transform-edge-default-parameters",
|
||||
"bugfix/transform-safari-id-destructuring-collision-in-function-expression"
|
||||
],
|
||||
"transform-function-name": [
|
||||
"bugfix/transform-edge-function-name"
|
||||
],
|
||||
"transform-block-scoping": [
|
||||
"bugfix/transform-safari-block-shadowing",
|
||||
"bugfix/transform-safari-for-shadowing"
|
||||
],
|
||||
"transform-template-literals": [
|
||||
"bugfix/transform-tagged-template-caching"
|
||||
],
|
||||
"proposal-optional-chaining": [
|
||||
"bugfix/transform-v8-spread-parameters-in-optional-chaining"
|
||||
]
|
||||
}
|
157
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/data/plugin-bugfixes.json
generated
vendored
Normal file
157
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/data/plugin-bugfixes.json
generated
vendored
Normal file
@ -0,0 +1,157 @@
|
||||
{
|
||||
"bugfix/transform-async-arrows-in-class": {
|
||||
"chrome": "55",
|
||||
"opera": "42",
|
||||
"edge": "15",
|
||||
"firefox": "52",
|
||||
"safari": "11",
|
||||
"node": "7.6",
|
||||
"ios": "11",
|
||||
"samsung": "6",
|
||||
"electron": "1.6"
|
||||
},
|
||||
"bugfix/transform-edge-default-parameters": {
|
||||
"chrome": "49",
|
||||
"opera": "36",
|
||||
"edge": "18",
|
||||
"firefox": "52",
|
||||
"safari": "10",
|
||||
"node": "6",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"electron": "0.37"
|
||||
},
|
||||
"bugfix/transform-edge-function-name": {
|
||||
"chrome": "51",
|
||||
"opera": "38",
|
||||
"edge": "79",
|
||||
"firefox": "53",
|
||||
"safari": "10",
|
||||
"node": "6.5",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"electron": "1.2"
|
||||
},
|
||||
"bugfix/transform-safari-block-shadowing": {
|
||||
"chrome": "49",
|
||||
"opera": "36",
|
||||
"edge": "12",
|
||||
"firefox": "44",
|
||||
"safari": "11",
|
||||
"node": "6",
|
||||
"ie": "11",
|
||||
"ios": "11",
|
||||
"samsung": "5",
|
||||
"electron": "0.37"
|
||||
},
|
||||
"bugfix/transform-safari-for-shadowing": {
|
||||
"chrome": "49",
|
||||
"opera": "36",
|
||||
"edge": "12",
|
||||
"firefox": "4",
|
||||
"safari": "11",
|
||||
"node": "6",
|
||||
"ie": "11",
|
||||
"ios": "11",
|
||||
"samsung": "5",
|
||||
"rhino": "1.7.13",
|
||||
"electron": "0.37"
|
||||
},
|
||||
"bugfix/transform-safari-id-destructuring-collision-in-function-expression": {
|
||||
"chrome": "49",
|
||||
"opera": "36",
|
||||
"edge": "14",
|
||||
"firefox": "2",
|
||||
"node": "6",
|
||||
"samsung": "5",
|
||||
"electron": "0.37"
|
||||
},
|
||||
"bugfix/transform-tagged-template-caching": {
|
||||
"chrome": "41",
|
||||
"opera": "28",
|
||||
"edge": "12",
|
||||
"firefox": "34",
|
||||
"safari": "13",
|
||||
"node": "4",
|
||||
"ios": "13",
|
||||
"samsung": "3.4",
|
||||
"rhino": "1.7.14",
|
||||
"electron": "0.21"
|
||||
},
|
||||
"bugfix/transform-v8-spread-parameters-in-optional-chaining": {
|
||||
"chrome": "91",
|
||||
"opera": "77",
|
||||
"edge": "91",
|
||||
"firefox": "74",
|
||||
"safari": "13.1",
|
||||
"node": "16.9",
|
||||
"ios": "13.4",
|
||||
"electron": "13.0"
|
||||
},
|
||||
"proposal-optional-chaining": {
|
||||
"chrome": "80",
|
||||
"opera": "67",
|
||||
"edge": "80",
|
||||
"firefox": "74",
|
||||
"safari": "13.1",
|
||||
"node": "14",
|
||||
"ios": "13.4",
|
||||
"samsung": "13",
|
||||
"electron": "8.0"
|
||||
},
|
||||
"transform-parameters": {
|
||||
"chrome": "49",
|
||||
"opera": "36",
|
||||
"edge": "15",
|
||||
"firefox": "53",
|
||||
"safari": "10",
|
||||
"node": "6",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"electron": "0.37"
|
||||
},
|
||||
"transform-async-to-generator": {
|
||||
"chrome": "55",
|
||||
"opera": "42",
|
||||
"edge": "15",
|
||||
"firefox": "52",
|
||||
"safari": "10.1",
|
||||
"node": "7.6",
|
||||
"ios": "10.3",
|
||||
"samsung": "6",
|
||||
"electron": "1.6"
|
||||
},
|
||||
"transform-template-literals": {
|
||||
"chrome": "41",
|
||||
"opera": "28",
|
||||
"edge": "13",
|
||||
"firefox": "34",
|
||||
"safari": "9",
|
||||
"node": "4",
|
||||
"ios": "9",
|
||||
"samsung": "3.4",
|
||||
"electron": "0.21"
|
||||
},
|
||||
"transform-function-name": {
|
||||
"chrome": "51",
|
||||
"opera": "38",
|
||||
"edge": "14",
|
||||
"firefox": "53",
|
||||
"safari": "10",
|
||||
"node": "6.5",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"electron": "1.2"
|
||||
},
|
||||
"transform-block-scoping": {
|
||||
"chrome": "49",
|
||||
"opera": "36",
|
||||
"edge": "14",
|
||||
"firefox": "51",
|
||||
"safari": "10",
|
||||
"node": "6",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"electron": "0.37"
|
||||
}
|
||||
}
|
478
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/data/plugins.json
generated
vendored
Normal file
478
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/data/plugins.json
generated
vendored
Normal file
@ -0,0 +1,478 @@
|
||||
{
|
||||
"proposal-class-static-block": {
|
||||
"chrome": "94",
|
||||
"opera": "80",
|
||||
"edge": "94",
|
||||
"firefox": "93",
|
||||
"node": "16.11",
|
||||
"electron": "15.0"
|
||||
},
|
||||
"proposal-private-property-in-object": {
|
||||
"chrome": "91",
|
||||
"opera": "77",
|
||||
"edge": "91",
|
||||
"firefox": "90",
|
||||
"safari": "15",
|
||||
"node": "16.9",
|
||||
"ios": "15",
|
||||
"electron": "13.0"
|
||||
},
|
||||
"proposal-class-properties": {
|
||||
"chrome": "74",
|
||||
"opera": "62",
|
||||
"edge": "79",
|
||||
"firefox": "90",
|
||||
"safari": "14.1",
|
||||
"node": "12",
|
||||
"ios": "15",
|
||||
"samsung": "11",
|
||||
"electron": "6.0"
|
||||
},
|
||||
"proposal-private-methods": {
|
||||
"chrome": "84",
|
||||
"opera": "70",
|
||||
"edge": "84",
|
||||
"firefox": "90",
|
||||
"safari": "15",
|
||||
"node": "14.6",
|
||||
"ios": "15",
|
||||
"samsung": "14",
|
||||
"electron": "10.0"
|
||||
},
|
||||
"proposal-numeric-separator": {
|
||||
"chrome": "75",
|
||||
"opera": "62",
|
||||
"edge": "79",
|
||||
"firefox": "70",
|
||||
"safari": "13",
|
||||
"node": "12.5",
|
||||
"ios": "13",
|
||||
"samsung": "11",
|
||||
"rhino": "1.7.14",
|
||||
"electron": "6.0"
|
||||
},
|
||||
"proposal-logical-assignment-operators": {
|
||||
"chrome": "85",
|
||||
"opera": "71",
|
||||
"edge": "85",
|
||||
"firefox": "79",
|
||||
"safari": "14",
|
||||
"node": "15",
|
||||
"ios": "14",
|
||||
"samsung": "14",
|
||||
"electron": "10.0"
|
||||
},
|
||||
"proposal-nullish-coalescing-operator": {
|
||||
"chrome": "80",
|
||||
"opera": "67",
|
||||
"edge": "80",
|
||||
"firefox": "72",
|
||||
"safari": "13.1",
|
||||
"node": "14",
|
||||
"ios": "13.4",
|
||||
"samsung": "13",
|
||||
"electron": "8.0"
|
||||
},
|
||||
"proposal-optional-chaining": {
|
||||
"chrome": "91",
|
||||
"opera": "77",
|
||||
"edge": "91",
|
||||
"firefox": "74",
|
||||
"safari": "13.1",
|
||||
"node": "16.9",
|
||||
"ios": "13.4",
|
||||
"electron": "13.0"
|
||||
},
|
||||
"proposal-json-strings": {
|
||||
"chrome": "66",
|
||||
"opera": "53",
|
||||
"edge": "79",
|
||||
"firefox": "62",
|
||||
"safari": "12",
|
||||
"node": "10",
|
||||
"ios": "12",
|
||||
"samsung": "9",
|
||||
"rhino": "1.7.14",
|
||||
"electron": "3.0"
|
||||
},
|
||||
"proposal-optional-catch-binding": {
|
||||
"chrome": "66",
|
||||
"opera": "53",
|
||||
"edge": "79",
|
||||
"firefox": "58",
|
||||
"safari": "11.1",
|
||||
"node": "10",
|
||||
"ios": "11.3",
|
||||
"samsung": "9",
|
||||
"electron": "3.0"
|
||||
},
|
||||
"transform-parameters": {
|
||||
"chrome": "49",
|
||||
"opera": "36",
|
||||
"edge": "18",
|
||||
"firefox": "53",
|
||||
"node": "6",
|
||||
"samsung": "5",
|
||||
"electron": "0.37"
|
||||
},
|
||||
"proposal-async-generator-functions": {
|
||||
"chrome": "63",
|
||||
"opera": "50",
|
||||
"edge": "79",
|
||||
"firefox": "57",
|
||||
"safari": "12",
|
||||
"node": "10",
|
||||
"ios": "12",
|
||||
"samsung": "8",
|
||||
"electron": "3.0"
|
||||
},
|
||||
"proposal-object-rest-spread": {
|
||||
"chrome": "60",
|
||||
"opera": "47",
|
||||
"edge": "79",
|
||||
"firefox": "55",
|
||||
"safari": "11.1",
|
||||
"node": "8.3",
|
||||
"ios": "11.3",
|
||||
"samsung": "8",
|
||||
"electron": "2.0"
|
||||
},
|
||||
"transform-dotall-regex": {
|
||||
"chrome": "62",
|
||||
"opera": "49",
|
||||
"edge": "79",
|
||||
"firefox": "78",
|
||||
"safari": "11.1",
|
||||
"node": "8.10",
|
||||
"ios": "11.3",
|
||||
"samsung": "8",
|
||||
"electron": "3.0"
|
||||
},
|
||||
"proposal-unicode-property-regex": {
|
||||
"chrome": "64",
|
||||
"opera": "51",
|
||||
"edge": "79",
|
||||
"firefox": "78",
|
||||
"safari": "11.1",
|
||||
"node": "10",
|
||||
"ios": "11.3",
|
||||
"samsung": "9",
|
||||
"electron": "3.0"
|
||||
},
|
||||
"transform-named-capturing-groups-regex": {
|
||||
"chrome": "64",
|
||||
"opera": "51",
|
||||
"edge": "79",
|
||||
"firefox": "78",
|
||||
"safari": "11.1",
|
||||
"node": "10",
|
||||
"ios": "11.3",
|
||||
"samsung": "9",
|
||||
"electron": "3.0"
|
||||
},
|
||||
"transform-async-to-generator": {
|
||||
"chrome": "55",
|
||||
"opera": "42",
|
||||
"edge": "15",
|
||||
"firefox": "52",
|
||||
"safari": "11",
|
||||
"node": "7.6",
|
||||
"ios": "11",
|
||||
"samsung": "6",
|
||||
"electron": "1.6"
|
||||
},
|
||||
"transform-exponentiation-operator": {
|
||||
"chrome": "52",
|
||||
"opera": "39",
|
||||
"edge": "14",
|
||||
"firefox": "52",
|
||||
"safari": "10.1",
|
||||
"node": "7",
|
||||
"ios": "10.3",
|
||||
"samsung": "6",
|
||||
"rhino": "1.7.14",
|
||||
"electron": "1.3"
|
||||
},
|
||||
"transform-template-literals": {
|
||||
"chrome": "41",
|
||||
"opera": "28",
|
||||
"edge": "13",
|
||||
"firefox": "34",
|
||||
"safari": "13",
|
||||
"node": "4",
|
||||
"ios": "13",
|
||||
"samsung": "3.4",
|
||||
"electron": "0.21"
|
||||
},
|
||||
"transform-literals": {
|
||||
"chrome": "44",
|
||||
"opera": "31",
|
||||
"edge": "12",
|
||||
"firefox": "53",
|
||||
"safari": "9",
|
||||
"node": "4",
|
||||
"ios": "9",
|
||||
"samsung": "4",
|
||||
"electron": "0.30"
|
||||
},
|
||||
"transform-function-name": {
|
||||
"chrome": "51",
|
||||
"opera": "38",
|
||||
"edge": "79",
|
||||
"firefox": "53",
|
||||
"safari": "10",
|
||||
"node": "6.5",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"electron": "1.2"
|
||||
},
|
||||
"transform-arrow-functions": {
|
||||
"chrome": "47",
|
||||
"opera": "34",
|
||||
"edge": "13",
|
||||
"firefox": "43",
|
||||
"safari": "10",
|
||||
"node": "6",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"rhino": "1.7.13",
|
||||
"electron": "0.36"
|
||||
},
|
||||
"transform-block-scoped-functions": {
|
||||
"chrome": "41",
|
||||
"opera": "28",
|
||||
"edge": "12",
|
||||
"firefox": "46",
|
||||
"safari": "10",
|
||||
"node": "4",
|
||||
"ie": "11",
|
||||
"ios": "10",
|
||||
"samsung": "3.4",
|
||||
"electron": "0.21"
|
||||
},
|
||||
"transform-classes": {
|
||||
"chrome": "46",
|
||||
"opera": "33",
|
||||
"edge": "13",
|
||||
"firefox": "45",
|
||||
"safari": "10",
|
||||
"node": "5",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"electron": "0.36"
|
||||
},
|
||||
"transform-object-super": {
|
||||
"chrome": "46",
|
||||
"opera": "33",
|
||||
"edge": "13",
|
||||
"firefox": "45",
|
||||
"safari": "10",
|
||||
"node": "5",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"electron": "0.36"
|
||||
},
|
||||
"transform-shorthand-properties": {
|
||||
"chrome": "43",
|
||||
"opera": "30",
|
||||
"edge": "12",
|
||||
"firefox": "33",
|
||||
"safari": "9",
|
||||
"node": "4",
|
||||
"ios": "9",
|
||||
"samsung": "4",
|
||||
"rhino": "1.7.14",
|
||||
"electron": "0.27"
|
||||
},
|
||||
"transform-duplicate-keys": {
|
||||
"chrome": "42",
|
||||
"opera": "29",
|
||||
"edge": "12",
|
||||
"firefox": "34",
|
||||
"safari": "9",
|
||||
"node": "4",
|
||||
"ios": "9",
|
||||
"samsung": "3.4",
|
||||
"electron": "0.25"
|
||||
},
|
||||
"transform-computed-properties": {
|
||||
"chrome": "44",
|
||||
"opera": "31",
|
||||
"edge": "12",
|
||||
"firefox": "34",
|
||||
"safari": "7.1",
|
||||
"node": "4",
|
||||
"ios": "8",
|
||||
"samsung": "4",
|
||||
"electron": "0.30"
|
||||
},
|
||||
"transform-for-of": {
|
||||
"chrome": "51",
|
||||
"opera": "38",
|
||||
"edge": "15",
|
||||
"firefox": "53",
|
||||
"safari": "10",
|
||||
"node": "6.5",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"electron": "1.2"
|
||||
},
|
||||
"transform-sticky-regex": {
|
||||
"chrome": "49",
|
||||
"opera": "36",
|
||||
"edge": "13",
|
||||
"firefox": "3",
|
||||
"safari": "10",
|
||||
"node": "6",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"electron": "0.37"
|
||||
},
|
||||
"transform-unicode-escapes": {
|
||||
"chrome": "44",
|
||||
"opera": "31",
|
||||
"edge": "12",
|
||||
"firefox": "53",
|
||||
"safari": "9",
|
||||
"node": "4",
|
||||
"ios": "9",
|
||||
"samsung": "4",
|
||||
"electron": "0.30"
|
||||
},
|
||||
"transform-unicode-regex": {
|
||||
"chrome": "50",
|
||||
"opera": "37",
|
||||
"edge": "13",
|
||||
"firefox": "46",
|
||||
"safari": "12",
|
||||
"node": "6",
|
||||
"ios": "12",
|
||||
"samsung": "5",
|
||||
"electron": "1.1"
|
||||
},
|
||||
"transform-spread": {
|
||||
"chrome": "46",
|
||||
"opera": "33",
|
||||
"edge": "13",
|
||||
"firefox": "45",
|
||||
"safari": "10",
|
||||
"node": "5",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"electron": "0.36"
|
||||
},
|
||||
"transform-destructuring": {
|
||||
"chrome": "51",
|
||||
"opera": "38",
|
||||
"edge": "15",
|
||||
"firefox": "53",
|
||||
"safari": "10",
|
||||
"node": "6.5",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"electron": "1.2"
|
||||
},
|
||||
"transform-block-scoping": {
|
||||
"chrome": "49",
|
||||
"opera": "36",
|
||||
"edge": "14",
|
||||
"firefox": "51",
|
||||
"safari": "11",
|
||||
"node": "6",
|
||||
"ios": "11",
|
||||
"samsung": "5",
|
||||
"electron": "0.37"
|
||||
},
|
||||
"transform-typeof-symbol": {
|
||||
"chrome": "38",
|
||||
"opera": "25",
|
||||
"edge": "12",
|
||||
"firefox": "36",
|
||||
"safari": "9",
|
||||
"node": "0.12",
|
||||
"ios": "9",
|
||||
"samsung": "3",
|
||||
"rhino": "1.7.13",
|
||||
"electron": "0.20"
|
||||
},
|
||||
"transform-new-target": {
|
||||
"chrome": "46",
|
||||
"opera": "33",
|
||||
"edge": "14",
|
||||
"firefox": "41",
|
||||
"safari": "10",
|
||||
"node": "5",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"electron": "0.36"
|
||||
},
|
||||
"transform-regenerator": {
|
||||
"chrome": "50",
|
||||
"opera": "37",
|
||||
"edge": "13",
|
||||
"firefox": "53",
|
||||
"safari": "10",
|
||||
"node": "6",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"electron": "1.1"
|
||||
},
|
||||
"transform-member-expression-literals": {
|
||||
"chrome": "7",
|
||||
"opera": "12",
|
||||
"edge": "12",
|
||||
"firefox": "2",
|
||||
"safari": "5.1",
|
||||
"node": "0.4",
|
||||
"ie": "9",
|
||||
"android": "4",
|
||||
"ios": "6",
|
||||
"phantom": "1.9",
|
||||
"samsung": "1",
|
||||
"rhino": "1.7.13",
|
||||
"electron": "0.20"
|
||||
},
|
||||
"transform-property-literals": {
|
||||
"chrome": "7",
|
||||
"opera": "12",
|
||||
"edge": "12",
|
||||
"firefox": "2",
|
||||
"safari": "5.1",
|
||||
"node": "0.4",
|
||||
"ie": "9",
|
||||
"android": "4",
|
||||
"ios": "6",
|
||||
"phantom": "1.9",
|
||||
"samsung": "1",
|
||||
"rhino": "1.7.13",
|
||||
"electron": "0.20"
|
||||
},
|
||||
"transform-reserved-words": {
|
||||
"chrome": "13",
|
||||
"opera": "10.50",
|
||||
"edge": "12",
|
||||
"firefox": "2",
|
||||
"safari": "3.1",
|
||||
"node": "0.6",
|
||||
"ie": "9",
|
||||
"android": "4.4",
|
||||
"ios": "6",
|
||||
"phantom": "1.9",
|
||||
"samsung": "1",
|
||||
"rhino": "1.7.13",
|
||||
"electron": "0.20"
|
||||
},
|
||||
"proposal-export-namespace-from": {
|
||||
"chrome": "72",
|
||||
"and_chr": "72",
|
||||
"edge": "79",
|
||||
"firefox": "80",
|
||||
"and_ff": "80",
|
||||
"node": "13.2",
|
||||
"opera": "60",
|
||||
"op_mob": "51",
|
||||
"samsung": "11.0",
|
||||
"android": "72",
|
||||
"electron": "5.0"
|
||||
}
|
||||
}
|
1
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/native-modules.js
generated
vendored
Normal file
1
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/native-modules.js
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
module.exports = require("./data/native-modules.json");
|
1
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/overlapping-plugins.js
generated
vendored
Normal file
1
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/overlapping-plugins.js
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
module.exports = require("./data/overlapping-plugins.json");
|
40
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/package.json
generated
vendored
Normal file
40
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/package.json
generated
vendored
Normal file
@ -0,0 +1,40 @@
|
||||
{
|
||||
"name": "@babel/compat-data",
|
||||
"version": "7.18.8",
|
||||
"author": "The Babel Team (https://babel.dev/team)",
|
||||
"license": "MIT",
|
||||
"description": "",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/babel/babel.git",
|
||||
"directory": "packages/babel-compat-data"
|
||||
},
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"exports": {
|
||||
"./plugins": "./plugins.js",
|
||||
"./native-modules": "./native-modules.js",
|
||||
"./corejs2-built-ins": "./corejs2-built-ins.js",
|
||||
"./corejs3-shipped-proposals": "./corejs3-shipped-proposals.js",
|
||||
"./overlapping-plugins": "./overlapping-plugins.js",
|
||||
"./plugin-bugfixes": "./plugin-bugfixes.js"
|
||||
},
|
||||
"scripts": {
|
||||
"build-data": "./scripts/download-compat-table.sh && node ./scripts/build-data.js && node ./scripts/build-modules-support.js && node ./scripts/build-bugfixes-targets.js"
|
||||
},
|
||||
"keywords": [
|
||||
"babel",
|
||||
"compat-table",
|
||||
"compat-data"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@mdn/browser-compat-data": "^4.0.10",
|
||||
"core-js-compat": "^3.22.1",
|
||||
"electron-to-chromium": "^1.4.113"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.9.0"
|
||||
},
|
||||
"type": "commonjs"
|
||||
}
|
1
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/plugin-bugfixes.js
generated
vendored
Normal file
1
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/plugin-bugfixes.js
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
module.exports = require("./data/plugin-bugfixes.json");
|
1
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/plugins.js
generated
vendored
Normal file
1
frontend_old/node_modules/.pnpm/@babel+compat-data@7.18.8/node_modules/@babel/compat-data/plugins.js
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
module.exports = require("./data/plugins.json");
|
1
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@ampproject/remapping
generated
vendored
Symbolic link
1
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@ampproject/remapping
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
||||
../../../@ampproject+remapping@2.2.0/node_modules/@ampproject/remapping
|
1
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/code-frame
generated
vendored
Symbolic link
1
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/code-frame
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
||||
../../../@babel+code-frame@7.18.6/node_modules/@babel/code-frame
|
22
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/LICENSE
generated
vendored
Normal file
22
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/LICENSE
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2014-present Sebastian McKenzie and other contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
19
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/README.md
generated
vendored
Normal file
19
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/README.md
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
# @babel/core
|
||||
|
||||
> Babel compiler core.
|
||||
|
||||
See our website [@babel/core](https://babeljs.io/docs/en/babel-core) for more information or the [issues](https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3A%22pkg%3A%20core%22+is%3Aopen) associated with this package.
|
||||
|
||||
## Install
|
||||
|
||||
Using npm:
|
||||
|
||||
```sh
|
||||
npm install --save-dev @babel/core
|
||||
```
|
||||
|
||||
or using yarn:
|
||||
|
||||
```sh
|
||||
yarn add @babel/core --dev
|
||||
```
|
1
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/cache-contexts.js
generated
vendored
Normal file
1
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/cache-contexts.js
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
0 && 0;
|
327
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/caching.js
generated
vendored
Normal file
327
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/caching.js
generated
vendored
Normal file
@ -0,0 +1,327 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.assertSimpleType = assertSimpleType;
|
||||
exports.makeStrongCache = makeStrongCache;
|
||||
exports.makeStrongCacheSync = makeStrongCacheSync;
|
||||
exports.makeWeakCache = makeWeakCache;
|
||||
exports.makeWeakCacheSync = makeWeakCacheSync;
|
||||
|
||||
function _gensync() {
|
||||
const data = require("gensync");
|
||||
|
||||
_gensync = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
var _async = require("../gensync-utils/async");
|
||||
|
||||
var _util = require("./util");
|
||||
|
||||
const synchronize = gen => {
|
||||
return _gensync()(gen).sync;
|
||||
};
|
||||
|
||||
function* genTrue() {
|
||||
return true;
|
||||
}
|
||||
|
||||
function makeWeakCache(handler) {
|
||||
return makeCachedFunction(WeakMap, handler);
|
||||
}
|
||||
|
||||
function makeWeakCacheSync(handler) {
|
||||
return synchronize(makeWeakCache(handler));
|
||||
}
|
||||
|
||||
function makeStrongCache(handler) {
|
||||
return makeCachedFunction(Map, handler);
|
||||
}
|
||||
|
||||
function makeStrongCacheSync(handler) {
|
||||
return synchronize(makeStrongCache(handler));
|
||||
}
|
||||
|
||||
function makeCachedFunction(CallCache, handler) {
|
||||
const callCacheSync = new CallCache();
|
||||
const callCacheAsync = new CallCache();
|
||||
const futureCache = new CallCache();
|
||||
return function* cachedFunction(arg, data) {
|
||||
const asyncContext = yield* (0, _async.isAsync)();
|
||||
const callCache = asyncContext ? callCacheAsync : callCacheSync;
|
||||
const cached = yield* getCachedValueOrWait(asyncContext, callCache, futureCache, arg, data);
|
||||
if (cached.valid) return cached.value;
|
||||
const cache = new CacheConfigurator(data);
|
||||
const handlerResult = handler(arg, cache);
|
||||
let finishLock;
|
||||
let value;
|
||||
|
||||
if ((0, _util.isIterableIterator)(handlerResult)) {
|
||||
const gen = handlerResult;
|
||||
value = yield* (0, _async.onFirstPause)(gen, () => {
|
||||
finishLock = setupAsyncLocks(cache, futureCache, arg);
|
||||
});
|
||||
} else {
|
||||
value = handlerResult;
|
||||
}
|
||||
|
||||
updateFunctionCache(callCache, cache, arg, value);
|
||||
|
||||
if (finishLock) {
|
||||
futureCache.delete(arg);
|
||||
finishLock.release(value);
|
||||
}
|
||||
|
||||
return value;
|
||||
};
|
||||
}
|
||||
|
||||
function* getCachedValue(cache, arg, data) {
|
||||
const cachedValue = cache.get(arg);
|
||||
|
||||
if (cachedValue) {
|
||||
for (const {
|
||||
value,
|
||||
valid
|
||||
} of cachedValue) {
|
||||
if (yield* valid(data)) return {
|
||||
valid: true,
|
||||
value
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
valid: false,
|
||||
value: null
|
||||
};
|
||||
}
|
||||
|
||||
function* getCachedValueOrWait(asyncContext, callCache, futureCache, arg, data) {
|
||||
const cached = yield* getCachedValue(callCache, arg, data);
|
||||
|
||||
if (cached.valid) {
|
||||
return cached;
|
||||
}
|
||||
|
||||
if (asyncContext) {
|
||||
const cached = yield* getCachedValue(futureCache, arg, data);
|
||||
|
||||
if (cached.valid) {
|
||||
const value = yield* (0, _async.waitFor)(cached.value.promise);
|
||||
return {
|
||||
valid: true,
|
||||
value
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
valid: false,
|
||||
value: null
|
||||
};
|
||||
}
|
||||
|
||||
function setupAsyncLocks(config, futureCache, arg) {
|
||||
const finishLock = new Lock();
|
||||
updateFunctionCache(futureCache, config, arg, finishLock);
|
||||
return finishLock;
|
||||
}
|
||||
|
||||
function updateFunctionCache(cache, config, arg, value) {
|
||||
if (!config.configured()) config.forever();
|
||||
let cachedValue = cache.get(arg);
|
||||
config.deactivate();
|
||||
|
||||
switch (config.mode()) {
|
||||
case "forever":
|
||||
cachedValue = [{
|
||||
value,
|
||||
valid: genTrue
|
||||
}];
|
||||
cache.set(arg, cachedValue);
|
||||
break;
|
||||
|
||||
case "invalidate":
|
||||
cachedValue = [{
|
||||
value,
|
||||
valid: config.validator()
|
||||
}];
|
||||
cache.set(arg, cachedValue);
|
||||
break;
|
||||
|
||||
case "valid":
|
||||
if (cachedValue) {
|
||||
cachedValue.push({
|
||||
value,
|
||||
valid: config.validator()
|
||||
});
|
||||
} else {
|
||||
cachedValue = [{
|
||||
value,
|
||||
valid: config.validator()
|
||||
}];
|
||||
cache.set(arg, cachedValue);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
class CacheConfigurator {
|
||||
constructor(data) {
|
||||
this._active = true;
|
||||
this._never = false;
|
||||
this._forever = false;
|
||||
this._invalidate = false;
|
||||
this._configured = false;
|
||||
this._pairs = [];
|
||||
this._data = void 0;
|
||||
this._data = data;
|
||||
}
|
||||
|
||||
simple() {
|
||||
return makeSimpleConfigurator(this);
|
||||
}
|
||||
|
||||
mode() {
|
||||
if (this._never) return "never";
|
||||
if (this._forever) return "forever";
|
||||
if (this._invalidate) return "invalidate";
|
||||
return "valid";
|
||||
}
|
||||
|
||||
forever() {
|
||||
if (!this._active) {
|
||||
throw new Error("Cannot change caching after evaluation has completed.");
|
||||
}
|
||||
|
||||
if (this._never) {
|
||||
throw new Error("Caching has already been configured with .never()");
|
||||
}
|
||||
|
||||
this._forever = true;
|
||||
this._configured = true;
|
||||
}
|
||||
|
||||
never() {
|
||||
if (!this._active) {
|
||||
throw new Error("Cannot change caching after evaluation has completed.");
|
||||
}
|
||||
|
||||
if (this._forever) {
|
||||
throw new Error("Caching has already been configured with .forever()");
|
||||
}
|
||||
|
||||
this._never = true;
|
||||
this._configured = true;
|
||||
}
|
||||
|
||||
using(handler) {
|
||||
if (!this._active) {
|
||||
throw new Error("Cannot change caching after evaluation has completed.");
|
||||
}
|
||||
|
||||
if (this._never || this._forever) {
|
||||
throw new Error("Caching has already been configured with .never or .forever()");
|
||||
}
|
||||
|
||||
this._configured = true;
|
||||
const key = handler(this._data);
|
||||
const fn = (0, _async.maybeAsync)(handler, `You appear to be using an async cache handler, but Babel has been called synchronously`);
|
||||
|
||||
if ((0, _async.isThenable)(key)) {
|
||||
return key.then(key => {
|
||||
this._pairs.push([key, fn]);
|
||||
|
||||
return key;
|
||||
});
|
||||
}
|
||||
|
||||
this._pairs.push([key, fn]);
|
||||
|
||||
return key;
|
||||
}
|
||||
|
||||
invalidate(handler) {
|
||||
this._invalidate = true;
|
||||
return this.using(handler);
|
||||
}
|
||||
|
||||
validator() {
|
||||
const pairs = this._pairs;
|
||||
return function* (data) {
|
||||
for (const [key, fn] of pairs) {
|
||||
if (key !== (yield* fn(data))) return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
}
|
||||
|
||||
deactivate() {
|
||||
this._active = false;
|
||||
}
|
||||
|
||||
configured() {
|
||||
return this._configured;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
function makeSimpleConfigurator(cache) {
|
||||
function cacheFn(val) {
|
||||
if (typeof val === "boolean") {
|
||||
if (val) cache.forever();else cache.never();
|
||||
return;
|
||||
}
|
||||
|
||||
return cache.using(() => assertSimpleType(val()));
|
||||
}
|
||||
|
||||
cacheFn.forever = () => cache.forever();
|
||||
|
||||
cacheFn.never = () => cache.never();
|
||||
|
||||
cacheFn.using = cb => cache.using(() => assertSimpleType(cb()));
|
||||
|
||||
cacheFn.invalidate = cb => cache.invalidate(() => assertSimpleType(cb()));
|
||||
|
||||
return cacheFn;
|
||||
}
|
||||
|
||||
function assertSimpleType(value) {
|
||||
if ((0, _async.isThenable)(value)) {
|
||||
throw new Error(`You appear to be using an async cache handler, ` + `which your current version of Babel does not support. ` + `We may add support for this in the future, ` + `but if you're on the most recent version of @babel/core and still ` + `seeing this error, then you'll need to synchronously handle your caching logic.`);
|
||||
}
|
||||
|
||||
if (value != null && typeof value !== "string" && typeof value !== "boolean" && typeof value !== "number") {
|
||||
throw new Error("Cache keys must be either string, boolean, number, null, or undefined.");
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
class Lock {
|
||||
constructor() {
|
||||
this.released = false;
|
||||
this.promise = void 0;
|
||||
this._resolve = void 0;
|
||||
this.promise = new Promise(resolve => {
|
||||
this._resolve = resolve;
|
||||
});
|
||||
}
|
||||
|
||||
release(value) {
|
||||
this.released = true;
|
||||
|
||||
this._resolve(value);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
0 && 0;
|
566
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/config-chain.js
generated
vendored
Normal file
566
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/config-chain.js
generated
vendored
Normal file
@ -0,0 +1,566 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.buildPresetChain = buildPresetChain;
|
||||
exports.buildPresetChainWalker = void 0;
|
||||
exports.buildRootChain = buildRootChain;
|
||||
|
||||
function _path() {
|
||||
const data = require("path");
|
||||
|
||||
_path = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _debug() {
|
||||
const data = require("debug");
|
||||
|
||||
_debug = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
var _options = require("./validation/options");
|
||||
|
||||
var _patternToRegex = require("./pattern-to-regex");
|
||||
|
||||
var _printer = require("./printer");
|
||||
|
||||
var _files = require("./files");
|
||||
|
||||
var _caching = require("./caching");
|
||||
|
||||
var _configDescriptors = require("./config-descriptors");
|
||||
|
||||
const debug = _debug()("babel:config:config-chain");
|
||||
|
||||
function* buildPresetChain(arg, context) {
|
||||
const chain = yield* buildPresetChainWalker(arg, context);
|
||||
if (!chain) return null;
|
||||
return {
|
||||
plugins: dedupDescriptors(chain.plugins),
|
||||
presets: dedupDescriptors(chain.presets),
|
||||
options: chain.options.map(o => normalizeOptions(o)),
|
||||
files: new Set()
|
||||
};
|
||||
}
|
||||
|
||||
const buildPresetChainWalker = makeChainWalker({
|
||||
root: preset => loadPresetDescriptors(preset),
|
||||
env: (preset, envName) => loadPresetEnvDescriptors(preset)(envName),
|
||||
overrides: (preset, index) => loadPresetOverridesDescriptors(preset)(index),
|
||||
overridesEnv: (preset, index, envName) => loadPresetOverridesEnvDescriptors(preset)(index)(envName),
|
||||
createLogger: () => () => {}
|
||||
});
|
||||
exports.buildPresetChainWalker = buildPresetChainWalker;
|
||||
const loadPresetDescriptors = (0, _caching.makeWeakCacheSync)(preset => buildRootDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors));
|
||||
const loadPresetEnvDescriptors = (0, _caching.makeWeakCacheSync)(preset => (0, _caching.makeStrongCacheSync)(envName => buildEnvDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, envName)));
|
||||
const loadPresetOverridesDescriptors = (0, _caching.makeWeakCacheSync)(preset => (0, _caching.makeStrongCacheSync)(index => buildOverrideDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, index)));
|
||||
const loadPresetOverridesEnvDescriptors = (0, _caching.makeWeakCacheSync)(preset => (0, _caching.makeStrongCacheSync)(index => (0, _caching.makeStrongCacheSync)(envName => buildOverrideEnvDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, index, envName))));
|
||||
|
||||
function* buildRootChain(opts, context) {
|
||||
let configReport, babelRcReport;
|
||||
const programmaticLogger = new _printer.ConfigPrinter();
|
||||
const programmaticChain = yield* loadProgrammaticChain({
|
||||
options: opts,
|
||||
dirname: context.cwd
|
||||
}, context, undefined, programmaticLogger);
|
||||
if (!programmaticChain) return null;
|
||||
const programmaticReport = yield* programmaticLogger.output();
|
||||
let configFile;
|
||||
|
||||
if (typeof opts.configFile === "string") {
|
||||
configFile = yield* (0, _files.loadConfig)(opts.configFile, context.cwd, context.envName, context.caller);
|
||||
} else if (opts.configFile !== false) {
|
||||
configFile = yield* (0, _files.findRootConfig)(context.root, context.envName, context.caller);
|
||||
}
|
||||
|
||||
let {
|
||||
babelrc,
|
||||
babelrcRoots
|
||||
} = opts;
|
||||
let babelrcRootsDirectory = context.cwd;
|
||||
const configFileChain = emptyChain();
|
||||
const configFileLogger = new _printer.ConfigPrinter();
|
||||
|
||||
if (configFile) {
|
||||
const validatedFile = validateConfigFile(configFile);
|
||||
const result = yield* loadFileChain(validatedFile, context, undefined, configFileLogger);
|
||||
if (!result) return null;
|
||||
configReport = yield* configFileLogger.output();
|
||||
|
||||
if (babelrc === undefined) {
|
||||
babelrc = validatedFile.options.babelrc;
|
||||
}
|
||||
|
||||
if (babelrcRoots === undefined) {
|
||||
babelrcRootsDirectory = validatedFile.dirname;
|
||||
babelrcRoots = validatedFile.options.babelrcRoots;
|
||||
}
|
||||
|
||||
mergeChain(configFileChain, result);
|
||||
}
|
||||
|
||||
let ignoreFile, babelrcFile;
|
||||
let isIgnored = false;
|
||||
const fileChain = emptyChain();
|
||||
|
||||
if ((babelrc === true || babelrc === undefined) && typeof context.filename === "string") {
|
||||
const pkgData = yield* (0, _files.findPackageData)(context.filename);
|
||||
|
||||
if (pkgData && babelrcLoadEnabled(context, pkgData, babelrcRoots, babelrcRootsDirectory)) {
|
||||
({
|
||||
ignore: ignoreFile,
|
||||
config: babelrcFile
|
||||
} = yield* (0, _files.findRelativeConfig)(pkgData, context.envName, context.caller));
|
||||
|
||||
if (ignoreFile) {
|
||||
fileChain.files.add(ignoreFile.filepath);
|
||||
}
|
||||
|
||||
if (ignoreFile && shouldIgnore(context, ignoreFile.ignore, null, ignoreFile.dirname)) {
|
||||
isIgnored = true;
|
||||
}
|
||||
|
||||
if (babelrcFile && !isIgnored) {
|
||||
const validatedFile = validateBabelrcFile(babelrcFile);
|
||||
const babelrcLogger = new _printer.ConfigPrinter();
|
||||
const result = yield* loadFileChain(validatedFile, context, undefined, babelrcLogger);
|
||||
|
||||
if (!result) {
|
||||
isIgnored = true;
|
||||
} else {
|
||||
babelRcReport = yield* babelrcLogger.output();
|
||||
mergeChain(fileChain, result);
|
||||
}
|
||||
}
|
||||
|
||||
if (babelrcFile && isIgnored) {
|
||||
fileChain.files.add(babelrcFile.filepath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (context.showConfig) {
|
||||
console.log(`Babel configs on "${context.filename}" (ascending priority):\n` + [configReport, babelRcReport, programmaticReport].filter(x => !!x).join("\n\n") + "\n-----End Babel configs-----");
|
||||
}
|
||||
|
||||
const chain = mergeChain(mergeChain(mergeChain(emptyChain(), configFileChain), fileChain), programmaticChain);
|
||||
return {
|
||||
plugins: isIgnored ? [] : dedupDescriptors(chain.plugins),
|
||||
presets: isIgnored ? [] : dedupDescriptors(chain.presets),
|
||||
options: isIgnored ? [] : chain.options.map(o => normalizeOptions(o)),
|
||||
fileHandling: isIgnored ? "ignored" : "transpile",
|
||||
ignore: ignoreFile || undefined,
|
||||
babelrc: babelrcFile || undefined,
|
||||
config: configFile || undefined,
|
||||
files: chain.files
|
||||
};
|
||||
}
|
||||
|
||||
function babelrcLoadEnabled(context, pkgData, babelrcRoots, babelrcRootsDirectory) {
|
||||
if (typeof babelrcRoots === "boolean") return babelrcRoots;
|
||||
const absoluteRoot = context.root;
|
||||
|
||||
if (babelrcRoots === undefined) {
|
||||
return pkgData.directories.indexOf(absoluteRoot) !== -1;
|
||||
}
|
||||
|
||||
let babelrcPatterns = babelrcRoots;
|
||||
|
||||
if (!Array.isArray(babelrcPatterns)) {
|
||||
babelrcPatterns = [babelrcPatterns];
|
||||
}
|
||||
|
||||
babelrcPatterns = babelrcPatterns.map(pat => {
|
||||
return typeof pat === "string" ? _path().resolve(babelrcRootsDirectory, pat) : pat;
|
||||
});
|
||||
|
||||
if (babelrcPatterns.length === 1 && babelrcPatterns[0] === absoluteRoot) {
|
||||
return pkgData.directories.indexOf(absoluteRoot) !== -1;
|
||||
}
|
||||
|
||||
return babelrcPatterns.some(pat => {
|
||||
if (typeof pat === "string") {
|
||||
pat = (0, _patternToRegex.default)(pat, babelrcRootsDirectory);
|
||||
}
|
||||
|
||||
return pkgData.directories.some(directory => {
|
||||
return matchPattern(pat, babelrcRootsDirectory, directory, context);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
const validateConfigFile = (0, _caching.makeWeakCacheSync)(file => ({
|
||||
filepath: file.filepath,
|
||||
dirname: file.dirname,
|
||||
options: (0, _options.validate)("configfile", file.options)
|
||||
}));
|
||||
const validateBabelrcFile = (0, _caching.makeWeakCacheSync)(file => ({
|
||||
filepath: file.filepath,
|
||||
dirname: file.dirname,
|
||||
options: (0, _options.validate)("babelrcfile", file.options)
|
||||
}));
|
||||
const validateExtendFile = (0, _caching.makeWeakCacheSync)(file => ({
|
||||
filepath: file.filepath,
|
||||
dirname: file.dirname,
|
||||
options: (0, _options.validate)("extendsfile", file.options)
|
||||
}));
|
||||
const loadProgrammaticChain = makeChainWalker({
|
||||
root: input => buildRootDescriptors(input, "base", _configDescriptors.createCachedDescriptors),
|
||||
env: (input, envName) => buildEnvDescriptors(input, "base", _configDescriptors.createCachedDescriptors, envName),
|
||||
overrides: (input, index) => buildOverrideDescriptors(input, "base", _configDescriptors.createCachedDescriptors, index),
|
||||
overridesEnv: (input, index, envName) => buildOverrideEnvDescriptors(input, "base", _configDescriptors.createCachedDescriptors, index, envName),
|
||||
createLogger: (input, context, baseLogger) => buildProgrammaticLogger(input, context, baseLogger)
|
||||
});
|
||||
const loadFileChainWalker = makeChainWalker({
|
||||
root: file => loadFileDescriptors(file),
|
||||
env: (file, envName) => loadFileEnvDescriptors(file)(envName),
|
||||
overrides: (file, index) => loadFileOverridesDescriptors(file)(index),
|
||||
overridesEnv: (file, index, envName) => loadFileOverridesEnvDescriptors(file)(index)(envName),
|
||||
createLogger: (file, context, baseLogger) => buildFileLogger(file.filepath, context, baseLogger)
|
||||
});
|
||||
|
||||
function* loadFileChain(input, context, files, baseLogger) {
|
||||
const chain = yield* loadFileChainWalker(input, context, files, baseLogger);
|
||||
|
||||
if (chain) {
|
||||
chain.files.add(input.filepath);
|
||||
}
|
||||
|
||||
return chain;
|
||||
}
|
||||
|
||||
const loadFileDescriptors = (0, _caching.makeWeakCacheSync)(file => buildRootDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors));
|
||||
const loadFileEnvDescriptors = (0, _caching.makeWeakCacheSync)(file => (0, _caching.makeStrongCacheSync)(envName => buildEnvDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, envName)));
|
||||
const loadFileOverridesDescriptors = (0, _caching.makeWeakCacheSync)(file => (0, _caching.makeStrongCacheSync)(index => buildOverrideDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, index)));
|
||||
const loadFileOverridesEnvDescriptors = (0, _caching.makeWeakCacheSync)(file => (0, _caching.makeStrongCacheSync)(index => (0, _caching.makeStrongCacheSync)(envName => buildOverrideEnvDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, index, envName))));
|
||||
|
||||
function buildFileLogger(filepath, context, baseLogger) {
|
||||
if (!baseLogger) {
|
||||
return () => {};
|
||||
}
|
||||
|
||||
return baseLogger.configure(context.showConfig, _printer.ChainFormatter.Config, {
|
||||
filepath
|
||||
});
|
||||
}
|
||||
|
||||
function buildRootDescriptors({
|
||||
dirname,
|
||||
options
|
||||
}, alias, descriptors) {
|
||||
return descriptors(dirname, options, alias);
|
||||
}
|
||||
|
||||
function buildProgrammaticLogger(_, context, baseLogger) {
|
||||
var _context$caller;
|
||||
|
||||
if (!baseLogger) {
|
||||
return () => {};
|
||||
}
|
||||
|
||||
return baseLogger.configure(context.showConfig, _printer.ChainFormatter.Programmatic, {
|
||||
callerName: (_context$caller = context.caller) == null ? void 0 : _context$caller.name
|
||||
});
|
||||
}
|
||||
|
||||
function buildEnvDescriptors({
|
||||
dirname,
|
||||
options
|
||||
}, alias, descriptors, envName) {
|
||||
const opts = options.env && options.env[envName];
|
||||
return opts ? descriptors(dirname, opts, `${alias}.env["${envName}"]`) : null;
|
||||
}
|
||||
|
||||
function buildOverrideDescriptors({
|
||||
dirname,
|
||||
options
|
||||
}, alias, descriptors, index) {
|
||||
const opts = options.overrides && options.overrides[index];
|
||||
if (!opts) throw new Error("Assertion failure - missing override");
|
||||
return descriptors(dirname, opts, `${alias}.overrides[${index}]`);
|
||||
}
|
||||
|
||||
function buildOverrideEnvDescriptors({
|
||||
dirname,
|
||||
options
|
||||
}, alias, descriptors, index, envName) {
|
||||
const override = options.overrides && options.overrides[index];
|
||||
if (!override) throw new Error("Assertion failure - missing override");
|
||||
const opts = override.env && override.env[envName];
|
||||
return opts ? descriptors(dirname, opts, `${alias}.overrides[${index}].env["${envName}"]`) : null;
|
||||
}
|
||||
|
||||
function makeChainWalker({
|
||||
root,
|
||||
env,
|
||||
overrides,
|
||||
overridesEnv,
|
||||
createLogger
|
||||
}) {
|
||||
return function* (input, context, files = new Set(), baseLogger) {
|
||||
const {
|
||||
dirname
|
||||
} = input;
|
||||
const flattenedConfigs = [];
|
||||
const rootOpts = root(input);
|
||||
|
||||
if (configIsApplicable(rootOpts, dirname, context)) {
|
||||
flattenedConfigs.push({
|
||||
config: rootOpts,
|
||||
envName: undefined,
|
||||
index: undefined
|
||||
});
|
||||
const envOpts = env(input, context.envName);
|
||||
|
||||
if (envOpts && configIsApplicable(envOpts, dirname, context)) {
|
||||
flattenedConfigs.push({
|
||||
config: envOpts,
|
||||
envName: context.envName,
|
||||
index: undefined
|
||||
});
|
||||
}
|
||||
|
||||
(rootOpts.options.overrides || []).forEach((_, index) => {
|
||||
const overrideOps = overrides(input, index);
|
||||
|
||||
if (configIsApplicable(overrideOps, dirname, context)) {
|
||||
flattenedConfigs.push({
|
||||
config: overrideOps,
|
||||
index,
|
||||
envName: undefined
|
||||
});
|
||||
const overrideEnvOpts = overridesEnv(input, index, context.envName);
|
||||
|
||||
if (overrideEnvOpts && configIsApplicable(overrideEnvOpts, dirname, context)) {
|
||||
flattenedConfigs.push({
|
||||
config: overrideEnvOpts,
|
||||
index,
|
||||
envName: context.envName
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (flattenedConfigs.some(({
|
||||
config: {
|
||||
options: {
|
||||
ignore,
|
||||
only
|
||||
}
|
||||
}
|
||||
}) => shouldIgnore(context, ignore, only, dirname))) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const chain = emptyChain();
|
||||
const logger = createLogger(input, context, baseLogger);
|
||||
|
||||
for (const {
|
||||
config,
|
||||
index,
|
||||
envName
|
||||
} of flattenedConfigs) {
|
||||
if (!(yield* mergeExtendsChain(chain, config.options, dirname, context, files, baseLogger))) {
|
||||
return null;
|
||||
}
|
||||
|
||||
logger(config, index, envName);
|
||||
yield* mergeChainOpts(chain, config);
|
||||
}
|
||||
|
||||
return chain;
|
||||
};
|
||||
}
|
||||
|
||||
function* mergeExtendsChain(chain, opts, dirname, context, files, baseLogger) {
|
||||
if (opts.extends === undefined) return true;
|
||||
const file = yield* (0, _files.loadConfig)(opts.extends, dirname, context.envName, context.caller);
|
||||
|
||||
if (files.has(file)) {
|
||||
throw new Error(`Configuration cycle detected loading ${file.filepath}.\n` + `File already loaded following the config chain:\n` + Array.from(files, file => ` - ${file.filepath}`).join("\n"));
|
||||
}
|
||||
|
||||
files.add(file);
|
||||
const fileChain = yield* loadFileChain(validateExtendFile(file), context, files, baseLogger);
|
||||
files.delete(file);
|
||||
if (!fileChain) return false;
|
||||
mergeChain(chain, fileChain);
|
||||
return true;
|
||||
}
|
||||
|
||||
function mergeChain(target, source) {
|
||||
target.options.push(...source.options);
|
||||
target.plugins.push(...source.plugins);
|
||||
target.presets.push(...source.presets);
|
||||
|
||||
for (const file of source.files) {
|
||||
target.files.add(file);
|
||||
}
|
||||
|
||||
return target;
|
||||
}
|
||||
|
||||
function* mergeChainOpts(target, {
|
||||
options,
|
||||
plugins,
|
||||
presets
|
||||
}) {
|
||||
target.options.push(options);
|
||||
target.plugins.push(...(yield* plugins()));
|
||||
target.presets.push(...(yield* presets()));
|
||||
return target;
|
||||
}
|
||||
|
||||
function emptyChain() {
|
||||
return {
|
||||
options: [],
|
||||
presets: [],
|
||||
plugins: [],
|
||||
files: new Set()
|
||||
};
|
||||
}
|
||||
|
||||
function normalizeOptions(opts) {
|
||||
const options = Object.assign({}, opts);
|
||||
delete options.extends;
|
||||
delete options.env;
|
||||
delete options.overrides;
|
||||
delete options.plugins;
|
||||
delete options.presets;
|
||||
delete options.passPerPreset;
|
||||
delete options.ignore;
|
||||
delete options.only;
|
||||
delete options.test;
|
||||
delete options.include;
|
||||
delete options.exclude;
|
||||
|
||||
if (Object.prototype.hasOwnProperty.call(options, "sourceMap")) {
|
||||
options.sourceMaps = options.sourceMap;
|
||||
delete options.sourceMap;
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
function dedupDescriptors(items) {
|
||||
const map = new Map();
|
||||
const descriptors = [];
|
||||
|
||||
for (const item of items) {
|
||||
if (typeof item.value === "function") {
|
||||
const fnKey = item.value;
|
||||
let nameMap = map.get(fnKey);
|
||||
|
||||
if (!nameMap) {
|
||||
nameMap = new Map();
|
||||
map.set(fnKey, nameMap);
|
||||
}
|
||||
|
||||
let desc = nameMap.get(item.name);
|
||||
|
||||
if (!desc) {
|
||||
desc = {
|
||||
value: item
|
||||
};
|
||||
descriptors.push(desc);
|
||||
if (!item.ownPass) nameMap.set(item.name, desc);
|
||||
} else {
|
||||
desc.value = item;
|
||||
}
|
||||
} else {
|
||||
descriptors.push({
|
||||
value: item
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return descriptors.reduce((acc, desc) => {
|
||||
acc.push(desc.value);
|
||||
return acc;
|
||||
}, []);
|
||||
}
|
||||
|
||||
function configIsApplicable({
|
||||
options
|
||||
}, dirname, context) {
|
||||
return (options.test === undefined || configFieldIsApplicable(context, options.test, dirname)) && (options.include === undefined || configFieldIsApplicable(context, options.include, dirname)) && (options.exclude === undefined || !configFieldIsApplicable(context, options.exclude, dirname));
|
||||
}
|
||||
|
||||
function configFieldIsApplicable(context, test, dirname) {
|
||||
const patterns = Array.isArray(test) ? test : [test];
|
||||
return matchesPatterns(context, patterns, dirname);
|
||||
}
|
||||
|
||||
function ignoreListReplacer(_key, value) {
|
||||
if (value instanceof RegExp) {
|
||||
return String(value);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function shouldIgnore(context, ignore, only, dirname) {
|
||||
if (ignore && matchesPatterns(context, ignore, dirname)) {
|
||||
var _context$filename;
|
||||
|
||||
const message = `No config is applied to "${(_context$filename = context.filename) != null ? _context$filename : "(unknown)"}" because it matches one of \`ignore: ${JSON.stringify(ignore, ignoreListReplacer)}\` from "${dirname}"`;
|
||||
debug(message);
|
||||
|
||||
if (context.showConfig) {
|
||||
console.log(message);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
if (only && !matchesPatterns(context, only, dirname)) {
|
||||
var _context$filename2;
|
||||
|
||||
const message = `No config is applied to "${(_context$filename2 = context.filename) != null ? _context$filename2 : "(unknown)"}" because it fails to match one of \`only: ${JSON.stringify(only, ignoreListReplacer)}\` from "${dirname}"`;
|
||||
debug(message);
|
||||
|
||||
if (context.showConfig) {
|
||||
console.log(message);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
function matchesPatterns(context, patterns, dirname) {
|
||||
return patterns.some(pattern => matchPattern(pattern, dirname, context.filename, context));
|
||||
}
|
||||
|
||||
function matchPattern(pattern, dirname, pathToTest, context) {
|
||||
if (typeof pattern === "function") {
|
||||
return !!pattern(pathToTest, {
|
||||
dirname,
|
||||
envName: context.envName,
|
||||
caller: context.caller
|
||||
});
|
||||
}
|
||||
|
||||
if (typeof pathToTest !== "string") {
|
||||
throw new Error(`Configuration contains string/RegExp pattern, but no filename was passed to Babel`);
|
||||
}
|
||||
|
||||
if (typeof pattern === "string") {
|
||||
pattern = (0, _patternToRegex.default)(pattern, dirname);
|
||||
}
|
||||
|
||||
return pattern.test(pathToTest);
|
||||
}
|
||||
|
||||
0 && 0;
|
246
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/config-descriptors.js
generated
vendored
Normal file
246
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/config-descriptors.js
generated
vendored
Normal file
@ -0,0 +1,246 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.createCachedDescriptors = createCachedDescriptors;
|
||||
exports.createDescriptor = createDescriptor;
|
||||
exports.createUncachedDescriptors = createUncachedDescriptors;
|
||||
|
||||
function _gensync() {
|
||||
const data = require("gensync");
|
||||
|
||||
_gensync = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
var _files = require("./files");
|
||||
|
||||
var _item = require("./item");
|
||||
|
||||
var _caching = require("./caching");
|
||||
|
||||
var _resolveTargets = require("./resolve-targets");
|
||||
|
||||
function isEqualDescriptor(a, b) {
|
||||
return a.name === b.name && a.value === b.value && a.options === b.options && a.dirname === b.dirname && a.alias === b.alias && a.ownPass === b.ownPass && (a.file && a.file.request) === (b.file && b.file.request) && (a.file && a.file.resolved) === (b.file && b.file.resolved);
|
||||
}
|
||||
|
||||
function* handlerOf(value) {
|
||||
return value;
|
||||
}
|
||||
|
||||
function optionsWithResolvedBrowserslistConfigFile(options, dirname) {
|
||||
if (typeof options.browserslistConfigFile === "string") {
|
||||
options.browserslistConfigFile = (0, _resolveTargets.resolveBrowserslistConfigFile)(options.browserslistConfigFile, dirname);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
function createCachedDescriptors(dirname, options, alias) {
|
||||
const {
|
||||
plugins,
|
||||
presets,
|
||||
passPerPreset
|
||||
} = options;
|
||||
return {
|
||||
options: optionsWithResolvedBrowserslistConfigFile(options, dirname),
|
||||
plugins: plugins ? () => createCachedPluginDescriptors(plugins, dirname)(alias) : () => handlerOf([]),
|
||||
presets: presets ? () => createCachedPresetDescriptors(presets, dirname)(alias)(!!passPerPreset) : () => handlerOf([])
|
||||
};
|
||||
}
|
||||
|
||||
function createUncachedDescriptors(dirname, options, alias) {
|
||||
let plugins;
|
||||
let presets;
|
||||
return {
|
||||
options: optionsWithResolvedBrowserslistConfigFile(options, dirname),
|
||||
|
||||
*plugins() {
|
||||
if (!plugins) {
|
||||
plugins = yield* createPluginDescriptors(options.plugins || [], dirname, alias);
|
||||
}
|
||||
|
||||
return plugins;
|
||||
},
|
||||
|
||||
*presets() {
|
||||
if (!presets) {
|
||||
presets = yield* createPresetDescriptors(options.presets || [], dirname, alias, !!options.passPerPreset);
|
||||
}
|
||||
|
||||
return presets;
|
||||
}
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
const PRESET_DESCRIPTOR_CACHE = new WeakMap();
|
||||
const createCachedPresetDescriptors = (0, _caching.makeWeakCacheSync)((items, cache) => {
|
||||
const dirname = cache.using(dir => dir);
|
||||
return (0, _caching.makeStrongCacheSync)(alias => (0, _caching.makeStrongCache)(function* (passPerPreset) {
|
||||
const descriptors = yield* createPresetDescriptors(items, dirname, alias, passPerPreset);
|
||||
return descriptors.map(desc => loadCachedDescriptor(PRESET_DESCRIPTOR_CACHE, desc));
|
||||
}));
|
||||
});
|
||||
const PLUGIN_DESCRIPTOR_CACHE = new WeakMap();
|
||||
const createCachedPluginDescriptors = (0, _caching.makeWeakCacheSync)((items, cache) => {
|
||||
const dirname = cache.using(dir => dir);
|
||||
return (0, _caching.makeStrongCache)(function* (alias) {
|
||||
const descriptors = yield* createPluginDescriptors(items, dirname, alias);
|
||||
return descriptors.map(desc => loadCachedDescriptor(PLUGIN_DESCRIPTOR_CACHE, desc));
|
||||
});
|
||||
});
|
||||
const DEFAULT_OPTIONS = {};
|
||||
|
||||
function loadCachedDescriptor(cache, desc) {
|
||||
const {
|
||||
value,
|
||||
options = DEFAULT_OPTIONS
|
||||
} = desc;
|
||||
if (options === false) return desc;
|
||||
let cacheByOptions = cache.get(value);
|
||||
|
||||
if (!cacheByOptions) {
|
||||
cacheByOptions = new WeakMap();
|
||||
cache.set(value, cacheByOptions);
|
||||
}
|
||||
|
||||
let possibilities = cacheByOptions.get(options);
|
||||
|
||||
if (!possibilities) {
|
||||
possibilities = [];
|
||||
cacheByOptions.set(options, possibilities);
|
||||
}
|
||||
|
||||
if (possibilities.indexOf(desc) === -1) {
|
||||
const matches = possibilities.filter(possibility => isEqualDescriptor(possibility, desc));
|
||||
|
||||
if (matches.length > 0) {
|
||||
return matches[0];
|
||||
}
|
||||
|
||||
possibilities.push(desc);
|
||||
}
|
||||
|
||||
return desc;
|
||||
}
|
||||
|
||||
function* createPresetDescriptors(items, dirname, alias, passPerPreset) {
|
||||
return yield* createDescriptors("preset", items, dirname, alias, passPerPreset);
|
||||
}
|
||||
|
||||
function* createPluginDescriptors(items, dirname, alias) {
|
||||
return yield* createDescriptors("plugin", items, dirname, alias);
|
||||
}
|
||||
|
||||
function* createDescriptors(type, items, dirname, alias, ownPass) {
|
||||
const descriptors = yield* _gensync().all(items.map((item, index) => createDescriptor(item, dirname, {
|
||||
type,
|
||||
alias: `${alias}$${index}`,
|
||||
ownPass: !!ownPass
|
||||
})));
|
||||
assertNoDuplicates(descriptors);
|
||||
return descriptors;
|
||||
}
|
||||
|
||||
function* createDescriptor(pair, dirname, {
|
||||
type,
|
||||
alias,
|
||||
ownPass
|
||||
}) {
|
||||
const desc = (0, _item.getItemDescriptor)(pair);
|
||||
|
||||
if (desc) {
|
||||
return desc;
|
||||
}
|
||||
|
||||
let name;
|
||||
let options;
|
||||
let value = pair;
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
if (value.length === 3) {
|
||||
[value, options, name] = value;
|
||||
} else {
|
||||
[value, options] = value;
|
||||
}
|
||||
}
|
||||
|
||||
let file = undefined;
|
||||
let filepath = null;
|
||||
|
||||
if (typeof value === "string") {
|
||||
if (typeof type !== "string") {
|
||||
throw new Error("To resolve a string-based item, the type of item must be given");
|
||||
}
|
||||
|
||||
const resolver = type === "plugin" ? _files.loadPlugin : _files.loadPreset;
|
||||
const request = value;
|
||||
({
|
||||
filepath,
|
||||
value
|
||||
} = yield* resolver(value, dirname));
|
||||
file = {
|
||||
request,
|
||||
resolved: filepath
|
||||
};
|
||||
}
|
||||
|
||||
if (!value) {
|
||||
throw new Error(`Unexpected falsy value: ${String(value)}`);
|
||||
}
|
||||
|
||||
if (typeof value === "object" && value.__esModule) {
|
||||
if (value.default) {
|
||||
value = value.default;
|
||||
} else {
|
||||
throw new Error("Must export a default export when using ES6 modules.");
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof value !== "object" && typeof value !== "function") {
|
||||
throw new Error(`Unsupported format: ${typeof value}. Expected an object or a function.`);
|
||||
}
|
||||
|
||||
if (filepath !== null && typeof value === "object" && value) {
|
||||
throw new Error(`Plugin/Preset files are not allowed to export objects, only functions. In ${filepath}`);
|
||||
}
|
||||
|
||||
return {
|
||||
name,
|
||||
alias: filepath || alias,
|
||||
value,
|
||||
options,
|
||||
dirname,
|
||||
ownPass,
|
||||
file
|
||||
};
|
||||
}
|
||||
|
||||
function assertNoDuplicates(items) {
|
||||
const map = new Map();
|
||||
|
||||
for (const item of items) {
|
||||
if (typeof item.value !== "function") continue;
|
||||
let nameMap = map.get(item.value);
|
||||
|
||||
if (!nameMap) {
|
||||
nameMap = new Set();
|
||||
map.set(item.value, nameMap);
|
||||
}
|
||||
|
||||
if (nameMap.has(item.name)) {
|
||||
const conflicts = items.filter(i => i.value === item.value);
|
||||
throw new Error([`Duplicate plugin/preset detected.`, `If you'd like to use two separate instances of a plugin,`, `they need separate names, e.g.`, ``, ` plugins: [`, ` ['some-plugin', {}],`, ` ['some-plugin', {}, 'some unique name'],`, ` ]`, ``, `Duplicates detected are:`, `${JSON.stringify(conflicts, null, 2)}`].join("\n"));
|
||||
}
|
||||
|
||||
nameMap.add(item.name);
|
||||
}
|
||||
}
|
||||
|
||||
0 && 0;
|
360
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/files/configuration.js
generated
vendored
Normal file
360
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/files/configuration.js
generated
vendored
Normal file
@ -0,0 +1,360 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.ROOT_CONFIG_FILENAMES = void 0;
|
||||
exports.findConfigUpwards = findConfigUpwards;
|
||||
exports.findRelativeConfig = findRelativeConfig;
|
||||
exports.findRootConfig = findRootConfig;
|
||||
exports.loadConfig = loadConfig;
|
||||
exports.resolveShowConfigPath = resolveShowConfigPath;
|
||||
|
||||
function _debug() {
|
||||
const data = require("debug");
|
||||
|
||||
_debug = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _fs() {
|
||||
const data = require("fs");
|
||||
|
||||
_fs = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _path() {
|
||||
const data = require("path");
|
||||
|
||||
_path = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _json() {
|
||||
const data = require("json5");
|
||||
|
||||
_json = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _gensync() {
|
||||
const data = require("gensync");
|
||||
|
||||
_gensync = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
var _caching = require("../caching");
|
||||
|
||||
var _configApi = require("../helpers/config-api");
|
||||
|
||||
var _utils = require("./utils");
|
||||
|
||||
var _moduleTypes = require("./module-types");
|
||||
|
||||
var _patternToRegex = require("../pattern-to-regex");
|
||||
|
||||
var fs = require("../../gensync-utils/fs");
|
||||
|
||||
function _module() {
|
||||
const data = require("module");
|
||||
|
||||
_module = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
const debug = _debug()("babel:config:loading:files:configuration");
|
||||
|
||||
const ROOT_CONFIG_FILENAMES = ["babel.config.js", "babel.config.cjs", "babel.config.mjs", "babel.config.json"];
|
||||
exports.ROOT_CONFIG_FILENAMES = ROOT_CONFIG_FILENAMES;
|
||||
const RELATIVE_CONFIG_FILENAMES = [".babelrc", ".babelrc.js", ".babelrc.cjs", ".babelrc.mjs", ".babelrc.json"];
|
||||
const BABELIGNORE_FILENAME = ".babelignore";
|
||||
|
||||
function findConfigUpwards(rootDir) {
|
||||
let dirname = rootDir;
|
||||
|
||||
for (;;) {
|
||||
for (const filename of ROOT_CONFIG_FILENAMES) {
|
||||
if (_fs().existsSync(_path().join(dirname, filename))) {
|
||||
return dirname;
|
||||
}
|
||||
}
|
||||
|
||||
const nextDir = _path().dirname(dirname);
|
||||
|
||||
if (dirname === nextDir) break;
|
||||
dirname = nextDir;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function* findRelativeConfig(packageData, envName, caller) {
|
||||
let config = null;
|
||||
let ignore = null;
|
||||
|
||||
const dirname = _path().dirname(packageData.filepath);
|
||||
|
||||
for (const loc of packageData.directories) {
|
||||
if (!config) {
|
||||
var _packageData$pkg;
|
||||
|
||||
config = yield* loadOneConfig(RELATIVE_CONFIG_FILENAMES, loc, envName, caller, ((_packageData$pkg = packageData.pkg) == null ? void 0 : _packageData$pkg.dirname) === loc ? packageToBabelConfig(packageData.pkg) : null);
|
||||
}
|
||||
|
||||
if (!ignore) {
|
||||
const ignoreLoc = _path().join(loc, BABELIGNORE_FILENAME);
|
||||
|
||||
ignore = yield* readIgnoreConfig(ignoreLoc);
|
||||
|
||||
if (ignore) {
|
||||
debug("Found ignore %o from %o.", ignore.filepath, dirname);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
config,
|
||||
ignore
|
||||
};
|
||||
}
|
||||
|
||||
function findRootConfig(dirname, envName, caller) {
|
||||
return loadOneConfig(ROOT_CONFIG_FILENAMES, dirname, envName, caller);
|
||||
}
|
||||
|
||||
function* loadOneConfig(names, dirname, envName, caller, previousConfig = null) {
|
||||
const configs = yield* _gensync().all(names.map(filename => readConfig(_path().join(dirname, filename), envName, caller)));
|
||||
const config = configs.reduce((previousConfig, config) => {
|
||||
if (config && previousConfig) {
|
||||
throw new Error(`Multiple configuration files found. Please remove one:\n` + ` - ${_path().basename(previousConfig.filepath)}\n` + ` - ${config.filepath}\n` + `from ${dirname}`);
|
||||
}
|
||||
|
||||
return config || previousConfig;
|
||||
}, previousConfig);
|
||||
|
||||
if (config) {
|
||||
debug("Found configuration %o from %o.", config.filepath, dirname);
|
||||
}
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
function* loadConfig(name, dirname, envName, caller) {
|
||||
const filepath = (((v, w) => (v = v.split("."), w = w.split("."), +v[0] > +w[0] || v[0] == w[0] && +v[1] >= +w[1]))(process.versions.node, "8.9") ? require.resolve : (r, {
|
||||
paths: [b]
|
||||
}, M = require("module")) => {
|
||||
let f = M._findPath(r, M._nodeModulePaths(b).concat(b));
|
||||
|
||||
if (f) return f;
|
||||
f = new Error(`Cannot resolve module '${r}'`);
|
||||
f.code = "MODULE_NOT_FOUND";
|
||||
throw f;
|
||||
})(name, {
|
||||
paths: [dirname]
|
||||
});
|
||||
const conf = yield* readConfig(filepath, envName, caller);
|
||||
|
||||
if (!conf) {
|
||||
throw new Error(`Config file ${filepath} contains no configuration data`);
|
||||
}
|
||||
|
||||
debug("Loaded config %o from %o.", name, dirname);
|
||||
return conf;
|
||||
}
|
||||
|
||||
function readConfig(filepath, envName, caller) {
|
||||
const ext = _path().extname(filepath);
|
||||
|
||||
return ext === ".js" || ext === ".cjs" || ext === ".mjs" ? readConfigJS(filepath, {
|
||||
envName,
|
||||
caller
|
||||
}) : readConfigJSON5(filepath);
|
||||
}
|
||||
|
||||
const LOADING_CONFIGS = new Set();
|
||||
const readConfigJS = (0, _caching.makeStrongCache)(function* readConfigJS(filepath, cache) {
|
||||
if (!_fs().existsSync(filepath)) {
|
||||
cache.never();
|
||||
return null;
|
||||
}
|
||||
|
||||
if (LOADING_CONFIGS.has(filepath)) {
|
||||
cache.never();
|
||||
debug("Auto-ignoring usage of config %o.", filepath);
|
||||
return {
|
||||
filepath,
|
||||
dirname: _path().dirname(filepath),
|
||||
options: {}
|
||||
};
|
||||
}
|
||||
|
||||
let options;
|
||||
|
||||
try {
|
||||
LOADING_CONFIGS.add(filepath);
|
||||
options = yield* (0, _moduleTypes.default)(filepath, "You appear to be using a native ECMAScript module configuration " + "file, which is only supported when running Babel asynchronously.");
|
||||
} catch (err) {
|
||||
err.message = `${filepath}: Error while loading config - ${err.message}`;
|
||||
throw err;
|
||||
} finally {
|
||||
LOADING_CONFIGS.delete(filepath);
|
||||
}
|
||||
|
||||
let assertCache = false;
|
||||
|
||||
if (typeof options === "function") {
|
||||
yield* [];
|
||||
options = options((0, _configApi.makeConfigAPI)(cache));
|
||||
assertCache = true;
|
||||
}
|
||||
|
||||
if (!options || typeof options !== "object" || Array.isArray(options)) {
|
||||
throw new Error(`${filepath}: Configuration should be an exported JavaScript object.`);
|
||||
}
|
||||
|
||||
if (typeof options.then === "function") {
|
||||
throw new Error(`You appear to be using an async configuration, ` + `which your current version of Babel does not support. ` + `We may add support for this in the future, ` + `but if you're on the most recent version of @babel/core and still ` + `seeing this error, then you'll need to synchronously return your config.`);
|
||||
}
|
||||
|
||||
if (assertCache && !cache.configured()) throwConfigError();
|
||||
return {
|
||||
filepath,
|
||||
dirname: _path().dirname(filepath),
|
||||
options
|
||||
};
|
||||
});
|
||||
const packageToBabelConfig = (0, _caching.makeWeakCacheSync)(file => {
|
||||
const babel = file.options["babel"];
|
||||
if (typeof babel === "undefined") return null;
|
||||
|
||||
if (typeof babel !== "object" || Array.isArray(babel) || babel === null) {
|
||||
throw new Error(`${file.filepath}: .babel property must be an object`);
|
||||
}
|
||||
|
||||
return {
|
||||
filepath: file.filepath,
|
||||
dirname: file.dirname,
|
||||
options: babel
|
||||
};
|
||||
});
|
||||
const readConfigJSON5 = (0, _utils.makeStaticFileCache)((filepath, content) => {
|
||||
let options;
|
||||
|
||||
try {
|
||||
options = _json().parse(content);
|
||||
} catch (err) {
|
||||
err.message = `${filepath}: Error while parsing config - ${err.message}`;
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (!options) throw new Error(`${filepath}: No config detected`);
|
||||
|
||||
if (typeof options !== "object") {
|
||||
throw new Error(`${filepath}: Config returned typeof ${typeof options}`);
|
||||
}
|
||||
|
||||
if (Array.isArray(options)) {
|
||||
throw new Error(`${filepath}: Expected config object but found array`);
|
||||
}
|
||||
|
||||
delete options["$schema"];
|
||||
return {
|
||||
filepath,
|
||||
dirname: _path().dirname(filepath),
|
||||
options
|
||||
};
|
||||
});
|
||||
const readIgnoreConfig = (0, _utils.makeStaticFileCache)((filepath, content) => {
|
||||
const ignoreDir = _path().dirname(filepath);
|
||||
|
||||
const ignorePatterns = content.split("\n").map(line => line.replace(/#(.*?)$/, "").trim()).filter(line => !!line);
|
||||
|
||||
for (const pattern of ignorePatterns) {
|
||||
if (pattern[0] === "!") {
|
||||
throw new Error(`Negation of file paths is not supported.`);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
filepath,
|
||||
dirname: _path().dirname(filepath),
|
||||
ignore: ignorePatterns.map(pattern => (0, _patternToRegex.default)(pattern, ignoreDir))
|
||||
};
|
||||
});
|
||||
|
||||
function* resolveShowConfigPath(dirname) {
|
||||
const targetPath = process.env.BABEL_SHOW_CONFIG_FOR;
|
||||
|
||||
if (targetPath != null) {
|
||||
const absolutePath = _path().resolve(dirname, targetPath);
|
||||
|
||||
const stats = yield* fs.stat(absolutePath);
|
||||
|
||||
if (!stats.isFile()) {
|
||||
throw new Error(`${absolutePath}: BABEL_SHOW_CONFIG_FOR must refer to a regular file, directories are not supported.`);
|
||||
}
|
||||
|
||||
return absolutePath;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function throwConfigError() {
|
||||
throw new Error(`\
|
||||
Caching was left unconfigured. Babel's plugins, presets, and .babelrc.js files can be configured
|
||||
for various types of caching, using the first param of their handler functions:
|
||||
|
||||
module.exports = function(api) {
|
||||
// The API exposes the following:
|
||||
|
||||
// Cache the returned value forever and don't call this function again.
|
||||
api.cache(true);
|
||||
|
||||
// Don't cache at all. Not recommended because it will be very slow.
|
||||
api.cache(false);
|
||||
|
||||
// Cached based on the value of some function. If this function returns a value different from
|
||||
// a previously-encountered value, the plugins will re-evaluate.
|
||||
var env = api.cache(() => process.env.NODE_ENV);
|
||||
|
||||
// If testing for a specific env, we recommend specifics to avoid instantiating a plugin for
|
||||
// any possible NODE_ENV value that might come up during plugin execution.
|
||||
var isProd = api.cache(() => process.env.NODE_ENV === "production");
|
||||
|
||||
// .cache(fn) will perform a linear search though instances to find the matching plugin based
|
||||
// based on previous instantiated plugins. If you want to recreate the plugin and discard the
|
||||
// previous instance whenever something changes, you may use:
|
||||
var isProd = api.cache.invalidate(() => process.env.NODE_ENV === "production");
|
||||
|
||||
// Note, we also expose the following more-verbose versions of the above examples:
|
||||
api.cache.forever(); // api.cache(true)
|
||||
api.cache.never(); // api.cache(false)
|
||||
api.cache.using(fn); // api.cache(fn)
|
||||
|
||||
// Return the value that will be cached.
|
||||
return { };
|
||||
};`);
|
||||
}
|
||||
|
||||
0 && 0;
|
43
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/files/import-meta-resolve.js
generated
vendored
Normal file
43
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/files/import-meta-resolve.js
generated
vendored
Normal file
@ -0,0 +1,43 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = resolve;
|
||||
|
||||
function _module() {
|
||||
const data = require("module");
|
||||
|
||||
_module = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
var _importMetaResolve = require("../../vendor/import-meta-resolve");
|
||||
|
||||
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
|
||||
|
||||
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
|
||||
|
||||
let import_;
|
||||
|
||||
try {
|
||||
import_ = require("./import").default;
|
||||
} catch (_unused) {}
|
||||
|
||||
const importMetaResolveP = import_ && process.execArgv.includes("--experimental-import-meta-resolve") ? import_("data:text/javascript,export default import.meta.resolve").then(m => m.default || _importMetaResolve.resolve, () => _importMetaResolve.resolve) : Promise.resolve(_importMetaResolve.resolve);
|
||||
|
||||
function resolve(_x, _x2) {
|
||||
return _resolve.apply(this, arguments);
|
||||
}
|
||||
|
||||
function _resolve() {
|
||||
_resolve = _asyncToGenerator(function* (specifier, parent) {
|
||||
return (yield importMetaResolveP)(specifier, parent);
|
||||
});
|
||||
return _resolve.apply(this, arguments);
|
||||
}
|
||||
|
||||
0 && 0;
|
12
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/files/import.js
generated
vendored
Normal file
12
frontend_old/node_modules/.pnpm/@babel+core@7.18.6/node_modules/@babel/core/lib/config/files/import.js
generated
vendored
Normal file
@ -0,0 +1,12 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = import_;
|
||||
|
||||
function import_(filepath) {
|
||||
return import(filepath);
|
||||
}
|
||||
|
||||
0 && 0;
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user