Aktueller Stand

This commit is contained in:
2026-01-22 22:22:48 +01:00
parent 33e2bc61e2
commit fa5f3808bb
169 changed files with 58567 additions and 25460 deletions

50
.env
View File

@@ -1,28 +1,64 @@
# --- Runtime ---
NODE_ENV=production
AUTO_MIGRATE=true
# --- Ports (host bindings) ---
BIND_IP=127.0.0.1
API_PORT=8201
WEB_PORT=3201
# Public API URL used by the frontend (browser). Set to http(s)://<host>:<api-port>
VITE_API_URL=http://docker:8201
VITE_API_URL=https://api.mailcleaner.srv.medeba-media.de
VITE_CLEANUP_ENABLED=true
# Allow Vite dev server to accept requests for these hostnames (comma-separated)
VITE_ALLOWED_HOSTS=docker
VITE_ALLOWED_HOSTS=mailcleaner.srv.medeba-media.de,api.mailcleaner.srv.medeba-media.de
# If running behind a reverse proxy (e.g. Nginx Proxy Manager), enable this
TRUST_PROXY=false
TRUST_PROXY=true
WEB_BASE_URL=https://mailcleaner.srv.medeba-media.de
# --- Database / Queue ---
DATABASE_URL=postgresql://mailcleaner:mailcleaner@postgres:5432/mailcleaner
REDIS_URL=redis://redis:6379
# JWT signing secret for auth
JWT_SECRET=dev-change-me
JWT_SECRET=P50zRZNRV1e8sbCxOmhGy+S5S+5HAnozYjcXQ8YR0tJbdZbSD2q198lckaxkOg/O
# Allowed CORS origins (comma-separated). Required in production.
CORS_ORIGINS=https://mailcleaner.srv.medeba-media.de
# Enable Swagger docs (/docs). Keep false in production.
ENABLE_SWAGGER=false
# Global rate limit
RATE_LIMIT_MAX=300
RATE_LIMIT_WINDOW=1 minute
# Auth-specific rate limit
RATE_LIMIT_AUTH_MAX=10
RATE_LIMIT_AUTH_WINDOW=1 minute
# Short-lived SSE token TTL (seconds)
SSE_TOKEN_TTL_SECONDS=300
# OAuth state TTL (seconds)
OAUTH_STATE_TTL_SECONDS=600
# Disallow custom IMAP/SMTP hosts unless explicitly enabled
ALLOW_CUSTOM_MAIL_HOSTS=false
# Block private network targets for unsubscribe HTTP
BLOCK_PRIVATE_NETWORKS=true
# Encryption key for sensitive secrets (set in production)
ENCRYPTION_KEY=t6ay7IQvSWBbMKOX4S23S4BIcZ9b31eAhF9E9/Xe2tN5J8bmjbbyv+hHYBd6aMP8
# --- Google OAuth (Gmail) ---
GOOGLE_CLIENT_ID=
GOOGLE_CLIENT_SECRET=
# Must match the OAuth redirect URL configured in Google Cloud Console
GOOGLE_REDIRECT_URI=http://docker:8201/oauth/gmail/callback
GOOGLE_REDIRECT_URI=https://api.mailcleaner.srv.medeba-media.de/oauth/gmail/callback
# --- Export jobs ---
# Where export ZIPs are stored by the worker
@@ -32,8 +68,8 @@ EXPORT_TTL_HOURS=24
# --- Seed admin account ---
SEED_ADMIN_EMAIL=admin@simplemailcleaner.local
SEED_ADMIN_PASSWORD=change-me-now
SEED_ADMIN_PASSWORD=abfkFHbf§bi3FBAkbekh34d
SEED_TENANT=Default Tenant
SEED_TENANT_ID=seed-tenant
SEED_ENABLED=true
SEED_ENABLED=false
SEED_FORCE_PASSWORD_UPDATE=false

View File

@@ -1,15 +1,22 @@
# --- Runtime ---
NODE_ENV=development
AUTO_MIGRATE=false
# --- Ports (host bindings) ---
BIND_IP=127.0.0.1
API_PORT=8201
WEB_PORT=3201
# Public API URL used by the frontend (browser). Set to http(s)://<host>:<api-port>
VITE_API_URL=http://localhost:8201
VITE_CLEANUP_ENABLED=false
# Allow Vite dev server to accept requests for these hostnames (comma-separated)
VITE_ALLOWED_HOSTS=localhost,docker
# If running behind a reverse proxy (e.g. Nginx Proxy Manager), enable this
TRUST_PROXY=false
WEB_BASE_URL=https://mailcleaner.srv.medeba-media.de
# --- Database / Queue ---
DATABASE_URL=postgresql://mailcleaner:mailcleaner@postgres:5432/mailcleaner
@@ -18,6 +25,35 @@ REDIS_URL=redis://redis:6379
# JWT signing secret for auth
JWT_SECRET=dev-change-me
# Allowed CORS origins (comma-separated). Required in production.
CORS_ORIGINS=http://localhost:3201
# Enable Swagger docs (/docs). Keep false in production.
ENABLE_SWAGGER=false
# Global rate limit
RATE_LIMIT_MAX=300
RATE_LIMIT_WINDOW=1 minute
# Auth-specific rate limit
RATE_LIMIT_AUTH_MAX=10
RATE_LIMIT_AUTH_WINDOW=1 minute
# Short-lived SSE token TTL (seconds)
SSE_TOKEN_TTL_SECONDS=300
# OAuth state TTL (seconds)
OAUTH_STATE_TTL_SECONDS=600
# Disallow custom IMAP/SMTP hosts unless explicitly enabled
ALLOW_CUSTOM_MAIL_HOSTS=false
# Block private network targets for unsubscribe HTTP
BLOCK_PRIVATE_NETWORKS=true
# Encryption key for sensitive secrets (set in production)
ENCRYPTION_KEY=change-me-please
# --- Google OAuth (Gmail) ---
GOOGLE_CLIENT_ID=
GOOGLE_CLIENT_SECRET=

161
README.md
View File

@@ -18,7 +18,7 @@ docker compose up --build
- Web UI: `http://localhost:${WEB_PORT}` (see root `.env`)
- API: `http://localhost:${API_PORT}`
- API Docs: `http://localhost:${API_PORT}/docs`
- API Docs: `http://localhost:${API_PORT}/docs` (only if `ENABLE_SWAGGER=true`)
## API (initial)
- `POST /auth/register` `{ tenantName, email, password }`
@@ -29,7 +29,8 @@ docker compose up --build
- `POST /mail/cleanup` (auth) `{ mailboxAccountId, dryRun, unsubscribeEnabled, routingEnabled }`
- `GET /jobs` (auth)
- `GET /jobs/:id/events` (auth)
- `GET /jobs/:id/stream?token=...` (auth via query token, SSE)
- `GET /jobs/:id/stream-token` (auth) -> short-lived SSE token
- `GET /jobs/:id/stream?token=...` (SSE using short-lived token)
- `GET /rules` (auth)
- `POST /rules` (auth)
- `PUT /rules/:id` (auth)
@@ -54,7 +55,8 @@ docker compose up --build
- `GET /admin/exports/:id/download` (admin)
- `POST /admin/exports/purge` (admin)
- `DELETE /admin/exports/:id` (admin)
- `GET /jobs/exports/:id/stream` (auth, SSE)
- `GET /jobs/exports/:id/stream-token` (admin) -> short-lived SSE token
- `GET /jobs/exports/:id/stream?token=...` (SSE using short-lived token)
Export queue:
- ZIP exports are queued via Redis/BullMQ and processed by the worker container.
@@ -74,6 +76,30 @@ UI:
- Weblink unsubscribe uses HTTP first, mailto fallback (SMTP required).
- Worker scans headers and applies routing rules (MOVE/DELETE) when not in dry run.
## Cleanup job behavior (what the button does)
When you click **“Bereinigung starten / Start cleanup”** a cleanup job is created and queued. The worker connects to the selected mailbox and:
1. Opens the INBOX (or first mailbox matching “inbox”).
2. Fetches recent message headers (subject/from/headers).
3. Detects newsletter candidates (ListUnsubscribe, ListId, heuristics).
4. Applies your routing rules (MOVE/ARCHIVE/LABEL/DELETE) if enabled.
5. Attempts to unsubscribe using `ListUnsubscribe` (HTTP oneclick or mailto).
6. Logs all actions and progress as job events (visible in the UI).
### The three checkboxes explained
**Dry run (keine Änderungen)**
Runs the full scan and logs what *would* happen, but **does not move/delete/unsubscribe** any mail. Useful for testing rules safely.
**Unsubscribe aktiv**
Enables `ListUnsubscribe` handling.
- HTTP links are called (oneclick POST when supported).
- Mailto links are sent via SMTP (requires SMTP host + app password).
**Routing aktiv**
Applies your configured rules (conditions → actions).
- MOVE/ARCHIVE/LABEL/DELETE will be executed when not in dry run.
- If disabled, no rule actions are executed (only detection + optional unsubscribe).
## Seed data
```bash
cd backend
@@ -84,7 +110,56 @@ SEED_TENANT=Default Tenant \\
SEED_TENANT_ID=seed-tenant \\
npm run prisma:seed
```
- DSGVO: data storage is designed for tenant isolation; encryption at rest will be added.
- DSGVO: tenant isolation supported; sensitive secrets are encrypted at rest when `ENCRYPTION_KEY` is set.
## Admin password reset (CLI)
Reset an admin password via CLI:
```
docker compose exec api npm run admin:reset -- admin@simplemailcleaner.local NEW_PASSWORD
```
Generate a temporary password (forces change on next login):
```
docker compose exec api npm run admin:reset -- admin@simplemailcleaner.local
```
## Security hardening (public hosting)
The app includes a security hardening pass for public deployments. Highlights:
- **No public DB/Redis ports** by default (only API/Web are bound, DB/Redis are internal to Docker).
- **CORS locked down** via `CORS_ORIGINS`.
- **Rate limiting** globally and stricter on auth endpoints.
- **Short-lived SSE tokens** instead of using the user JWT in URLs.
- **OAuth state signed** to prevent token injection.
- **SSRF protections** for ListUnsubscribe HTTP and custom mail hosts.
- **Secrets encrypted at rest** (OAuth tokens, app passwords, Google client secret).
- **Swagger disabled** by default in production.
- **Production env validation** rejects default secrets and missing encryption key.
### Findings and fixes (audit log)
- **Open DB/Redis ports** → removed public port bindings in `docker-compose.yml`.
- **Default secrets in production** → config validation blocks default JWT/seed secrets in `NODE_ENV=production`.
- **Tokens/app passwords stored in plain text** → encrypted at rest with `ENCRYPTION_KEY`.
- **SSRF via unsubscribe URLs / custom hosts** → private network block + scheme validation + timeouts.
- **OAuth state not verifiable** → state is now a signed, expiring JWT.
- **JWT in SSE URL** → replaced with short-lived stream tokens.
- **CORS allow-all** → restricted by `CORS_ORIGINS`.
- **Swagger exposed** → disabled by default in production.
- **No rate limiting** → global and auth-specific rate limits added.
### Required production settings
Set these in `.env` before going public:
- `NODE_ENV=production`
- `JWT_SECRET=<strong secret>`
- `ENCRYPTION_KEY=<min 32 chars>`
- `CORS_ORIGINS=https://your-domain.tld`
- `TRUST_PROXY=true` (when behind nginx)
- `ENABLE_SWAGGER=false`
- `SEED_ENABLED=false` (after initial setup)
### Optional hardening
- `ALLOW_CUSTOM_MAIL_HOSTS=false` (default) to force provider defaults
- `BLOCK_PRIVATE_NETWORKS=true` (default) to block private IPs in unsubscribe URLs
## Environment
All config lives in the repo root `.env` (see `.env.example`).
@@ -97,7 +172,85 @@ Proxy settings (Nginx Proxy Manager):
- `TRUST_PROXY=true`
- `VITE_API_URL=https://your-domain.tld`
- `GOOGLE_REDIRECT_URI=https://your-domain.tld/oauth/gmail/callback`
- `CORS_ORIGINS=https://your-domain.tld`
Local ports (override via `.env` in repo root):
- `BIND_IP` (default `127.0.0.1`)
- `API_PORT` (default `8000`, now set to `8201` in `.env`)
- `WEB_PORT` (default `3000`, now set to `3201` in `.env`)
## Reverse proxy notes (Nginx)
- Terminate TLS at nginx.
- Only expose nginx (80/443) publicly.
- Keep API/Web bound to `127.0.0.1` (or internal Docker network).
- Set `TRUST_PROXY=true` so the app honors `X-Forwarded-*` headers.
## Nginx Proxy Manager (NPM) setup
Minimal steps to run behind Nginx Proxy Manager with limited nginx customization.
### 1) Bind services locally
In `.env`:
```
BIND_IP=127.0.0.1
API_PORT=8201
WEB_PORT=3201
```
### 2) Put NPM and Mailcleaner in the same Docker network
If NPM runs in Docker, attach both stacks to a shared network (example: `proxy`).
Create network once:
```
docker network create proxy
```
Add to `docker-compose.yml`:
```
networks:
proxy:
external: true
```
Then attach services:
```
services:
api:
networks: [proxy]
web:
networks: [proxy]
```
### 3) Create proxy hosts in NPM
Create **two** Proxy Hosts:
**Frontend**
- Domain: `app.your-domain.tld`
- Scheme: `http`
- Forward Hostname/IP: `mailcleaner-web`
- Forward Port: `3000`
- Websockets: ON
- Block Common Exploits: ON
- SSL: Let's Encrypt, Force SSL
**API**
- Domain: `api.your-domain.tld`
- Scheme: `http`
- Forward Hostname/IP: `mailcleaner-api`
- Forward Port: `8201`
- Websockets: ON
- Block Common Exploits: ON
- SSL: Let's Encrypt, Force SSL
### 4) Environment for public hosting
Set in `.env`:
```
NODE_ENV=production
TRUST_PROXY=true
CORS_ORIGINS=https://app.your-domain.tld
VITE_API_URL=https://api.your-domain.tld
GOOGLE_REDIRECT_URI=https://api.your-domain.tld/oauth/gmail/callback
ENABLE_SWAGGER=false
JWT_SECRET=<strong secret>
ENCRYPTION_KEY=<min 32 chars>
SEED_ENABLED=false
```

View File

@@ -224,6 +224,27 @@
"ipaddr.js": "^2.1.0"
}
},
"node_modules/@fastify/rate-limit": {
"version": "10.3.0",
"resolved": "https://registry.npmjs.org/@fastify/rate-limit/-/rate-limit-10.3.0.tgz",
"integrity": "sha512-eIGkG9XKQs0nyynatApA3EVrojHOuq4l6fhB4eeCk4PIOeadvOJz9/4w3vGI44Go17uaXOWEcPkaD8kuKm7g6Q==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/fastify"
},
{
"type": "opencollective",
"url": "https://opencollective.com/fastify"
}
],
"license": "MIT",
"dependencies": {
"@lukeed/ms": "^2.0.2",
"fastify-plugin": "^5.0.0",
"toad-cache": "^3.7.0"
}
},
"node_modules/@fastify/send": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@fastify/send/-/send-4.1.0.tgz",

File diff suppressed because one or more lines are too long

View File

@@ -151,6 +151,7 @@ exports.Prisma.UserScalarFieldEnum = {
password: 'password',
role: 'role',
isActive: 'isActive',
passwordResetRequired: 'passwordResetRequired',
createdAt: 'createdAt',
updatedAt: 'updatedAt'
};

View File

@@ -4280,6 +4280,7 @@ export namespace Prisma {
password: string | null
role: $Enums.UserRole | null
isActive: boolean | null
passwordResetRequired: boolean | null
createdAt: Date | null
updatedAt: Date | null
}
@@ -4291,6 +4292,7 @@ export namespace Prisma {
password: string | null
role: $Enums.UserRole | null
isActive: boolean | null
passwordResetRequired: boolean | null
createdAt: Date | null
updatedAt: Date | null
}
@@ -4302,6 +4304,7 @@ export namespace Prisma {
password: number
role: number
isActive: number
passwordResetRequired: number
createdAt: number
updatedAt: number
_all: number
@@ -4315,6 +4318,7 @@ export namespace Prisma {
password?: true
role?: true
isActive?: true
passwordResetRequired?: true
createdAt?: true
updatedAt?: true
}
@@ -4326,6 +4330,7 @@ export namespace Prisma {
password?: true
role?: true
isActive?: true
passwordResetRequired?: true
createdAt?: true
updatedAt?: true
}
@@ -4337,6 +4342,7 @@ export namespace Prisma {
password?: true
role?: true
isActive?: true
passwordResetRequired?: true
createdAt?: true
updatedAt?: true
_all?: true
@@ -4421,6 +4427,7 @@ export namespace Prisma {
password: string
role: $Enums.UserRole
isActive: boolean
passwordResetRequired: boolean
createdAt: Date
updatedAt: Date
_count: UserCountAggregateOutputType | null
@@ -4449,6 +4456,7 @@ export namespace Prisma {
password?: boolean
role?: boolean
isActive?: boolean
passwordResetRequired?: boolean
createdAt?: boolean
updatedAt?: boolean
tenant?: boolean | TenantDefaultArgs<ExtArgs>
@@ -4461,6 +4469,7 @@ export namespace Prisma {
password?: boolean
role?: boolean
isActive?: boolean
passwordResetRequired?: boolean
createdAt?: boolean
updatedAt?: boolean
tenant?: boolean | TenantDefaultArgs<ExtArgs>
@@ -4473,6 +4482,7 @@ export namespace Prisma {
password?: boolean
role?: boolean
isActive?: boolean
passwordResetRequired?: boolean
createdAt?: boolean
updatedAt?: boolean
}
@@ -4496,6 +4506,7 @@ export namespace Prisma {
password: string
role: $Enums.UserRole
isActive: boolean
passwordResetRequired: boolean
createdAt: Date
updatedAt: Date
}, ExtArgs["result"]["user"]>
@@ -4898,6 +4909,7 @@ export namespace Prisma {
readonly password: FieldRef<"User", 'String'>
readonly role: FieldRef<"User", 'UserRole'>
readonly isActive: FieldRef<"User", 'Boolean'>
readonly passwordResetRequired: FieldRef<"User", 'Boolean'>
readonly createdAt: FieldRef<"User", 'DateTime'>
readonly updatedAt: FieldRef<"User", 'DateTime'>
}
@@ -15158,6 +15170,7 @@ export namespace Prisma {
password: 'password',
role: 'role',
isActive: 'isActive',
passwordResetRequired: 'passwordResetRequired',
createdAt: 'createdAt',
updatedAt: 'updatedAt'
};
@@ -15647,6 +15660,7 @@ export namespace Prisma {
password?: StringFilter<"User"> | string
role?: EnumUserRoleFilter<"User"> | $Enums.UserRole
isActive?: BoolFilter<"User"> | boolean
passwordResetRequired?: BoolFilter<"User"> | boolean
createdAt?: DateTimeFilter<"User"> | Date | string
updatedAt?: DateTimeFilter<"User"> | Date | string
tenant?: XOR<TenantRelationFilter, TenantWhereInput>
@@ -15659,6 +15673,7 @@ export namespace Prisma {
password?: SortOrder
role?: SortOrder
isActive?: SortOrder
passwordResetRequired?: SortOrder
createdAt?: SortOrder
updatedAt?: SortOrder
tenant?: TenantOrderByWithRelationInput
@@ -15674,6 +15689,7 @@ export namespace Prisma {
password?: StringFilter<"User"> | string
role?: EnumUserRoleFilter<"User"> | $Enums.UserRole
isActive?: BoolFilter<"User"> | boolean
passwordResetRequired?: BoolFilter<"User"> | boolean
createdAt?: DateTimeFilter<"User"> | Date | string
updatedAt?: DateTimeFilter<"User"> | Date | string
tenant?: XOR<TenantRelationFilter, TenantWhereInput>
@@ -15686,6 +15702,7 @@ export namespace Prisma {
password?: SortOrder
role?: SortOrder
isActive?: SortOrder
passwordResetRequired?: SortOrder
createdAt?: SortOrder
updatedAt?: SortOrder
_count?: UserCountOrderByAggregateInput
@@ -15703,6 +15720,7 @@ export namespace Prisma {
password?: StringWithAggregatesFilter<"User"> | string
role?: EnumUserRoleWithAggregatesFilter<"User"> | $Enums.UserRole
isActive?: BoolWithAggregatesFilter<"User"> | boolean
passwordResetRequired?: BoolWithAggregatesFilter<"User"> | boolean
createdAt?: DateTimeWithAggregatesFilter<"User"> | Date | string
updatedAt?: DateTimeWithAggregatesFilter<"User"> | Date | string
}
@@ -16606,6 +16624,7 @@ export namespace Prisma {
password: string
role?: $Enums.UserRole
isActive?: boolean
passwordResetRequired?: boolean
createdAt?: Date | string
updatedAt?: Date | string
tenant: TenantCreateNestedOneWithoutUsersInput
@@ -16618,6 +16637,7 @@ export namespace Prisma {
password: string
role?: $Enums.UserRole
isActive?: boolean
passwordResetRequired?: boolean
createdAt?: Date | string
updatedAt?: Date | string
}
@@ -16628,6 +16648,7 @@ export namespace Prisma {
password?: StringFieldUpdateOperationsInput | string
role?: EnumUserRoleFieldUpdateOperationsInput | $Enums.UserRole
isActive?: BoolFieldUpdateOperationsInput | boolean
passwordResetRequired?: BoolFieldUpdateOperationsInput | boolean
createdAt?: DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string
tenant?: TenantUpdateOneRequiredWithoutUsersNestedInput
@@ -16640,6 +16661,7 @@ export namespace Prisma {
password?: StringFieldUpdateOperationsInput | string
role?: EnumUserRoleFieldUpdateOperationsInput | $Enums.UserRole
isActive?: BoolFieldUpdateOperationsInput | boolean
passwordResetRequired?: BoolFieldUpdateOperationsInput | boolean
createdAt?: DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string
}
@@ -16651,6 +16673,7 @@ export namespace Prisma {
password: string
role?: $Enums.UserRole
isActive?: boolean
passwordResetRequired?: boolean
createdAt?: Date | string
updatedAt?: Date | string
}
@@ -16661,6 +16684,7 @@ export namespace Prisma {
password?: StringFieldUpdateOperationsInput | string
role?: EnumUserRoleFieldUpdateOperationsInput | $Enums.UserRole
isActive?: BoolFieldUpdateOperationsInput | boolean
passwordResetRequired?: BoolFieldUpdateOperationsInput | boolean
createdAt?: DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string
}
@@ -16672,6 +16696,7 @@ export namespace Prisma {
password?: StringFieldUpdateOperationsInput | string
role?: EnumUserRoleFieldUpdateOperationsInput | $Enums.UserRole
isActive?: BoolFieldUpdateOperationsInput | boolean
passwordResetRequired?: BoolFieldUpdateOperationsInput | boolean
createdAt?: DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string
}
@@ -17778,6 +17803,7 @@ export namespace Prisma {
password?: SortOrder
role?: SortOrder
isActive?: SortOrder
passwordResetRequired?: SortOrder
createdAt?: SortOrder
updatedAt?: SortOrder
}
@@ -17789,6 +17815,7 @@ export namespace Prisma {
password?: SortOrder
role?: SortOrder
isActive?: SortOrder
passwordResetRequired?: SortOrder
createdAt?: SortOrder
updatedAt?: SortOrder
}
@@ -17800,6 +17827,7 @@ export namespace Prisma {
password?: SortOrder
role?: SortOrder
isActive?: SortOrder
passwordResetRequired?: SortOrder
createdAt?: SortOrder
updatedAt?: SortOrder
}
@@ -19441,6 +19469,7 @@ export namespace Prisma {
password: string
role?: $Enums.UserRole
isActive?: boolean
passwordResetRequired?: boolean
createdAt?: Date | string
updatedAt?: Date | string
}
@@ -19451,6 +19480,7 @@ export namespace Prisma {
password: string
role?: $Enums.UserRole
isActive?: boolean
passwordResetRequired?: boolean
createdAt?: Date | string
updatedAt?: Date | string
}
@@ -19654,6 +19684,7 @@ export namespace Prisma {
password?: StringFilter<"User"> | string
role?: EnumUserRoleFilter<"User"> | $Enums.UserRole
isActive?: BoolFilter<"User"> | boolean
passwordResetRequired?: BoolFilter<"User"> | boolean
createdAt?: DateTimeFilter<"User"> | Date | string
updatedAt?: DateTimeFilter<"User"> | Date | string
}
@@ -21034,6 +21065,7 @@ export namespace Prisma {
password: string
role?: $Enums.UserRole
isActive?: boolean
passwordResetRequired?: boolean
createdAt?: Date | string
updatedAt?: Date | string
}
@@ -21127,6 +21159,7 @@ export namespace Prisma {
password?: StringFieldUpdateOperationsInput | string
role?: EnumUserRoleFieldUpdateOperationsInput | $Enums.UserRole
isActive?: BoolFieldUpdateOperationsInput | boolean
passwordResetRequired?: BoolFieldUpdateOperationsInput | boolean
createdAt?: DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string
}
@@ -21137,6 +21170,7 @@ export namespace Prisma {
password?: StringFieldUpdateOperationsInput | string
role?: EnumUserRoleFieldUpdateOperationsInput | $Enums.UserRole
isActive?: BoolFieldUpdateOperationsInput | boolean
passwordResetRequired?: BoolFieldUpdateOperationsInput | boolean
createdAt?: DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string
}
@@ -21147,6 +21181,7 @@ export namespace Prisma {
password?: StringFieldUpdateOperationsInput | string
role?: EnumUserRoleFieldUpdateOperationsInput | $Enums.UserRole
isActive?: BoolFieldUpdateOperationsInput | boolean
passwordResetRequired?: BoolFieldUpdateOperationsInput | boolean
createdAt?: DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string
}

File diff suppressed because one or more lines are too long

View File

@@ -1,5 +1,5 @@
{
"name": "prisma-client-0dfa452a25e24864bcf3f498cd8b34074b00c8171bdce93526b6a0ab38135aa4",
"name": "prisma-client-c17ad3aa0cf0f08d50d857b25a55f6c6b1d830984e9a5fc498323230fa13571f",
"main": "index.js",
"types": "index.d.ts",
"browser": "index-browser.js",

View File

@@ -81,14 +81,15 @@ model ExportJob {
}
model User {
id String @id @default(cuid())
tenantId String
email String @unique
password String
role UserRole @default(USER)
isActive Boolean @default(true)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
id String @id @default(cuid())
tenantId String
email String @unique
password String
role UserRole @default(USER)
isActive Boolean @default(true)
passwordResetRequired Boolean @default(false)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
tenant Tenant @relation(fields: [tenantId], references: [id])
}

View File

@@ -151,6 +151,7 @@ exports.Prisma.UserScalarFieldEnum = {
password: 'password',
role: 'role',
isActive: 'isActive',
passwordResetRequired: 'passwordResetRequired',
createdAt: 'createdAt',
updatedAt: 'updatedAt'
};

View File

@@ -0,0 +1,5 @@
# Set the default behavior, in case people don't have core.autocrlf set
* text=auto
# Require Unix line endings
* text eol=lf

View File

@@ -0,0 +1,13 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 10
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 10

View File

@@ -0,0 +1,21 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 15
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
exemptLabels:
- "discussion"
- "feature request"
- "bug"
- "help wanted"
- "plugin suggestion"
- "good first issue"
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false

View File

@@ -0,0 +1,24 @@
name: CI
on:
push:
paths-ignore:
- 'docs/**'
- '*.md'
pull_request:
paths-ignore:
- 'docs/**'
- '*.md'
permissions:
contents: read
jobs:
test:
permissions:
contents: write
pull-requests: write
uses: fastify/workflows/.github/workflows/plugins-ci-redis.yml@v5
with:
license-check: true
lint: true

21
backend/node_modules/@fastify/rate-limit/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2018 Fastify
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

550
backend/node_modules/@fastify/rate-limit/README.md generated vendored Normal file
View File

@@ -0,0 +1,550 @@
# @fastify/rate-limit
[![CI](https://github.com/fastify/fastify-rate-limit/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/fastify/fastify-rate-limit/actions/workflows/ci.yml)
[![NPM version](https://img.shields.io/npm/v/@fastify/rate-limit.svg?style=flat)](https://www.npmjs.com/package/@fastify/rate-limit)
[![neostandard javascript style](https://img.shields.io/badge/code_style-neostandard-brightgreen?style=flat)](https://github.com/neostandard/neostandard)
A low overhead rate limiter for your routes.
## Install
```
npm i @fastify/rate-limit
```
### Compatibility
| Plugin version | Fastify version |
| -------------- | -------------------- |
| `>=10.x` | `^5.x` |
| `>=7.x <10.x` | `^4.x` |
| `>=3.x <7.x` | `^3.x` |
| `>=2.x <7.x` | `^2.x` |
| `^1.x` | `^1.x` |
Please note that if a Fastify version is out of support, then so are the corresponding versions of this plugin
in the table above.
See [Fastify's LTS policy](https://github.com/fastify/fastify/blob/main/docs/Reference/LTS.md) for more details.
## Usage
Register the plugin and, if required, pass some custom options.<br>
This plugin will add an `onRequest` hook to check if a client (based on their IP address) has made too many requests in the given timeWindow.
```js
import Fastify from 'fastify'
const fastify = Fastify()
await fastify.register(import('@fastify/rate-limit'), {
max: 100,
timeWindow: '1 minute'
})
fastify.get('/', (request, reply) => {
reply.send({ hello: 'world' })
})
fastify.listen({ port: 3000 }, err => {
if (err) throw err
console.log('Server listening at http://localhost:3000')
})
```
In case a client reaches the maximum number of allowed requests, an error will be sent to the user with the status code set to `429`:
```js
{
statusCode: 429,
error: 'Too Many Requests',
message: 'Rate limit exceeded, retry in 1 minute'
}
```
You can change the response by providing a callback to `errorResponseBuilder` or setting a [custom error handler](https://fastify.dev/docs/latest/Reference/Server/#seterrorhandler):
```js
fastify.setErrorHandler(function (error, request, reply) {
if (error.statusCode === 429) {
reply.code(429)
error.message = 'You hit the rate limit! Slow down please!'
}
reply.send(error)
})
```
The response will have some additional headers:
| Header | Description |
|--------|-------------|
|`x-ratelimit-limit` | how many requests the client can make
|`x-ratelimit-remaining` | how many requests remain to the client in the timewindow
|`x-ratelimit-reset` | how many seconds must pass before the rate limit resets
|`retry-after` | if the max has been reached, the seconds the client must wait before they can make new requests
### Preventing guessing of URLs through 404s
An attacker could search for valid URLs if your 404 error handling is not rate limited.
To rate limit your 404 response, you can use a custom handler:
```js
const fastify = Fastify()
await fastify.register(rateLimit, { global: true, max: 2, timeWindow: 1000 })
fastify.setNotFoundHandler({
preHandler: fastify.rateLimit()
}, function (request, reply) {
reply.code(404).send({ hello: 'world' })
})
```
Note that you can customize the behavior of the preHandler in the same way you would for specific routes:
```js
const fastify = Fastify()
await fastify.register(rateLimit, { global: true, max: 2, timeWindow: 1000 })
fastify.setNotFoundHandler({
preHandler: fastify.rateLimit({
max: 4,
timeWindow: 500
})
}, function (request, reply) {
reply.code(404).send({ hello: 'world' })
})
```
### Options
You can pass the following options during the plugin registration:
```js
await fastify.register(import('@fastify/rate-limit'), {
global : false, // default true
max: 3, // default 1000
ban: 2, // default -1
timeWindow: 5000, // default 1000 * 60
hook: 'preHandler', // default 'onRequest'
cache: 10000, // default 5000
allowList: ['127.0.0.1'], // default []
redis: new Redis({ host: '127.0.0.1' }), // default null
nameSpace: 'teste-ratelimit-', // default is 'fastify-rate-limit-'
continueExceeding: true, // default false
skipOnError: true, // default false
keyGenerator: function (request) { /* ... */ }, // default (request) => request.ip
errorResponseBuilder: function (request, context) { /* ... */},
enableDraftSpec: true, // default false. Uses IETF draft header standard
addHeadersOnExceeding: { // default show all the response headers when rate limit is not reached
'x-ratelimit-limit': true,
'x-ratelimit-remaining': true,
'x-ratelimit-reset': true
},
addHeaders: { // default show all the response headers when rate limit is reached
'x-ratelimit-limit': true,
'x-ratelimit-remaining': true,
'x-ratelimit-reset': true,
'retry-after': true
}
})
```
- `global` : indicates if the plugin should apply rate limiting to all routes within the encapsulation scope.
- `max`: maximum number of requests a single client can perform inside a timeWindow. It can be an async function with the signature `async (request, key) => {}` where `request` is the Fastify request object and `key` is the value generated by the `keyGenerator`. The function **must** return a number.
- `ban`: maximum number of 429 responses to return to a client before returning 403 responses. When the ban limit is exceeded, the context argument that is passed to `errorResponseBuilder` will have its `ban` property set to `true`. **Note:** `0` can also be passed to directly return 403 responses when a client exceeds the `max` limit.
- `timeWindow:` the duration of the time window. It can be expressed in milliseconds, as a string (in the [`ms`](https://github.com/zeit/ms) format), or as an async function with the signature `async (request, key) => {}` where `request` is the Fastify request object and `key` is the value generated by the `keyGenerator`. The function **must** return a number.
- `cache`: this plugin internally uses an LRU cache to handle the clients, you can change the size of the cache with this option
- `allowList`: array of IP strings to exclude from rate limiting. It can be a sync or async function with the signature `(request, key) => {}` where `request` is the Fastify request object and `key` is the value generated by the `keyGenerator`. If the function returns a truthy value, the request will be excluded from the rate limit.
- `redis`: by default, this plugin uses an in-memory store, but if an application runs on multiple servers, an external store will be needed. This plugin requires the use of [`ioredis`](https://github.com/redis/ioredis).<br> **Note:** the [default settings](https://github.com/redis/ioredis/blob/v4.16.0/API.md#new_Redis_new) of an ioredis instance are not optimal for rate limiting. We recommend customizing the `connectTimeout` and `maxRetriesPerRequest` parameters as shown in the [`example`](https://github.com/fastify/fastify-rate-limit/tree/main/example/example.js).
- `nameSpace`: choose which prefix to use in the redis, default is 'fastify-rate-limit-'
- `continueExceeding`: Renew user limitation when user sends a request to the server when still limited. This will take priority over `exponentialBackoff`
- `store`: a custom store to track requests and rates which allows you to use your own storage mechanism (using an RDBMS, MongoDB, etc.) as well as further customizing the logic used in calculating the rate limits. A simple example is provided below as well as a more detailed example using Knex.js can be found in the [`example/`](https://github.com/fastify/fastify-rate-limit/tree/main/example) folder
- `skipOnError`: if `true` it will skip errors generated by the storage (e.g. redis not reachable).
- `keyGenerator`: a sync or async function to generate a unique identifier for each incoming request. Defaults to `(request) => request.ip`, the IP is resolved by fastify using `request.connection.remoteAddress` or `request.headers['x-forwarded-for']` if [trustProxy](https://fastify.dev/docs/latest/Reference/Server/#trustproxy) option is enabled. Use it if you want to override this behavior
- `groupId`: a string to group multiple routes together introducing separate per-group rate limit. This will be added on top of the result of `keyGenerator`.
- `errorResponseBuilder`: a function to generate a custom response object. Defaults to `(request, context) => ({statusCode: 429, error: 'Too Many Requests', message: ``Rate limit exceeded, retry in ${context.after}``})`
- `addHeadersOnExceeding`: define which headers should be added in the response when the limit is not reached. Defaults all the headers will be shown
- `addHeaders`: define which headers should be added in the response when the limit is reached. Defaults all the headers will be shown
- `enableDraftSpec`: if `true` it will change the HTTP rate limit headers following the IETF draft document. More information at [draft-ietf-httpapi-ratelimit-headers.md](https://github.com/ietf-wg-httpapi/ratelimit-headers/blob/f6a7bc7560a776ea96d800cf5ed3752d6d397b06/draft-ietf-httpapi-ratelimit-headers.md).
- `onExceeding`: callback that will be executed before request limit has been reached.
- `onExceeded`: callback that will be executed after request limit has been reached.
- `onBanReach`: callback that will be executed when the ban limit has been reached.
- `exponentialBackoff`: Renew user limitation exponentially when user sends a request to the server when still limited.
`keyGenerator` example usage:
```js
await fastify.register(import('@fastify/rate-limit'), {
/* ... */
keyGenerator: function (request) {
return request.headers['x-real-ip'] // nginx
|| request.headers['x-client-ip'] // apache
|| request.headers['x-forwarded-for'] // use this only if you trust the header
|| request.session.username // you can limit based on any session value
|| request.ip // fallback to default
}
})
```
Variable `max` example usage:
```js
// In the same timeWindow, the max value can change based on request and/or key like this
fastify.register(rateLimit, {
/* ... */
keyGenerator (request) { return request.headers['service-key'] },
max: async (request, key) => { return key === 'pro' ? 3 : 2 },
timeWindow: 1000
})
```
`errorResponseBuilder` example usage:
```js
await fastify.register(import('@fastify/rate-limit'), {
/* ... */
errorResponseBuilder: function (request, context) {
return {
statusCode: 429,
error: 'Too Many Requests',
message: `I only allow ${context.max} requests per ${context.after} to this Website. Try again soon.`,
date: Date.now(),
expiresIn: context.ttl // milliseconds
}
}
})
```
Dynamic `allowList` example usage:
```js
await fastify.register(import('@fastify/rate-limit'), {
/* ... */
allowList: function (request, key) {
return request.headers['x-app-client-id'] === 'internal-usage'
}
})
```
Custom `hook` example usage (after authentication):
```js
await fastify.register(import('@fastify/rate-limit'), {
hook: 'preHandler',
keyGenerator: function (request) {
return request.userId || request.ip
}
})
fastify.decorateRequest('userId', '')
fastify.addHook('preHandler', async function (request) {
const { userId } = request.query
if (userId) {
request.userId = userId
}
})
```
Custom `store` example usage:
NOTE: The ```timeWindow``` will always be passed as the numeric value in milliseconds into the store's constructor.
```js
function CustomStore (options) {
this.options = options
this.current = 0
}
CustomStore.prototype.incr = function (key, cb) {
const timeWindow = this.options.timeWindow
this.current++
cb(null, { current: this.current, ttl: timeWindow - (this.current * 1000) })
}
CustomStore.prototype.child = function (routeOptions) {
// We create a merged copy of the current parent parameters with the specific
// route parameters and pass them into the child store.
const childParams = Object.assign(this.options, routeOptions)
const store = new CustomStore(childParams)
// Here is where you may want to do some custom calls on the store with the information
// in routeOptions first...
// store.setSubKey(routeOptions.method + routeOptions.url)
return store
}
await fastify.register(import('@fastify/rate-limit'), {
/* ... */
store: CustomStore
})
```
The `routeOptions` object passed to the `child` method of the store will contain the same options that are detailed above for plugin registration with any specific overrides provided on the route. In addition, the following parameter is provided:
- `routeInfo`: The configuration of the route including `method`, `url`, `path`, and the full route `config`
Custom `onExceeding` example usage:
```js
await fastify.register(import('@fastify/rate-limit'), {
/* */
onExceeding: function (req, key) {
console.log('callback on exceeding ... executed before response to client')
}
})
```
Custom `onExceeded` example usage:
```js
await fastify.register(import('@fastify/rate-limit'), {
/* */
onExceeded: function (req, key) {
console.log('callback on exceeded ... executed before response to client')
}
})
```
Custom `onBanReach` example usage:
```js
await fastify.register(import('@fastify/rate-limit'), {
/* */
ban: 10,
onBanReach: function (req, key) {
console.log('callback on exceeded ban limit')
}
})
```
### Options on the endpoint itself
Rate limiting can also be configured at the route level, applying the configuration independently.
For example the `allowList` if configured:
- on plugin registration will affect all endpoints within the encapsulation scope
- on route declaration will affect only the targeted endpoint
The global allowlist is configured when registering it with `fastify.register(...)`.
The endpoint allowlist is set on the endpoint directly with the `{ config : { rateLimit : { allowList : [] } } }` object.
ACL checking is performed based on the value of the key from the `keyGenerator`.
In this example, we are checking the IP address, but it could be an allowlist of specific user identifiers (like JWT or tokens):
```js
import Fastify from 'fastify'
const fastify = Fastify()
await fastify.register(import('@fastify/rate-limit'),
{
global : false, // don't apply these settings to all the routes of the context
max: 3000, // default global max rate limit
allowList: ['192.168.0.10'], // global allowlist access.
redis: redis, // custom connection to redis
})
// add a limited route with this configuration plus the global one
fastify.get('/', {
config: {
rateLimit: {
max: 3,
timeWindow: '1 minute'
}
}
}, (request, reply) => {
reply.send({ hello: 'from ... root' })
})
// add a limited route with this configuration plus the global one
fastify.get('/private', {
config: {
rateLimit: {
max: 3,
timeWindow: '1 minute'
}
}
}, (request, reply) => {
reply.send({ hello: 'from ... private' })
})
// this route doesn't have any rate limit
fastify.get('/public', (request, reply) => {
reply.send({ hello: 'from ... public' })
})
// add a limited route with this configuration plus the global one
fastify.get('/public/sub-rated-1', {
config: {
rateLimit: {
timeWindow: '1 minute',
allowList: ['127.0.0.1'],
onExceeding: function (request, key) {
console.log('callback on exceeding ... executed before response to client')
},
onExceeded: function (request, key) {
      console.log('callback on exceeded ... e.g. to blacklist an IP in a security group; the request is given as an argument')
}
}
}
}, (request, reply) => {
reply.send({ hello: 'from sub-rated-1 ... using default max value ... ' })
})
// group routes and add a rate limit
fastify.get('/otp/send', {
config: {
rateLimit: {
max: 3,
timeWindow: '1 minute',
groupId:"OTP"
}
}
}, (request, reply) => {
reply.send({ hello: 'from ... grouped rate limit' })
})
fastify.get('/otp/resend', {
config: {
rateLimit: {
max: 3,
timeWindow: '1 minute',
groupId:"OTP"
}
}
}, (request, reply) => {
reply.send({ hello: 'from ... grouped rate limit' })
})
```
In the route creation you can override the same settings of the plugin registration plus the following additional options:
- `onExceeding` : callback that will be executed each time a request is made to a route that is rate-limited
- `onExceeded` : callback that will be executed when a user reaches the maximum number of tries. Can be useful to blacklist clients
You may also want to set a global rate limiter and then disable it on some routes:
```js
import Fastify from 'fastify'
const fastify = Fastify()
await fastify.register(import('@fastify/rate-limit'), {
max: 100,
timeWindow: '1 minute'
})
// add a limited route with global config
fastify.get('/', (request, reply) => {
reply.send({ hello: 'from ... rate limited root' })
})
// this route doesn't have any rate limit
fastify.get('/public', {
config: {
rateLimit: false
}
}, (request, reply) => {
reply.send({ hello: 'from ... public' })
})
// add a limited route with global config and different max
fastify.get('/private', {
config: {
rateLimit: {
max: 9
}
}
}, (request, reply) => {
reply.send({ hello: 'from ... private and more limited' })
})
```
### Manual Rate Limit
A custom limiter function can be created with `fastify.createRateLimit()`, which is handy when needing to integrate with
technologies like [GraphQL](https://graphql.org/) or [tRPC](https://trpc.io/). This function uses the global [options](#options) set
during plugin registration, but you can override options such as `store`, `skipOnError`, `max`, `timeWindow`,
`allowList`, `keyGenerator`, and `ban`.
Example usage:
```js
import Fastify from 'fastify'
const fastify = Fastify()
// register with global options
await fastify.register(import('@fastify/rate-limit'), {
global : false,
max: 100,
timeWindow: '1 minute'
})
// checkRateLimit will use the global options provided above when called
const checkRateLimit = fastify.createRateLimit();
fastify.get("/", async (request, reply) => {
// manually check the rate limit (using global options)
const limit = await checkRateLimit(request);
if(!limit.isAllowed && limit.isExceeded) {
return reply.code(429).send("Limit exceeded");
}
return reply.send("Hello world");
});
// override global max option
const checkCustomRateLimit = fastify.createRateLimit({ max: 100 });
fastify.get("/custom", async (request, reply) => {
// manually check the rate limit (using global options and overridden max option)
const limit = await checkCustomRateLimit(request);
// manually handle limit exceedance
if(!limit.isAllowed && limit.isExceeded) {
return reply.code(429).send("Limit exceeded");
}
return reply.send("Hello world");
});
```
A custom limiter function created with `fastify.createRateLimit()` only requires a `FastifyRequest` as the first parameter:
```js
const checkRateLimit = fastify.createRateLimit();
const limit = await checkRateLimit(request);
```
The returned `limit` is an object containing the following properties for the `request` passed to `checkRateLimit`.
- `isAllowed`: if `true`, the request was excluded from rate limiting according to the configured `allowList`.
- `key`: the generated key as returned by the `keyGenerator` function.
If `isAllowed` is `false` the object also contains these additional properties:
- `max`: the configured `max` option as a number. If a `max` function was supplied as global option or to `fastify.createRateLimit()`, this property will correspond to the function's return type for the given `request`.
- `timeWindow`: the configured `timeWindow` option in milliseconds. If a function was supplied to `timeWindow`, similar to the `max` property above, this property will be equal to the function's return type.
- `remaining`: the remaining amount of requests before the limit is exceeded.
- `ttl`: the remaining time until the limit will be reset in milliseconds.
- `ttlInSeconds`: `ttl` in seconds.
- `isExceeded`: `true` if the limit was exceeded.
- `isBanned`: `true` if the request was banned according to the `ban` option.
### Examples of Custom Store
These examples show an overview of the `store` feature and you should take inspiration from it and tweak as you need:
- [Knex-SQLite](./example/example-knex.js)
- [Knex-MySQL](./example/example-knex-mysql.js)
- [Sequelize-PostgreSQL](./example/example-sequelize.js)
### IETF Draft Spec Headers
The response will have the following headers if `enableDraftSpec` is `true`:
| Header | Description |
|--------|-------------|
|`ratelimit-limit` | how many requests the client can make
|`ratelimit-remaining` | how many requests remain to the client in the timewindow
|`ratelimit-reset` | how many seconds must pass before the rate limit resets
|`retry-after` | contains the same value in time as `ratelimit-reset`
### Contribute
To run tests locally, you need a Redis instance that you can launch with this command:
```
npm run redis
```
<a name="license"></a>
## License
Licensed under [MIT](./LICENSE).

View File

@@ -0,0 +1,6 @@
'use strict'
module.exports = require('neostandard')({
ignores: require('neostandard').resolveIgnoresFromGitignore(),
ts: true
})

View File

@@ -0,0 +1,116 @@
'use strict'

// Example: @fastify/rate-limit backed by Redis (ESM, top-level await).
import Redis from 'ioredis'
import Fastify from 'fastify'

// Short timeouts so the example fails fast when Redis is unavailable.
const redis = new Redis({
  enableAutoPipelining: true,
  connectionName: 'my-connection-name',
  host: 'localhost',
  port: 6379,
  connectTimeout: 500,
  maxRetriesPerRequest: 1
})

const fastify = Fastify()

// global: false -> only routes that opt in via config.rateLimit are limited.
await fastify.register(import('../index.js'),
  {
    global: false,
    max: 3000, // default max rate limit
    // timeWindow: 1000*60,
    // cache: 10000,
    allowList: ['127.0.0.2'], // global allowList access ( ACL based on the key from the keyGenerator)
    redis, // connection to redis
    skipOnError: false // default false
    // keyGenerator: function(req) { /* ... */ }, // default (req) => req.raw.ip
  })

// Route-level limit: 3 requests per minute.
fastify.get('/', {
  config: {
    rateLimit: {
      max: 3,
      timeWindow: '1 minute'
    }
  }
}, (_req, reply) => {
  reply.send({ hello: 'from ... root' })
})

// Route-level allowList overrides the global one for this endpoint only.
fastify.get('/private', {
  config: {
    rateLimit: {
      max: 3,
      allowList: ['127.0.2.1', '127.0.3.1'],
      timeWindow: '1 minute'
    }
  }
}, (_req, reply) => {
  reply.send({ hello: 'from ... private' })
})

// No config.rateLimit and global: false -> this route is not limited.
fastify.get('/public', (_req, reply) => {
  reply.send({ hello: 'from ... public' })
})

// Uses the global default max; demonstrates the onExceeding/onExceeded hooks.
fastify.get('/public/sub-rated-1', {
  config: {
    rateLimit: {
      timeWindow: '1 minute',
      allowList: ['127.0.2.1'],
      onExceeding: function () {
        console.log('callback on exceededing ... executed before response to client. req is give as argument')
      },
      onExceeded: function () {
        console.log('callback on exceeded ... to black ip in security group for example, req is give as argument')
      }
    }
  }
}, (_req, reply) => {
  reply.send({ hello: 'from sub-rated-1 ... using default max value ... ' })
})

fastify.get('/public/sub-rated-2', {
  config: {
    rateLimit: {
      max: 3,
      timeWindow: '1 minute',
      onExceeding: function () {
        console.log('callback on exceededing ... executed before response to client. req is give as argument')
      },
      onExceeded: function () {
        console.log('callback on exceeded ... to black ip in security group for example, req is give as argument')
      }
    }
  }
}, (_req, reply) => {
  reply.send({ hello: 'from ... sub-rated-2' })
})

fastify.get('/home', {
  config: {
    rateLimit: {
      max: 200,
      timeWindow: '1 minute'
    }
  }
}, (_req, reply) => {
  reply.send({ hello: 'toto' })
})

// Demonstrates a custom errorResponseBuilder returning a plain object.
fastify.get('/customerrormessage', {
  config: {
    rateLimit: {
      max: 2,
      timeWindow: '1 minute',
      errorResponseBuilder: (_req, context) => ({ code: 429, timeWindow: context.after, limit: context.max })
    }
  }
}, (_req, reply) => {
  reply.send({ hello: 'toto' })
})

fastify.listen({ port: 3000 }, err => {
  if (err) throw err
  console.log('Server listening at http://localhost:3000')
})

View File

@@ -0,0 +1,124 @@
'use strict'
/* eslint-disable no-undef */
// Example of a custom store using Knex.js and MySQL.
//
// Assumes you have access to a configured knex object.
//
// Note that the rate check should place a read lock on the row.
// For MySQL see:
// https://dev.mysql.com/doc/refman/8.0/en/innodb-locking-reads.html
// https://blog.nodeswat.com/concurrency-mysql-and-node-js-a-journey-of-discovery-31281e53572e
//
// Below is an example table to store rate limits that must be created
// in the database first.
//
// exports.up = async knex => {
// await knex.schema.createTable('rate_limits', table => {
// table.string('source').notNullable()
// table.string('route').notNullable()
// table.integer('count').unsigned()
// table.bigInteger ('ttl')
// table.primary(['route', 'source'])
// })
// }
//
// exports.down = async knex => {
// await knex.schema.dropTable('rate_limits')
// }
//
// CREATE TABLE `rate_limits` (
// `source` varchar(255) NOT NULL,
// `route` varchar(255) NOT NULL,
// `count` int unsigned DEFAULT NULL,
// `ttl` int unsigned DEFAULT NULL,
// PRIMARY KEY (`route`,`source`)
// ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
/**
 * Custom rate-limit store backed by Knex/MySQL.
 * @param {object} options - plugin/route options (timeWindow, max, ...)
 */
function KnexStore (options) {
  this.options = options
  this.route = ''
}

/**
 * Remember the route identifier used to scope counters.
 * A falsy argument leaves the stored route untouched.
 * @param {string} routeId - e.g. method + url
 * @returns {string} the argument, unchanged
 */
KnexStore.prototype.routeKey = function (routeId) {
  if (routeId) {
    this.route = routeId
  }
  return routeId
}
/**
 * Increment the counter for `key` within the current route scope.
 * Uses SELECT ... FOR UPDATE inside a transaction so concurrent requests
 * cannot race the read-modify-write of the counter.
 * NOTE(review): relies on the module-level `knex` and `fastify` globals
 * (see the eslint-disable at the top of this example file).
 * @param {string} key - client key (e.g. IP) from the keyGenerator
 * @param {Function} cb - callback(err, { current, ttl })
 */
KnexStore.prototype.incr = async function (key, cb) {
  const now = (new Date()).getTime()
  const ttl = now + this.options.timeWindow
  const max = this.options.max
  const cond = { route: this.route, source: key }
  const trx = await knex.transaction()
  try {
    // NOTE: MySQL syntax FOR UPDATE for read lock on counter stats in row
    const row = await trx('rate_limits')
      .whereRaw('route = ? AND source = ? FOR UPDATE', [cond.route || '', cond.source]) // Create read lock
    const d = row[0]
    if (d && d.ttl > now) {
      // Window still active: bump the counter (capped at max).
      // Optimization - no need to UPDATE if max has been reached.
      if (d.count < max) {
        await trx
          .raw('UPDATE rate_limits SET count = ? WHERE route = ? AND source = ?', [d.count + 1, cond.route, key])
      }
      // If we were already at max no need to UPDATE but we must still send d.count + 1 to trigger rate limit.
      process.nextTick(cb, null, { current: d.count + 1, ttl: d.ttl })
    } else {
      // Window expired or first hit: reset the counter to 1 and refresh ttl.
      await trx
        .raw('INSERT INTO rate_limits(route, source, count, ttl) VALUES(?,?,1,?) ON DUPLICATE KEY UPDATE count = 1, ttl = ?', [cond.route, key, d?.ttl || ttl, ttl])
      process.nextTick(cb, null, { current: 1, ttl: d?.ttl || ttl })
    }
    await trx.commit()
  } catch (err) {
    await trx.rollback()
    // TODO: Handle as desired
    fastify.log.error(err)
    process.nextTick(cb, err, { current: 0 })
  }
}
/**
 * Create a child store scoped to a specific route.
 * Parent options are overridden by the route's own, with `global` forced
 * off so per-route settings apply independently. (Optionally flip this
 * if you want `global: true` at initial registration.)
 * @param {object} routeOptions - merged route options incl. routeInfo
 * @returns {KnexStore}
 */
KnexStore.prototype.child = function (routeOptions = {}) {
  const merged = Object.assign({}, this.options, routeOptions, { global: false })
  const childStore = new KnexStore(merged)
  childStore.routeKey(routeOptions.routeInfo.method + routeOptions.routeInfo.url)
  return childStore
}
// Register the plugin with the custom store; global: false means only routes
// declaring a config.rateLimit object are limited.
fastify.register(require('../../fastify-rate-limit'),
  {
    global: false,
    max: 10,
    store: KnexStore,
    skipOnError: false
  }
)

fastify.get('/', {
  config: {
    rateLimit: {
      max: 10,
      timeWindow: '1 minute'
    }
  }
}, (_req, reply) => {
  reply.send({ hello: 'from ... root' })
})

fastify.get('/private', {
  config: {
    rateLimit: {
      max: 3,
      timeWindow: '1 minute'
    }
  }
}, (_req, reply) => {
  reply.send({ hello: 'from ... private' })
})

// No config.rateLimit and global: false -> this route is not limited.
fastify.get('/public', (_req, reply) => {
  reply.send({ hello: 'from ... public' })
})

View File

@@ -0,0 +1,118 @@
'use strict'
// Example of a Custom Store using Knex.js ORM for SQLite database
// Below is an example table to store rate limits that must be created
// in the database first
//
// CREATE TABLE "RateLimits" (
// "Route" TEXT,
// "Source" TEXT,
// "Count" INTEGER,
// "TTL" NUMERIC,
// PRIMARY KEY("Source")
// );
//
// CREATE UNIQUE INDEX "idx_uniq_route_source" ON "RateLimits" (Route, Source);
//
const Knex = require('knex')
const fastify = require('fastify')()

// SQLite connection used by the custom store below.
const knex = Knex({
  client: 'sqlite3',
  connection: {
    filename: './db.sqlite'
  }
})
/**
 * Custom rate-limit store backed by Knex/SQLite.
 * @param {object} options - plugin/route options (timeWindow, max, ...)
 */
function KnexStore (options) {
  this.options = options
  this.route = ''
}

/**
 * Remember the route identifier used to scope counters.
 * Fixed to always return `route`: the previous `if/else` shape returned
 * `undefined` whenever a truthy route was passed, unlike the sibling
 * Knex-MySQL and Sequelize example stores which return the argument.
 * @param {string} route - e.g. method + url
 * @returns {string} the argument, unchanged
 */
KnexStore.prototype.routeKey = function (route) {
  if (route) this.route = route
  return route
}
/**
 * Increment the counter for `key` within the current route scope.
 * @param {string} key - client key (e.g. IP) from the keyGenerator
 * @param {Function} cb - callback(err, { current, ttl })
 *
 * NOTE(review): `this` inside the non-arrow `knex.transaction` callback is
 * not the store instance, so `this.route` here is likely undefined — verify.
 * NOTE(review): `key`/`this.route` are interpolated directly into SQL;
 * unlike the MySQL example this is open to SQL injection if the key ever
 * contains attacker-controlled text — prefer bound parameters.
 * NOTE(review): the UPDATE branch sets Count = 1 instead of incrementing,
 * which looks inconsistent with the other example stores — confirm intent.
 */
KnexStore.prototype.incr = function (key, cb) {
  const now = (new Date()).getTime()
  const ttl = now + this.options.timeWindow
  knex.transaction(function (trx) {
    trx
      .where({ Route: this.route, Source: key })
      .then(d => {
        if (d.TTL > now) {
          // Window still active per the stored TTL.
          trx
            .raw(`UPDATE RateLimits SET Count = 1 WHERE Route='${this.route}' AND Source='${key}'`)
            .then(() => {
              cb(null, { current: 1, ttl: d.TTL })
            })
            .catch(err => {
              cb(err, { current: 0 })
            })
        } else {
          // First hit or expired window: upsert the row.
          trx
            .raw(`INSERT INTO RateLimits(Route, Source, Count, TTL) VALUES('${this.route}', '${key}',1,${d.TTL || ttl}) ON CONFLICT(Route, Source) DO UPDATE SET Count=Count+1,TTL=${ttl}`)
            .then(() => {
              cb(null, { current: d.Count ? d.Count + 1 : 1, ttl: d.TTL || ttl })
            })
            .catch(err => {
              cb(err, { current: 0 })
            })
        }
      })
      .catch(err => {
        cb(err, { current: 0 })
      })
  })
}
/**
 * Create a child store scoped to a specific route.
 * Fixed to merge into a fresh object: the previous
 * `Object.assign(this.options, routeOptions)` mutated the parent store's
 * options on every child creation, leaking route settings across routes.
 * @param {object} routeOptions - merged route options incl. routeInfo
 * @returns {KnexStore}
 */
KnexStore.prototype.child = function (routeOptions) {
  const options = Object.assign({}, this.options, routeOptions)
  const store = new KnexStore(options)
  store.routeKey(routeOptions.routeInfo.method + routeOptions.routeInfo.url)
  return store
}
// Register the plugin with the custom store; global: false means only routes
// declaring a config.rateLimit object are limited.
fastify.register(require('../../fastify-rate-limit'),
  {
    global: false,
    max: 10,
    store: KnexStore,
    skipOnError: false
  }
)

fastify.get('/', {
  config: {
    rateLimit: {
      max: 10,
      timeWindow: '1 minute'
    }
  }
}, (_req, reply) => {
  reply.send({ hello: 'from ... root' })
})

fastify.get('/private', {
  config: {
    rateLimit: {
      max: 3,
      timeWindow: '1 minute'
    }
  }
}, (_req, reply) => {
  reply.send({ hello: 'from ... private' })
})

// No config.rateLimit and global: false -> this route is not limited.
fastify.get('/public', (_req, reply) => {
  reply.send({ hello: 'from ... public' })
})

fastify.listen({ port: 3000 }, err => {
  if (err) throw err
  console.log('Server listening at http://localhost:3000')
})

View File

@@ -0,0 +1,185 @@
'use strict'
// Example of a Custom Store using Sequelize ORM for PostgreSQL database
// Sequelize Migration for "RateLimits" table
//
// module.exports = {
// up: (queryInterface, { TEXT, INTEGER, BIGINT }) => {
// return queryInterface.createTable(
// 'RateLimits',
// {
// Route: {
// type: TEXT,
// allowNull: false
// },
// Source: {
// type: TEXT,
// allowNull: false,
// primaryKey: true
// },
// Count: {
// type: INTEGER,
// allowNull: false
// },
// TTL: {
// type: BIGINT,
// allowNull: false
// }
// },
// {
// freezeTableName: true,
// timestamps: false,
// uniqueKeys: {
// unique_tag: {
// customIndex: true,
// fields: ['Route', 'Source']
// }
// }
// }
// )
// },
// down: queryInterface => {
// return queryInterface.dropTable('RateLimits')
// }
// }
const fastify = require('fastify')()
const Sequelize = require('sequelize')

// Connection string for the example database.
const databaseUri = 'postgres://username:password@localhost:5432/fastify-rate-limit-example'
const sequelize = new Sequelize(databaseUri)
// OR
// const sequelize = new Sequelize('database', 'username', 'password');

// Sequelize Model for "RateLimits" table
//
const RateLimits = sequelize.define(
  'RateLimits',
  {
    Route: {
      type: Sequelize.TEXT,
      allowNull: false
    },
    Source: {
      type: Sequelize.TEXT,
      allowNull: false,
      primaryKey: true
    },
    Count: {
      type: Sequelize.INTEGER,
      allowNull: false
    },
    TTL: {
      type: Sequelize.BIGINT,
      allowNull: false
    }
  },
  {
    freezeTableName: true,
    timestamps: false,
    indexes: [
      {
        unique: true,
        fields: ['Route', 'Source']
      }
    ]
  }
)
/**
 * Custom rate-limit store backed by Sequelize/PostgreSQL.
 * @param {object} options - plugin/route options (timeWindow, max, ...)
 */
function RateLimiterStore (options) {
  this.options = options
  this.route = ''
}

/**
 * Remember the route identifier used to scope counters.
 * A falsy argument leaves the stored route untouched.
 * @param {string} routeId - e.g. method + url
 * @returns {string} the argument, unchanged
 */
RateLimiterStore.prototype.routeKey = function routeKey (routeId) {
  if (routeId) {
    this.route = routeId
  }
  return routeId
}
/**
 * Increment the counter for `key` within the current route scope.
 * If an unexpired row exists, bump its Count; otherwise upsert a fresh row
 * with Count = 1 and a new TTL.
 * Security fix: the upsert previously interpolated `this.route` and `key`
 * (which comes from the request via the keyGenerator) straight into the SQL
 * string — now passed as bound `replacements` instead. Also converted the
 * then/catch chain to await/try for consistency with the first branch.
 * @param {string} key - client key (e.g. IP) from the keyGenerator
 * @param {Function} cb - callback(err, { current, ttl })
 */
RateLimiterStore.prototype.incr = async function incr (key, cb) {
  const now = new Date().getTime()
  const ttl = now + this.options.timeWindow
  const cond = { Route: this.route, Source: key }
  const RateLimit = await RateLimits.findOne({ where: cond })
  if (RateLimit && parseInt(RateLimit.TTL, 10) > now) {
    try {
      await RateLimit.update({ Count: RateLimit.Count + 1 }, cond)
      cb(null, {
        current: RateLimit.Count + 1,
        ttl: RateLimit.TTL
      })
    } catch (err) {
      cb(err, {
        current: 0
      })
    }
  } else {
    try {
      // Bound parameters: never interpolate request-derived values into SQL.
      await sequelize.query(
        `INSERT INTO "RateLimits"("Route", "Source", "Count", "TTL")
        VALUES(:route, :source, 1, :insertTtl)
        ON CONFLICT("Route", "Source") DO UPDATE SET "Count"=1, "TTL"=:updateTtl`,
        {
          replacements: {
            route: this.route,
            source: key,
            insertTtl: RateLimit?.TTL || ttl,
            updateTtl: ttl
          }
        }
      )
      cb(null, {
        current: 1,
        ttl: RateLimit?.TTL || ttl
      })
    } catch (err) {
      cb(err, {
        current: 0
      })
    }
  }
}
/**
 * Create a child store scoped to a specific route.
 * Fixed to merge into a fresh object: the previous
 * `Object.assign(this.options, routeOptions)` mutated the parent store's
 * options on every child creation, leaking route settings across routes.
 * @param {object} routeOptions - merged route options incl. routeInfo
 * @returns {RateLimiterStore}
 */
RateLimiterStore.prototype.child = function child (routeOptions = {}) {
  const options = Object.assign({}, this.options, routeOptions)
  const store = new RateLimiterStore(options)
  store.routeKey(routeOptions.routeInfo.method + routeOptions.routeInfo.url)
  return store
}
// Register the plugin with the custom store; global: false means only routes
// declaring a config.rateLimit object are limited.
fastify.register(require('../../fastify-rate-limit'),
  {
    global: false,
    max: 10,
    store: RateLimiterStore,
    skipOnError: false
  }
)

fastify.get('/', {
  config: {
    rateLimit: {
      max: 10,
      timeWindow: '1 minute'
    }
  }
}, (_req, reply) => {
  reply.send({ hello: 'from ... root' })
})

fastify.get('/private', {
  config: {
    rateLimit: {
      max: 3,
      timeWindow: '1 minute'
    }
  }
}, (_req, reply) => {
  reply.send({ hello: 'from ... private' })
})

// No config.rateLimit and global: false -> this route is not limited.
fastify.get('/public', (_req, reply) => {
  reply.send({ hello: 'from ... public' })
})

fastify.listen({ port: 3000 }, err => {
  if (err) throw err
  console.log('Server listening at http://localhost:3000')
})

View File

@@ -0,0 +1,25 @@
// Minimal ESM example: one route behind a global rate limit.
import fastify from 'fastify'
import fastifyRateLimit from '../index.js'

const server = fastify()

// global: true applies the limit to every route in this scope.
await server.register(fastifyRateLimit, {
  global: true,
  max: 10000,
  timeWindow: '1 minute'
})

server.get('/', (_request, reply) => {
  reply.send('Hello, world!')
})

// Start listening; startup errors are logged rather than rethrown.
const start = async () => {
  try {
    await server.listen({ port: 3000 })
    console.log('Server is running on port 3000')
  } catch (error) {
    console.error('Error starting server:', error)
  }
}

start()

View File

@@ -0,0 +1,113 @@
'use strict'

// Example: fastify-rate-limit backed by Redis (CommonJS).
const Redis = require('ioredis')

// Short timeouts so the example fails fast when Redis is unavailable.
const redis = new Redis({
  connectionName: 'my-connection-name',
  host: 'localhost',
  port: 6379,
  connectTimeout: 500,
  maxRetriesPerRequest: 1
})

const fastify = require('fastify')()

// global: false -> only routes that opt in via config.rateLimit are limited.
fastify.register(require('../../fastify-rate-limit'),
  {
    global: false,
    max: 3000, // default max rate limit
    // timeWindow: 1000*60,
    // cache: 10000,
    allowList: ['127.0.0.2'], // global allowList access ( ACL based on the key from the keyGenerator)
    redis, // connection to redis
    skipOnError: false // default false
    // keyGenerator: function(req) { /* ... */ }, // default (req) => req.raw.ip
  })

// Route-level limit: 3 requests per minute.
fastify.get('/', {
  config: {
    rateLimit: {
      max: 3,
      timeWindow: '1 minute'
    }
  }
}, (_req, reply) => {
  reply.send({ hello: 'from ... root' })
})

// Route-level allowList overrides the global one for this endpoint only.
fastify.get('/private', {
  config: {
    rateLimit: {
      max: 3,
      allowList: ['127.0.2.1', '127.0.3.1'],
      timeWindow: '1 minute'
    }
  }
}, (_req, reply) => {
  reply.send({ hello: 'from ... private' })
})

// No config.rateLimit and global: false -> this route is not limited.
fastify.get('/public', (_req, reply) => {
  reply.send({ hello: 'from ... public' })
})

// Uses the global default max; demonstrates the onExceeding/onExceeded hooks.
fastify.get('/public/sub-rated-1', {
  config: {
    rateLimit: {
      timeWindow: '1 minute',
      allowList: ['127.0.2.1'],
      onExceeding: function () {
        console.log('callback on exceededing ... executed before response to client. req is give as argument')
      },
      onExceeded: function () {
        console.log('callback on exceeded ... to black ip in security group for example, req is give as argument')
      }
    }
  }
}, (_req, reply) => {
  reply.send({ hello: 'from sub-rated-1 ... using default max value ... ' })
})

fastify.get('/public/sub-rated-2', {
  config: {
    rateLimit: {
      max: 3,
      timeWindow: '1 minute',
      onExceeding: function () {
        console.log('callback on exceededing ... executed before response to client. req is give as argument')
      },
      onExceeded: function () {
        console.log('callback on exceeded ... to black ip in security group for example, req is give as argument')
      }
    }
  }
}, (_req, reply) => {
  reply.send({ hello: 'from ... sub-rated-2' })
})

fastify.get('/home', {
  config: {
    rateLimit: {
      max: 200,
      timeWindow: '1 minute'
    }
  }
}, (_req, reply) => {
  reply.send({ hello: 'toto' })
})

// Demonstrates a custom errorResponseBuilder returning a plain object.
fastify.get('/customerrormessage', {
  config: {
    rateLimit: {
      max: 2,
      timeWindow: '1 minute',
      errorResponseBuilder: (_req, context) => ({ code: 429, timeWindow: context.after, limit: context.max })
    }
  }
}, (_req, reply) => {
  reply.send({ hello: 'toto' })
})

fastify.listen({ port: 3000 }, err => {
  if (err) throw err
  console.log('Server listening at http://localhost:3000')
})

342
backend/node_modules/@fastify/rate-limit/index.js generated vendored Normal file
View File

@@ -0,0 +1,342 @@
'use strict'

const fp = require('fastify-plugin')
const { parse, format } = require('@lukeed/ms')
const LocalStore = require('./store/LocalStore')
const RedisStore = require('./store/RedisStore')

// Fallbacks used when no max / timeWindow / hook is configured.
const defaultMax = 1000
const defaultTimeWindow = 60000 // 1 minute, in milliseconds
const defaultHook = 'onRequest'

// Conventional x-ratelimit-* response header names.
const defaultHeaders = {
  rateLimit: 'x-ratelimit-limit',
  rateRemaining: 'x-ratelimit-remaining',
  rateReset: 'x-ratelimit-reset',
  retryAfter: 'retry-after'
}

// Header names from the IETF ratelimit-headers draft (enableDraftSpec).
const draftSpecHeaders = {
  rateLimit: 'ratelimit-limit',
  rateRemaining: 'ratelimit-remaining',
  rateReset: 'ratelimit-reset',
  retryAfter: 'retry-after'
}

// No-op default for the onExceeding / onExceeded / onBanReach callbacks.
const defaultOnFn = () => {}

// Default key: the client IP as resolved by Fastify.
const defaultKeyGenerator = (req) => req.ip

// Default error produced when the limit is exceeded; statusCode comes from
// the limiter context (429, or 403 when banned — set by the caller).
const defaultErrorResponse = (_req, context) => {
  const err = new Error(`Rate limit exceeded, retry in ${context.after}`)
  err.statusCode = context.statusCode
  return err
}
async function fastifyRateLimit (fastify, settings) {
const globalParams = {
global: (typeof settings.global === 'boolean') ? settings.global : true
}
if (typeof settings.enableDraftSpec === 'boolean' && settings.enableDraftSpec) {
globalParams.enableDraftSpec = true
globalParams.labels = draftSpecHeaders
} else {
globalParams.enableDraftSpec = false
globalParams.labels = defaultHeaders
}
globalParams.addHeaders = Object.assign({
[globalParams.labels.rateLimit]: true,
[globalParams.labels.rateRemaining]: true,
[globalParams.labels.rateReset]: true,
[globalParams.labels.retryAfter]: true
}, settings.addHeaders)
globalParams.addHeadersOnExceeding = Object.assign({
[globalParams.labels.rateLimit]: true,
[globalParams.labels.rateRemaining]: true,
[globalParams.labels.rateReset]: true
}, settings.addHeadersOnExceeding)
// Global maximum allowed requests
if (Number.isFinite(settings.max) && settings.max >= 0) {
globalParams.max = Math.trunc(settings.max)
} else if (
typeof settings.max === 'function'
) {
globalParams.max = settings.max
} else {
globalParams.max = defaultMax
}
// Global time window
if (Number.isFinite(settings.timeWindow) && settings.timeWindow >= 0) {
globalParams.timeWindow = Math.trunc(settings.timeWindow)
} else if (typeof settings.timeWindow === 'string') {
globalParams.timeWindow = parse(settings.timeWindow)
} else if (
typeof settings.timeWindow === 'function'
) {
globalParams.timeWindow = settings.timeWindow
} else {
globalParams.timeWindow = defaultTimeWindow
}
globalParams.hook = settings.hook || defaultHook
globalParams.allowList = settings.allowList || settings.whitelist || null
globalParams.ban = Number.isFinite(settings.ban) && settings.ban >= 0 ? Math.trunc(settings.ban) : -1
globalParams.onBanReach = typeof settings.onBanReach === 'function' ? settings.onBanReach : defaultOnFn
globalParams.onExceeding = typeof settings.onExceeding === 'function' ? settings.onExceeding : defaultOnFn
globalParams.onExceeded = typeof settings.onExceeded === 'function' ? settings.onExceeded : defaultOnFn
globalParams.continueExceeding = typeof settings.continueExceeding === 'boolean' ? settings.continueExceeding : false
globalParams.exponentialBackoff = typeof settings.exponentialBackoff === 'boolean' ? settings.exponentialBackoff : false
globalParams.keyGenerator = typeof settings.keyGenerator === 'function'
? settings.keyGenerator
: defaultKeyGenerator
if (typeof settings.errorResponseBuilder === 'function') {
globalParams.errorResponseBuilder = settings.errorResponseBuilder
globalParams.isCustomErrorMessage = true
} else {
globalParams.errorResponseBuilder = defaultErrorResponse
globalParams.isCustomErrorMessage = false
}
globalParams.skipOnError = typeof settings.skipOnError === 'boolean' ? settings.skipOnError : false
const pluginComponent = {
rateLimitRan: Symbol('fastify.request.rateLimitRan'),
store: null
}
if (settings.store) {
const Store = settings.store
pluginComponent.store = new Store(globalParams)
} else {
if (settings.redis) {
pluginComponent.store = new RedisStore(globalParams.continueExceeding, globalParams.exponentialBackoff, settings.redis, settings.nameSpace)
} else {
pluginComponent.store = new LocalStore(globalParams.continueExceeding, globalParams.exponentialBackoff, settings.cache)
}
}
fastify.decorateRequest(pluginComponent.rateLimitRan, false)
if (!fastify.hasDecorator('createRateLimit')) {
fastify.decorate('createRateLimit', (options) => {
const args = createLimiterArgs(pluginComponent, globalParams, options)
return (req) => applyRateLimit.apply(this, args.concat(req))
})
}
if (!fastify.hasDecorator('rateLimit')) {
fastify.decorate('rateLimit', (options) => {
const args = createLimiterArgs(pluginComponent, globalParams, options)
return rateLimitRequestHandler(...args)
})
}
fastify.addHook('onRoute', (routeOptions) => {
if (routeOptions.config?.rateLimit != null) {
if (typeof routeOptions.config.rateLimit === 'object') {
const newPluginComponent = Object.create(pluginComponent)
const mergedRateLimitParams = mergeParams(globalParams, routeOptions.config.rateLimit, { routeInfo: routeOptions })
newPluginComponent.store = pluginComponent.store.child(mergedRateLimitParams)
addRouteRateHook(newPluginComponent, mergedRateLimitParams, routeOptions)
} else if (routeOptions.config.rateLimit !== false) {
throw new Error('Unknown value for route rate-limit configuration')
}
} else if (globalParams.global) {
// As the endpoint does not have a custom configuration, use the global one
addRouteRateHook(pluginComponent, globalParams, routeOptions)
}
})
}
function mergeParams (...params) {
const result = Object.assign({}, ...params)
if (Number.isFinite(result.timeWindow) && result.timeWindow >= 0) {
result.timeWindow = Math.trunc(result.timeWindow)
} else if (typeof result.timeWindow === 'string') {
result.timeWindow = parse(result.timeWindow)
} else if (typeof result.timeWindow !== 'function') {
result.timeWindow = defaultTimeWindow
}
if (Number.isFinite(result.max) && result.max >= 0) {
result.max = Math.trunc(result.max)
} else if (typeof result.max !== 'function') {
result.max = defaultMax
}
if (Number.isFinite(result.ban) && result.ban >= 0) {
result.ban = Math.trunc(result.ban)
} else {
result.ban = -1
}
if (result.groupId !== undefined && typeof result.groupId !== 'string') {
throw new Error('groupId must be a string')
}
return result
}
/**
 * Build the `[pluginComponent, params]` pair used by the limiter entry points.
 * When per-call options are supplied, derive a prototype-linked component with
 * a child store and merged params; otherwise reuse the global pair untouched.
 */
function createLimiterArgs (pluginComponent, globalParams, options) {
  // Note: `typeof null === 'object'`, so `null` options also take the merge path,
  // matching the original behavior.
  if (typeof options !== 'object') {
    return [pluginComponent, globalParams]
  }
  const component = Object.create(pluginComponent)
  const params = mergeParams(globalParams, options, { routeInfo: {} })
  component.store = component.store.child(params)
  return [component, params]
}
/**
 * Attach a rate-limit handler to the route's configured lifecycle hook
 * (`params.hook`), preserving any hook handlers the route already declares.
 */
function addRouteRateHook (pluginComponent, params, routeOptions) {
  const hookName = params.hook
  const limiter = rateLimitRequestHandler(pluginComponent, params)
  const existing = routeOptions[hookName]
  if (Array.isArray(existing)) {
    // Route already has a list of handlers: append ours.
    existing.push(limiter)
  } else if (typeof existing === 'function') {
    // Single handler: promote to a list, keeping the original first.
    routeOptions[hookName] = [existing, limiter]
  } else {
    routeOptions[hookName] = [limiter]
  }
}
/**
 * Resolve the rate-limit state for one request without sending a response.
 *
 * Returns `{ isAllowed: true, key }` for allow-listed requests; otherwise an
 * object describing the counter state: `max`, `timeWindow`, `remaining`,
 * `ttl`, `ttlInSeconds`, `isExceeded` and `isBanned`. When the store errors
 * and `params.skipOnError` is set, the error is swallowed and the zeroed
 * counter defaults are reported instead.
 */
async function applyRateLimit (pluginComponent, params, req) {
  const { store } = pluginComponent
  // Retrieve the key from the generator (the global one or the one defined in the endpoint)
  let key = await params.keyGenerator(req)
  // Routes sharing a groupId share one counter: fold the group into the key.
  const groupId = req.routeOptions.config?.rateLimit?.groupId
  if (groupId) {
    key += groupId
  }
  // Don't apply any rate limiting if in the allow list
  if (params.allowList) {
    if (typeof params.allowList === 'function') {
      if (await params.allowList(req, key)) {
        return {
          isAllowed: true,
          key
        }
      }
    } else if (params.allowList.indexOf(key) !== -1) {
      return {
        isAllowed: true,
        key
      }
    }
  }
  // max/timeWindow may be static numbers or per-request (possibly async) functions.
  const max = typeof params.max === 'number' ? params.max : await params.max(req, key)
  const timeWindow = typeof params.timeWindow === 'number' ? params.timeWindow : await params.timeWindow(req, key)
  let current = 0
  let ttl = 0
  let ttlInSeconds = 0
  // We increment the rate limit for the current request
  try {
    // store.incr is callback-based; adapt it to a Promise so we can await it.
    const res = await new Promise((resolve, reject) => {
      store.incr(key, (err, res) => {
        err ? reject(err) : resolve(res)
      }, timeWindow, max)
    })
    current = res.current
    ttl = res.ttl
    ttlInSeconds = Math.ceil(res.ttl / 1000)
  } catch (err) {
    // Only propagate store failures when skipOnError is disabled.
    if (!params.skipOnError) {
      throw err
    }
  }
  return {
    isAllowed: false,
    key,
    max,
    timeWindow,
    remaining: Math.max(0, max - current),
    ttl,
    ttlInSeconds,
    isExceeded: current > max,
    // Banning kicks in once the over-limit overshoot exceeds `ban` (-1 = disabled).
    isBanned: params.ban !== -1 && current - max > params.ban
  }
}
/**
 * Build the per-request hook handler that enforces the rate limit: it sets
 * the informational X-RateLimit-* headers and, once the limit is exceeded,
 * throws the configured error response (429, or 403 when banned).
 *
 * The `rateLimitRan` marker guards against running the limiter twice for a
 * single request (e.g. when both a route hook and the not-found handler are
 * rate limited).
 */
function rateLimitRequestHandler (pluginComponent, params) {
  const { rateLimitRan } = pluginComponent
  return async (req, res) => {
    if (req[rateLimitRan]) {
      return
    }
    req[rateLimitRan] = true
    const rateLimit = await applyRateLimit(pluginComponent, params, req)
    // Allow-listed requests skip headers and enforcement entirely.
    if (rateLimit.isAllowed) {
      return
    }
    const {
      key,
      max,
      remaining,
      ttl,
      ttlInSeconds,
      isExceeded,
      isBanned
    } = rateLimit
    if (!isExceeded) {
      // Still under the limit: optionally expose the configured headers.
      if (params.addHeadersOnExceeding[params.labels.rateLimit]) { res.header(params.labels.rateLimit, max) }
      if (params.addHeadersOnExceeding[params.labels.rateRemaining]) { res.header(params.labels.rateRemaining, remaining) }
      if (params.addHeadersOnExceeding[params.labels.rateReset]) { res.header(params.labels.rateReset, ttlInSeconds) }
      params.onExceeding(req, key)
      return
    }
    // Limit exceeded: notify, set headers, then throw the error response so
    // the caller's error handler decides the final reply.
    params.onExceeded(req, key)
    if (params.addHeaders[params.labels.rateLimit]) { res.header(params.labels.rateLimit, max) }
    if (params.addHeaders[params.labels.rateRemaining]) { res.header(params.labels.rateRemaining, 0) }
    if (params.addHeaders[params.labels.rateReset]) { res.header(params.labels.rateReset, ttlInSeconds) }
    if (params.addHeaders[params.labels.retryAfter]) { res.header(params.labels.retryAfter, ttlInSeconds) }
    const respCtx = {
      statusCode: 429,
      ban: false,
      max,
      ttl,
      after: format(ttlInSeconds * 1000, true)
    }
    if (isBanned) {
      // Banned clients get 403 instead of 429.
      respCtx.statusCode = 403
      respCtx.ban = true
      params.onBanReach(req, key)
    }
    throw params.errorResponseBuilder(req, respCtx)
  }
}
// Register as a Fastify plugin so decorators/hooks apply to the calling
// encapsulation context; requires Fastify 5.x.
module.exports = fp(fastifyRateLimit, {
  fastify: '5.x',
  name: '@fastify/rate-limit'
})
// Support default-import and named-import (ESM interop) styles as well.
module.exports.default = fastifyRateLimit
module.exports.fastifyRateLimit = fastifyRateLimit

86
backend/node_modules/@fastify/rate-limit/package.json generated vendored Normal file
View File

@@ -0,0 +1,86 @@
{
"name": "@fastify/rate-limit",
"version": "10.3.0",
"description": "A low overhead rate limiter for your routes",
"main": "index.js",
"type": "commonjs",
"types": "types/index.d.ts",
"scripts": {
"lint": "eslint",
"lint:fix": "eslint --fix",
"redis": "docker run -p 6379:6379 --name rate-limit-redis -d --rm redis",
"test": "npm run test:unit && npm run test:typescript",
"test:unit": "c8 --100 node --test",
"test:typescript": "tsd"
},
"repository": {
"type": "git",
"url": "git+https://github.com/fastify/fastify-rate-limit.git"
},
"keywords": [
"fastify",
"rate",
"limit"
],
"author": "Tomas Della Vedova - @delvedor (http://delved.org)",
"contributors": [
{
"name": "Matteo Collina",
"email": "hello@matteocollina.com"
},
{
"name": "Manuel Spigolon",
"email": "behemoth89@gmail.com"
},
{
"name": "Gürgün Dayıoğlu",
"email": "hey@gurgun.day",
"url": "https://heyhey.to/G"
},
{
"name": "Frazer Smith",
"email": "frazer.dev@icloud.com",
"url": "https://github.com/fdawgs"
}
],
"license": "MIT",
"bugs": {
"url": "https://github.com/fastify/fastify-rate-limit/issues"
},
"homepage": "https://github.com/fastify/fastify-rate-limit#readme",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/fastify"
},
{
"type": "opencollective",
"url": "https://opencollective.com/fastify"
}
],
"devDependencies": {
"@fastify/pre-commit": "^2.1.0",
"@sinonjs/fake-timers": "^14.0.0",
"@types/node": "^22.0.0",
"c8": "^10.1.2",
"eslint": "^9.17.0",
"fastify": "^5.0.0",
"ioredis": "^5.4.1",
"knex": "^3.1.0",
"neostandard": "^0.12.0",
"sqlite3": "^5.1.7",
"tsd": "^0.32.0"
},
"dependencies": {
"@lukeed/ms": "^2.0.2",
"fastify-plugin": "^5.0.0",
"toad-cache": "^3.7.0"
},
"publishConfig": {
"access": "public"
},
"pre-commit": [
"lint",
"test"
]
}

View File

@@ -0,0 +1,50 @@
'use strict'
const { LruMap: Lru } = require('toad-cache')
/**
 * In-memory rate-limit store backed by an LRU map.
 *
 * @param {boolean} continueExceeding - reset the TTL on every over-limit hit.
 * @param {boolean} exponentialBackoff - grow the TTL exponentially once over the limit.
 * @param {number} [cache=5000] - maximum number of tracked keys (LRU capacity).
 */
function LocalStore (continueExceeding, exponentialBackoff, cache = 5000) {
  this.continueExceeding = continueExceeding
  this.exponentialBackoff = exponentialBackoff
  this.lru = new Lru(cache)
}
/**
 * Increment the counter for `ip` inside the current time window and report
 * the updated state via `cb(null, { current, ttl, iterationStartMs })`.
 * `ttl` is the remaining window in milliseconds.
 */
LocalStore.prototype.incr = function (ip, cb, timeWindow, max) {
  const nowInMs = Date.now()
  let current = this.lru.get(ip)
  if (!current) {
    // Item doesn't exist
    current = { current: 1, ttl: timeWindow, iterationStartMs: nowInMs }
  } else if (current.iterationStartMs + timeWindow <= nowInMs) {
    // Item has expired
    current.current = 1
    current.ttl = timeWindow
    current.iterationStartMs = nowInMs
  } else {
    // Item is alive
    ++current.current
    // Reset TTL if max has been exceeded and `continueExceeding` is enabled
    if (this.continueExceeding && current.current > max) {
      current.ttl = timeWindow
      current.iterationStartMs = nowInMs
    } else if (this.exponentialBackoff && current.current > max) {
      // Handle exponential backoff: double the window per over-limit hit,
      // clamped to Number.MAX_SAFE_INTEGER to avoid overflow.
      const backoffExponent = current.current - max - 1
      const ttl = timeWindow * (2 ** backoffExponent)
      current.ttl = Number.isSafeInteger(ttl) ? ttl : Number.MAX_SAFE_INTEGER
      current.iterationStartMs = nowInMs
    } else {
      // Normal case: TTL is whatever remains of the original window.
      current.ttl = timeWindow - (nowInMs - current.iterationStartMs)
    }
  }
  this.lru.set(ip, current)
  cb(null, current)
}
/**
 * Create a store scoped to a single route. The child starts with a fresh LRU
 * (sized by `routeOptions.cache`) so route counters never mix with global ones.
 */
LocalStore.prototype.child = function (routeOptions) {
  const { continueExceeding, exponentialBackoff, cache } = routeOptions
  return new LocalStore(continueExceeding, exponentialBackoff, cache)
}
module.exports = LocalStore

View File

@@ -0,0 +1,58 @@
'use strict'
// Atomic Lua script executed on the Redis server: increments the key and
// manages its TTL in a single round trip, so concurrent requests cannot race
// between INCR and PEXPIRE. Returns [current, remaining TTL in ms].
const lua = `
-- Key to operate on
local key = KEYS[1]
-- Time window for the TTL
local timeWindow = tonumber(ARGV[1])
-- Max requests
local max = tonumber(ARGV[2])
-- Flag to determine if TTL should be reset after exceeding
local continueExceeding = ARGV[3] == 'true'
--Flag to determine if exponential backoff should be applied
local exponentialBackoff = ARGV[4] == 'true'
--Max safe integer
local MAX_SAFE_INTEGER = (2^53) - 1
-- Increment the key's value
local current = redis.call('INCR', key)
if current == 1 or (continueExceeding and current > max) then
redis.call('PEXPIRE', key, timeWindow)
elseif exponentialBackoff and current > max then
local backoffExponent = current - max - 1
timeWindow = math.min(timeWindow * (2 ^ backoffExponent), MAX_SAFE_INTEGER)
redis.call('PEXPIRE', key, timeWindow)
else
timeWindow = redis.call('PTTL', key)
end
return {current, timeWindow}
`
/**
 * Redis-backed rate-limit store.
 *
 * @param {boolean} continueExceeding - reset the TTL on every over-limit hit.
 * @param {boolean} exponentialBackoff - grow the TTL exponentially once over the limit.
 * @param {object} redis - an ioredis-compatible client (must support defineCommand).
 * @param {string} [key='fastify-rate-limit-'] - key prefix for the counters.
 */
function RedisStore (continueExceeding, exponentialBackoff, redis, key = 'fastify-rate-limit-') {
  this.continueExceeding = continueExceeding
  this.exponentialBackoff = exponentialBackoff
  this.redis = redis
  this.key = key
  // Register the atomic Lua command once per client; child stores sharing the
  // same connection reuse the existing registration.
  if (!this.redis.rateLimit) {
    this.redis.defineCommand('rateLimit', {
      numberOfKeys: 1,
      lua
    })
  }
}
/**
 * Increment the counter for `ip` via the atomic server-side Lua command and
 * report `{ current, ttl }` through the callback.
 */
RedisStore.prototype.incr = function (ip, cb, timeWindow, max) {
  const redisKey = this.key + ip
  this.redis.rateLimit(redisKey, timeWindow, max, this.continueExceeding, this.exponentialBackoff, (err, result) => {
    if (err) {
      cb(err, null)
    } else {
      cb(null, { current: result[0], ttl: result[1] })
    }
  })
}
/**
 * Create a store scoped to a single route, reusing the parent's Redis client
 * but namespacing keys with the route's method and URL.
 */
RedisStore.prototype.child = function (routeOptions) {
  const { routeInfo } = routeOptions
  const childKey = `${this.key}${routeInfo.method}${routeInfo.url}-`
  return new RedisStore(routeOptions.continueExceeding, routeOptions.exponentialBackoff, this.redis, childKey)
}
module.exports = RedisStore

View File

@@ -0,0 +1,224 @@
'use strict'
// Integration tests for the `fastify.createRateLimit()` decorator: it returns
// a checker function that reports the limit state for a request without
// enforcing it (no 429s are sent). Uses node:test mock timers to control the
// time window deterministically.
const { test, mock } = require('node:test')
const Fastify = require('fastify')
const rateLimit = require('../index')
test('With global rate limit options', async t => {
  t.plan(8)
  const clock = mock.timers
  clock.enable(0)
  const fastify = Fastify()
  await fastify.register(rateLimit, {
    global: false,
    max: 2,
    timeWindow: 1000
  })
  // No options: the checker inherits the global max/timeWindow.
  const checkRateLimit = fastify.createRateLimit()
  fastify.get('/', async (req, reply) => {
    const limit = await checkRateLimit(req)
    return limit
  })
  let res
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.json(), {
    isAllowed: false,
    key: '127.0.0.1',
    max: 2,
    timeWindow: 1000,
    remaining: 1,
    ttl: 1000,
    ttlInSeconds: 1,
    isExceeded: false,
    isBanned: false
  })
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.json(), {
    isAllowed: false,
    key: '127.0.0.1',
    max: 2,
    timeWindow: 1000,
    remaining: 0,
    ttl: 1000,
    ttlInSeconds: 1,
    isExceeded: false,
    isBanned: false
  })
  // Third hit goes over max=2 but still returns 200: the checker only reports.
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.json(), {
    isAllowed: false,
    key: '127.0.0.1',
    max: 2,
    timeWindow: 1000,
    remaining: 0,
    ttl: 1000,
    ttlInSeconds: 1,
    isExceeded: true,
    isBanned: false
  })
  // Advance past the 1s window: the counter resets.
  clock.tick(1100)
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.json(), {
    isAllowed: false,
    key: '127.0.0.1',
    max: 2,
    timeWindow: 1000,
    remaining: 1,
    ttl: 1000,
    ttlInSeconds: 1,
    isExceeded: false,
    isBanned: false
  })
  clock.reset()
})
test('With custom rate limit options', async t => {
  t.plan(10)
  const clock = mock.timers
  clock.enable(0)
  const fastify = Fastify()
  await fastify.register(rateLimit, {
    global: false,
    max: 5,
    timeWindow: 1000
  })
  // Per-checker options override the globals (max 2 instead of 5, ban after 1 overshoot).
  const checkRateLimit = fastify.createRateLimit({
    max: 2,
    timeWindow: 1000,
    ban: 1
  })
  fastify.get('/', async (req, reply) => {
    const limit = await checkRateLimit(req)
    return limit
  })
  let res
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.json(), {
    isAllowed: false,
    key: '127.0.0.1',
    max: 2,
    timeWindow: 1000,
    remaining: 1,
    ttl: 1000,
    ttlInSeconds: 1,
    isExceeded: false,
    isBanned: false
  })
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.json(), {
    isAllowed: false,
    key: '127.0.0.1',
    max: 2,
    timeWindow: 1000,
    remaining: 0,
    ttl: 1000,
    ttlInSeconds: 1,
    isExceeded: false,
    isBanned: false
  })
  // should be exceeded now
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.json(), {
    isAllowed: false,
    key: '127.0.0.1',
    max: 2,
    timeWindow: 1000,
    remaining: 0,
    ttl: 1000,
    ttlInSeconds: 1,
    isExceeded: true,
    isBanned: false
  })
  // should be banned now
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.json(), {
    isAllowed: false,
    key: '127.0.0.1',
    max: 2,
    timeWindow: 1000,
    remaining: 0,
    ttl: 1000,
    ttlInSeconds: 1,
    isExceeded: true,
    isBanned: true
  })
  clock.tick(1100)
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.json(), {
    isAllowed: false,
    key: '127.0.0.1',
    max: 2,
    timeWindow: 1000,
    remaining: 1,
    ttl: 1000,
    ttlInSeconds: 1,
    isExceeded: false,
    isBanned: false
  })
  clock.reset()
})
// Allow-listed keys short-circuit the checker: it returns only
// { isAllowed: true, key } with no counter state.
test('With allow list', async t => {
  t.plan(2)
  const clock = mock.timers
  clock.enable(0)
  const fastify = Fastify()
  await fastify.register(rateLimit, {
    global: false,
    max: 5,
    timeWindow: 1000
  })
  const checkRateLimit = fastify.createRateLimit({
    allowList: ['127.0.0.1'],
    max: 2,
    timeWindow: 1000
  })
  fastify.get('/', async (req, reply) => {
    const limit = await checkRateLimit(req)
    return limit
  })
  const res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  // expect a different return type because isAllowed is true
  t.assert.deepStrictEqual(res.json(), {
    isAllowed: true,
    key: '127.0.0.1'
  })
  // Restore real timers: this test previously left the mock clock enabled,
  // leaking fake timers into any test that runs after it in the same process.
  clock.reset()
})

View File

@@ -0,0 +1,232 @@
'use strict'
// Tests for the exponentialBackoff option: once the limit is exceeded, the
// TTL grows (doubling per over-limit hit) instead of simply continuing.
const { test } = require('node:test')
const assert = require('node:assert')
const Fastify = require('fastify')
const rateLimit = require('../index')
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms))
test('Exponential Backoff', async () => {
  const fastify = Fastify()
  // Register rate limit plugin with exponentialBackoff set to true in routeConfig
  await fastify.register(rateLimit, { max: 2, timeWindow: 500 })
  fastify.get(
    '/expoential-backoff',
    {
      config: {
        rateLimit: {
          max: 2,
          timeWindow: 500,
          exponentialBackoff: true
        }
      }
    },
    async () => 'exponential backoff applied!'
  )
  // Test
  const res = await fastify.inject({ url: '/expoential-backoff', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 200)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '1')
  const res2 = await fastify.inject({ url: '/expoential-backoff', method: 'GET' })
  assert.deepStrictEqual(res2.statusCode, 200)
  assert.deepStrictEqual(res2.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res2.headers['x-ratelimit-remaining'], '0')
  const res3 = await fastify.inject({ url: '/expoential-backoff', method: 'GET' })
  assert.deepStrictEqual(res3.statusCode, 429)
  assert.deepStrictEqual(res3.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res3.headers['x-ratelimit-remaining'], '0')
  assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 1 second'
    },
    JSON.parse(res3.payload)
  )
  const res4 = await fastify.inject({ url: '/expoential-backoff', method: 'GET' })
  assert.deepStrictEqual(res4.statusCode, 429)
  assert.deepStrictEqual(res4.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res4.headers['x-ratelimit-remaining'], '0')
  assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 1 second'
    },
    JSON.parse(res4.payload)
  )
  // Wait for the window to reset
  await sleep(1000)
  const res5 = await fastify.inject({ url: '/expoential-backoff', method: 'GET' })
  assert.deepStrictEqual(res5.statusCode, 200)
  assert.deepStrictEqual(res5.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res5.headers['x-ratelimit-remaining'], '1')
})
// Global exponentialBackoff: route config inherits the flag from the plugin
// registration; retry-after messages should double (1s, 2s, 4s, ...).
test('Global Exponential Backoff', async () => {
  const fastify = Fastify()
  // Register rate limit plugin with exponentialBackoff set to true in routeConfig
  await fastify.register(rateLimit, { max: 2, timeWindow: 500, exponentialBackoff: true })
  fastify.get(
    '/expoential-backoff-global',
    {
      config: {
        rateLimit: {
          max: 2,
          timeWindow: 500
        }
      }
    },
    async () => 'exponential backoff applied!'
  )
  // Test
  let res
  res = await fastify.inject({ url: '/expoential-backoff-global', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 200)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '1')
  res = await fastify.inject({ url: '/expoential-backoff-global', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 200)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  res = await fastify.inject({ url: '/expoential-backoff-global', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 429)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 1 second'
    },
    JSON.parse(res.payload)
  )
  res = await fastify.inject({ url: '/expoential-backoff-global', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 429)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 1 second'
    },
    JSON.parse(res.payload)
  )
  res = await fastify.inject({ url: '/expoential-backoff-global', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 429)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 2 seconds'
    },
    JSON.parse(res.payload)
  )
  res = await fastify.inject({ url: '/expoential-backoff-global', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 429)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 4 seconds'
    },
    JSON.parse(res.payload)
  )
})
// Backoff TTL must clamp at Number.MAX_SAFE_INTEGER: with a huge time window,
// repeated over-limit hits keep reporting the same (clamped) retry time.
// (Test title typo fixed: 'MAx safe' -> 'Max safe'.)
test('Max safe Exponential Backoff', async () => {
  const fastify = Fastify()
  // Register rate limit plugin with exponentialBackoff set to true in routeConfig
  await fastify.register(rateLimit, { max: 2, timeWindow: 500, exponentialBackoff: true })
  fastify.get(
    '/expoential-backoff-global',
    {
      config: {
        rateLimit: {
          max: 2,
          timeWindow: '285421 years'
        }
      }
    },
    async () => 'exponential backoff applied!'
  )
  // Test
  let res
  res = await fastify.inject({ url: '/expoential-backoff-global', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 200)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '1')
  res = await fastify.inject({ url: '/expoential-backoff-global', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 200)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  res = await fastify.inject({ url: '/expoential-backoff-global', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 429)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 285421 years'
    },
    JSON.parse(res.payload)
  )
  res = await fastify.inject({ url: '/expoential-backoff-global', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 429)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 285421 years'
    },
    JSON.parse(res.payload)
  )
  res = await fastify.inject({ url: '/expoential-backoff-global', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 429)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 285421 years'
    },
    JSON.parse(res.payload)
  )
  res = await fastify.inject({ url: '/expoential-backoff-global', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 429)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 285421 years'
    },
    JSON.parse(res.payload)
  )
})

View File

@@ -0,0 +1,120 @@
'use strict'
// Regression test for issue #207: with `continueExceeding: true` and the
// local store, every over-limit request must restart the TTL, so the limit
// only clears after a full quiet window.
const { test, mock } = require('node:test')
const Fastify = require('fastify')
const rateLimit = require('../../index')
test('issue #207 - when continueExceeding is true and the store is local then it should reset the rate-limit', async (t) => {
  const clock = mock.timers
  clock.enable()
  const fastify = Fastify()
  await fastify.register(rateLimit, {
    global: false
  })
  fastify.get(
    '/',
    {
      config: {
        rateLimit: {
          max: 1,
          timeWindow: 5000,
          continueExceeding: true
        }
      }
    },
    async () => {
      return 'hello!'
    }
  )
  const firstOkResponse = await fastify.inject({
    url: '/',
    method: 'GET'
  })
  const firstRateLimitResponse = await fastify.inject({
    url: '/',
    method: 'GET'
  })
  clock.tick(3000)
  const secondRateLimitWithResettingTheRateLimitTimer = await fastify.inject({
    url: '/',
    method: 'GET'
  })
  // after this the total time passed is 6s which WITHOUT `continueExceeding` the next request should be OK
  clock.tick(3000)
  const thirdRateLimitWithResettingTheRateLimitTimer = await fastify.inject({
    url: '/',
    method: 'GET'
  })
  // After this the rate limiter should allow for new requests
  clock.tick(5000)
  const okResponseAfterRateLimitCompleted = await fastify.inject({
    url: '/',
    method: 'GET'
  })
  t.assert.deepStrictEqual(firstOkResponse.statusCode, 200)
  t.assert.deepStrictEqual(firstRateLimitResponse.statusCode, 429)
  t.assert.deepStrictEqual(
    firstRateLimitResponse.headers['x-ratelimit-limit'],
    '1'
  )
  t.assert.deepStrictEqual(
    firstRateLimitResponse.headers['x-ratelimit-remaining'],
    '0'
  )
  t.assert.deepStrictEqual(
    firstRateLimitResponse.headers['x-ratelimit-reset'],
    '5'
  )
  t.assert.deepStrictEqual(
    secondRateLimitWithResettingTheRateLimitTimer.statusCode,
    429
  )
  t.assert.deepStrictEqual(
    secondRateLimitWithResettingTheRateLimitTimer.headers['x-ratelimit-limit'],
    '1'
  )
  t.assert.deepStrictEqual(
    secondRateLimitWithResettingTheRateLimitTimer.headers[
      'x-ratelimit-remaining'
    ],
    '0'
  )
  t.assert.deepStrictEqual(
    secondRateLimitWithResettingTheRateLimitTimer.headers['x-ratelimit-reset'],
    '5'
  )
  t.assert.deepStrictEqual(
    thirdRateLimitWithResettingTheRateLimitTimer.statusCode,
    429
  )
  t.assert.deepStrictEqual(
    thirdRateLimitWithResettingTheRateLimitTimer.headers['x-ratelimit-limit'],
    '1'
  )
  t.assert.deepStrictEqual(
    thirdRateLimitWithResettingTheRateLimitTimer.headers[
      'x-ratelimit-remaining'
    ],
    '0'
  )
  t.assert.deepStrictEqual(
    thirdRateLimitWithResettingTheRateLimitTimer.headers['x-ratelimit-reset'],
    '5'
  )
  t.assert.deepStrictEqual(okResponseAfterRateLimitCompleted.statusCode, 200)
  clock.reset(0)
})

View File

@@ -0,0 +1,87 @@
'use strict'
// Regression test for issue #215: with the local store, each client key gets
// its own window start time; one client's window expiring must not reset (or
// extend) another client's counter.
const { test, mock } = require('node:test')
const Fastify = require('fastify')
const rateLimit = require('../../index')
test('issue #215 - when using local store, 2nd user should not be rate limited when the time window is passed for the 1st user', async (t) => {
  t.plan(5)
  const clock = mock.timers
  clock.enable()
  const fastify = Fastify()
  await fastify.register(rateLimit, {
    global: false
  })
  fastify.get(
    '/',
    {
      config: {
        rateLimit: {
          max: 1,
          timeWindow: 5000,
          continueExceeding: false
        }
      }
    },
    async () => 'hello!'
  )
  const user1FirstRequest = await fastify.inject({
    url: '/',
    method: 'GET',
    remoteAddress: '1.1.1.1'
  })
  // Waiting for the time to pass to make the 2nd user start in a different start point
  clock.tick(3000)
  const user2FirstRequest = await fastify.inject({
    url: '/',
    method: 'GET',
    remoteAddress: '2.2.2.2'
  })
  const user2SecondRequestAndShouldBeRateLimited = await fastify.inject({
    url: '/',
    method: 'GET',
    remoteAddress: '2.2.2.2'
  })
  // After this the total time passed for the 1st user is 6s and for the 2nd user only 3s
  clock.tick(3000)
  const user2ThirdRequestAndShouldStillBeRateLimited = await fastify.inject({
    url: '/',
    method: 'GET',
    remoteAddress: '2.2.2.2'
  })
  // After this the total time passed for the 2nd user is 5.1s - he should not be rate limited
  clock.tick(2100)
  const user2OkResponseAfterRateLimitCompleted = await fastify.inject({
    url: '/',
    method: 'GET',
    remoteAddress: '2.2.2.2'
  })
  t.assert.deepStrictEqual(user1FirstRequest.statusCode, 200)
  t.assert.deepStrictEqual(user2FirstRequest.statusCode, 200)
  t.assert.deepStrictEqual(
    user2SecondRequestAndShouldBeRateLimited.statusCode,
    429
  )
  t.assert.deepStrictEqual(
    user2ThirdRequestAndShouldStillBeRateLimited.statusCode,
    429
  )
  t.assert.deepStrictEqual(
    user2OkResponseAfterRateLimitCompleted.statusCode,
    200
  )
  clock.reset()
})

View File

@@ -0,0 +1,74 @@
'use strict'
// Regression test for issue #284: the plugin must throw the rate-limit error
// instead of writing the status code itself, so a custom error handler can
// pick a different response (here: a 302 redirect).
const { test, mock } = require('node:test')
const Fastify = require('fastify')
const rateLimit = require('../../index')
test("issue #284 - don't set the reply code automatically", async (t) => {
  const clock = mock.timers
  clock.enable()
  const fastify = Fastify()
  await fastify.register(rateLimit, {
    global: false
  })
  fastify.setErrorHandler((err, _req, res) => {
    // The reply status must still be the default 200 here; only the thrown
    // error carries the 429.
    t.assert.deepStrictEqual(res.statusCode, 200)
    t.assert.deepStrictEqual(err.statusCode, 429)
    res.redirect('/')
  })
  fastify.get(
    '/',
    {
      config: {
        rateLimit: {
          max: 1,
          timeWindow: 5000,
          continueExceeding: true
        }
      }
    },
    async () => {
      return 'hello!'
    }
  )
  const firstOkResponse = await fastify.inject({
    url: '/',
    method: 'GET'
  })
  const firstRateLimitResponse = await fastify.inject({
    url: '/',
    method: 'GET'
  })
  // After this the rate limiter should allow for new requests
  clock.tick(5000)
  const okResponseAfterRateLimitCompleted = await fastify.inject({
    url: '/',
    method: 'GET'
  })
  t.assert.deepStrictEqual(firstOkResponse.statusCode, 200)
  t.assert.deepStrictEqual(firstRateLimitResponse.statusCode, 302)
  t.assert.deepStrictEqual(
    firstRateLimitResponse.headers['x-ratelimit-limit'],
    '1'
  )
  t.assert.deepStrictEqual(
    firstRateLimitResponse.headers['x-ratelimit-remaining'],
    '0'
  )
  t.assert.deepStrictEqual(
    firstRateLimitResponse.headers['x-ratelimit-reset'],
    '5'
  )
  t.assert.deepStrictEqual(okResponseAfterRateLimitCompleted.statusCode, 200)
  clock.reset(0)
})

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,252 @@
'use strict'
// Tests for the `groupId` route option: routes sharing a groupId share one
// rate-limit counter, while routes without (or with different) groupIds are
// counted independently.
const { test } = require('node:test')
const assert = require('node:assert')
const Fastify = require('fastify')
const rateLimit = require('../index')
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms))
test('GroupId from routeConfig', async () => {
  const fastify = Fastify()
  // Register rate limit plugin with groupId in routeConfig
  await fastify.register(rateLimit, { max: 2, timeWindow: 500 })
  fastify.get(
    '/routeWithGroupId',
    {
      config: {
        rateLimit: {
          max: 2,
          timeWindow: 500,
          groupId: 'group1' // groupId specified in routeConfig
        }
      }
    },
    async () => 'hello from route with groupId!'
  )
  // Test: Request should have the correct groupId in response
  const res = await fastify.inject({ url: '/routeWithGroupId', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 200)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '1')
})
test('GroupId from routeOptions', async () => {
  const fastify = Fastify()
  // Register rate limit plugin with groupId in routeOptions
  await fastify.register(rateLimit, { max: 2, timeWindow: 500 })
  fastify.get(
    '/routeWithGroupIdFromOptions',
    {
      config: {
        rateLimit: {
          max: 2,
          timeWindow: 500
          // groupId not specified here
        }
      }
    },
    async () => 'hello from route with groupId from options!'
  )
  // Test: Request should have the correct groupId from routeOptions
  const res = await fastify.inject({ url: '/routeWithGroupIdFromOptions', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 200)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '1')
})
test('No groupId provided', async () => {
  const fastify = Fastify()
  // Register rate limit plugin without groupId
  await fastify.register(rateLimit, { max: 2, timeWindow: 500 })
  // Route without groupId
  fastify.get(
    '/noGroupId',
    {
      config: {
        rateLimit: {
          max: 2,
          timeWindow: 500
        }
      }
    },
    async () => 'hello from no groupId route!'
  )
  let res
  // Test without groupId
  res = await fastify.inject({ url: '/noGroupId', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 200)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '1')
  res = await fastify.inject({ url: '/noGroupId', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 200)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  // Third request exceeds the limit: expect 429 with Retry-After.
  res = await fastify.inject({ url: '/noGroupId', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 429)
  assert.deepStrictEqual(
    res.headers['content-type'],
    'application/json; charset=utf-8'
  )
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  assert.deepStrictEqual(res.headers['retry-after'], '1')
  assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 1 second'
    },
    JSON.parse(res.payload)
  )
})
test('With multiple routes and custom groupId', async () => {
  const fastify = Fastify()
  // Register rate limit plugin
  await fastify.register(rateLimit, { max: 2, timeWindow: 500 })
  // Route 1 with groupId 'group1'
  fastify.get(
    '/route1',
    {
      config: {
        rateLimit: {
          max: 2,
          timeWindow: 500,
          groupId: 'group1'
        }
      }
    },
    async () => 'hello from route 1!'
  )
  // Route 2 with groupId 'group2'
  fastify.get(
    '/route2',
    {
      config: {
        rateLimit: {
          max: 2,
          timeWindow: 1000,
          groupId: 'group2'
        }
      }
    },
    async () => 'hello from route 2!'
  )
  let res
  // Test Route 1
  res = await fastify.inject({ url: '/route1', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 200)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '1')
  res = await fastify.inject({ url: '/route1', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 200)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  res = await fastify.inject({ url: '/route1', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 429)
  assert.deepStrictEqual(
    res.headers['content-type'],
    'application/json; charset=utf-8'
  )
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  assert.deepStrictEqual(res.headers['retry-after'], '1')
  assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 1 second'
    },
    JSON.parse(res.payload)
  )
  // Test Route 2 (separate group: its counter starts fresh)
  res = await fastify.inject({ url: '/route2', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 200)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '1')
  res = await fastify.inject({ url: '/route2', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 200)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  res = await fastify.inject({ url: '/route2', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 429)
  assert.deepStrictEqual(
    res.headers['content-type'],
    'application/json; charset=utf-8'
  )
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  assert.deepStrictEqual(res.headers['retry-after'], '1')
  assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 1 second'
    },
    JSON.parse(res.payload)
  )
  // Wait for the window to reset
  await sleep(1000)
  // After reset, Route 1 should succeed again
  res = await fastify.inject({ url: '/route1', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 200)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '1')
  // Route 2 should also succeed after the reset
  res = await fastify.inject({ url: '/route2', method: 'GET' })
  assert.deepStrictEqual(res.statusCode, 200)
  assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '1')
})
// groupId must be a string: registering a route with a non-string groupId
// should throw synchronously from the plugin's onRoute hook.
test('Invalid groupId type', async () => {
  const fastify = Fastify()
  // Register rate limit plugin with a route having an invalid groupId
  await fastify.register(rateLimit, { max: 2, timeWindow: 1000 })
  try {
    fastify.get(
      '/invalidGroupId',
      {
        config: {
          rateLimit: {
            max: 2,
            timeWindow: 1000,
            groupId: 123 // Invalid groupId type
          }
        }
      },
      async () => 'hello with invalid groupId!'
    )
    // Reaching this point means no error was thrown; fail the test.
    // (Removed unreachable leftover debug statement `console.log('HER')`
    // that followed assert.fail and could never execute.)
    assert.fail('should throw')
  } catch (err) {
    assert.deepStrictEqual(err.message, 'groupId must be a string')
  }
})

View File

@@ -0,0 +1,18 @@
'use strict'
const { test } = require('node:test')
const Fastify = require('fastify')
const rateLimit = require('../index')
test('Fastify close on local store', async (t) => {
  t.plan(1)
  const app = Fastify()
  await app.register(rateLimit, { max: 2, timeWindow: 1000 })
  // The plugin's local store must not keep timers alive that would block a
  // clean shutdown; the onClose hook should run exactly once on close.
  let closedHooks = 0
  app.addHook('onClose', (_instance, done) => {
    closedHooks += 1
    done()
  })
  await app.close()
  t.assert.deepStrictEqual(closedHooks, 1)
})

View File

@@ -0,0 +1,116 @@
'use strict'
const { test } = require('node:test')
const Fastify = require('fastify')
const rateLimit = require('../index')
test('Set not found handler can be rate limited', async (t) => {
  t.plan(18)
  const fastify = Fastify()
  await fastify.register(rateLimit, { max: 2, timeWindow: 1000 })
  t.assert.ok(fastify.rateLimit)

  // The 404 handler itself is guarded by the limiter via its preHandler.
  fastify.setNotFoundHandler(
    { preHandler: fastify.rateLimit() },
    function (_request, reply) {
      t.assert.ok('Error handler has been called')
      reply.status(404).send(new Error('Not found'))
    }
  )

  // Two requests within the window reach the handler and 404 ...
  for (const remaining of ['1', '0']) {
    const res = await fastify.inject('/not-found')
    t.assert.deepStrictEqual(res.statusCode, 404)
    t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
    t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], remaining)
    t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '1')
  }

  // ... the third is rejected with 429 before the handler ever runs.
  const limited = await fastify.inject('/not-found')
  t.assert.deepStrictEqual(limited.statusCode, 429)
  t.assert.deepStrictEqual(
    limited.headers['content-type'],
    'application/json; charset=utf-8'
  )
  t.assert.deepStrictEqual(limited.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(limited.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(limited.headers['x-ratelimit-reset'], '1')
  t.assert.deepStrictEqual(limited.headers['retry-after'], '1')
  t.assert.deepStrictEqual(JSON.parse(limited.payload), {
    statusCode: 429,
    error: 'Too Many Requests',
    message: 'Rate limit exceeded, retry in 1 second'
  })
})
test('Set not found handler can be rate limited with specific options', async (t) => {
  t.plan(28)
  const fastify = Fastify()
  await fastify.register(rateLimit, { max: 2, timeWindow: 1000 })
  t.assert.ok(fastify.rateLimit)

  // Handler-specific options (max 4 / 2s window) override the global
  // plugin options (max 2 / 1s window).
  fastify.setNotFoundHandler(
    {
      preHandler: fastify.rateLimit({
        max: 4,
        timeWindow: 2000
      })
    },
    function (_request, reply) {
      t.assert.ok('Error handler has been called')
      reply.status(404).send(new Error('Not found'))
    }
  )

  // Four requests reach the handler, counting the quota down ...
  for (const remaining of ['3', '2', '1', '0']) {
    const res = await fastify.inject('/not-found')
    t.assert.deepStrictEqual(res.statusCode, 404)
    t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '4')
    t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], remaining)
    t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '2')
  }

  // ... the fifth is blocked with 429 using the route-specific window.
  const limited = await fastify.inject('/not-found')
  t.assert.deepStrictEqual(limited.statusCode, 429)
  t.assert.deepStrictEqual(
    limited.headers['content-type'],
    'application/json; charset=utf-8'
  )
  t.assert.deepStrictEqual(limited.headers['x-ratelimit-limit'], '4')
  t.assert.deepStrictEqual(limited.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(limited.headers['retry-after'], '2')
  t.assert.deepStrictEqual(limited.headers['x-ratelimit-reset'], '2')
  t.assert.deepStrictEqual(JSON.parse(limited.payload), {
    statusCode: 429,
    error: 'Too Many Requests',
    message: 'Rate limit exceeded, retry in 2 seconds'
  })
})

View File

@@ -0,0 +1,753 @@
'use strict'
const { test, describe } = require('node:test')
const Redis = require('ioredis')
const Fastify = require('fastify')
const rateLimit = require('../index')
// Promise-based delay helper used to let rate-limit windows expire.
const sleep = (ms) => {
  return new Promise((resolve) => setTimeout(resolve, ms))
}
const REDIS_HOST = '127.0.0.1'
describe('Global rate limit', () => {
test('With redis store', async (t) => {
  // Plan reduced from 21 to 19: the original duplicated the first
  // status-code assertion and added a vacuous `t.assert.ok(res)`.
  t.plan(19)
  const fastify = Fastify()
  const redis = new Redis({ host: REDIS_HOST })
  await fastify.register(rateLimit, {
    max: 2,
    timeWindow: 1000,
    redis
  })
  fastify.get('/', async () => 'hello!')
  let res
  res = await fastify.inject('/')
  t.assert.strictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '1')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '1')
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '1')
  await sleep(100)
  // Third request inside the window is rejected.
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 429)
  t.assert.deepStrictEqual(
    res.headers['content-type'],
    'application/json; charset=utf-8'
  )
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '1')
  t.assert.deepStrictEqual(res.headers['retry-after'], '1')
  t.assert.deepStrictEqual(JSON.parse(res.payload), {
    statusCode: 429,
    error: 'Too Many Requests',
    message: 'Rate limit exceeded, retry in 1 second'
  })
  // Not using fake timers here as we use an external Redis that would not be effected by this
  await sleep(1100)
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '1')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '1')
  await redis.flushall()
  await redis.quit()
})
// With `ban: 1` the client gets `max` successful requests, then one 429,
// and any further request inside the window is answered with 403 Forbidden.
test('With redis store (ban)', async (t) => {
  t.plan(19)
  const fastify = Fastify()
  const redis = await new Redis({ host: REDIS_HOST })
  await fastify.register(rateLimit, {
    max: 1,
    ban: 1,
    timeWindow: 1000,
    redis
  })
  fastify.get('/', async () => 'hello!')
  let res
  // 1st request: allowed; quota (max: 1) exhausted immediately.
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '1')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '1')
  // 2nd request: over the limit -> 429.
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 429)
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '1')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '1')
  // 3rd request: exceeds the ban threshold -> 403 Forbidden.
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 403)
  t.assert.deepStrictEqual(
    res.headers['content-type'],
    'application/json; charset=utf-8'
  )
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '1')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '1')
  t.assert.deepStrictEqual(res.headers['retry-after'], '1')
  t.assert.deepStrictEqual(
    {
      statusCode: 403,
      error: 'Forbidden',
      message: 'Rate limit exceeded, retry in 1 second'
    },
    JSON.parse(res.payload)
  )
  // Not using fake timers here as we use an external Redis that would not be effected by this
  await sleep(1100)
  // After the window expires the ban is lifted and requests succeed again.
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '1')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '1')
  await redis.flushall()
  await redis.quit()
})
test('Skip on redis error', async (t) => {
  t.plan(9)
  const fastify = Fastify()
  const redis = await new Redis({ host: REDIS_HOST })
  await fastify.register(rateLimit, {
    max: 2,
    timeWindow: 1000,
    redis,
    skipOnError: true
  })
  fastify.get('/', async () => 'hello!')
  // First request is counted normally while Redis is reachable.
  const counted = await fastify.inject('/')
  t.assert.deepStrictEqual(counted.statusCode, 200)
  t.assert.deepStrictEqual(counted.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(counted.headers['x-ratelimit-remaining'], '1')
  // Tear the connection down: with skipOnError the limiter fails open and
  // each subsequent request succeeds with an untouched quota.
  await redis.flushall()
  await redis.quit()
  for (let attempt = 0; attempt < 2; attempt++) {
    const res = await fastify.inject('/')
    t.assert.deepStrictEqual(res.statusCode, 200)
    t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
    t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '2')
  }
})
test('Throw on redis error', async (t) => {
  t.plan(5)
  const fastify = Fastify()
  const redis = await new Redis({ host: REDIS_HOST })
  await fastify.register(rateLimit, {
    max: 2,
    timeWindow: 1000,
    redis,
    skipOnError: false
  })
  fastify.get('/', async () => 'hello!')
  // One successful, counted request while the store is healthy.
  const ok = await fastify.inject('/')
  t.assert.deepStrictEqual(ok.statusCode, 200)
  t.assert.deepStrictEqual(ok.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(ok.headers['x-ratelimit-remaining'], '1')
  // With skipOnError disabled a store failure surfaces as a 500.
  await redis.flushall()
  await redis.quit()
  const failed = await fastify.inject('/')
  t.assert.deepStrictEqual(failed.statusCode, 500)
  t.assert.deepStrictEqual(
    failed.body,
    '{"statusCode":500,"error":"Internal Server Error","message":"Connection is closed."}'
  )
})
test('When continue exceeding is on (Redis)', async (t) => {
  // Added t.plan so a silently skipped assertion fails the test,
  // matching the sibling tests in this suite.
  t.plan(5)
  const fastify = Fastify()
  const redis = await new Redis({ host: REDIS_HOST })
  await fastify.register(rateLimit, {
    redis,
    max: 1,
    timeWindow: 5000,
    continueExceeding: true
  })
  fastify.get('/', async () => 'hello!')
  const first = await fastify.inject({
    url: '/',
    method: 'GET'
  })
  const second = await fastify.inject({
    url: '/',
    method: 'GET'
  })
  t.assert.deepStrictEqual(first.statusCode, 200)
  t.assert.deepStrictEqual(second.statusCode, 429)
  t.assert.deepStrictEqual(second.headers['x-ratelimit-limit'], '1')
  t.assert.deepStrictEqual(second.headers['x-ratelimit-remaining'], '0')
  // continueExceeding re-arms the full 5s window on the blocked request.
  t.assert.deepStrictEqual(second.headers['x-ratelimit-reset'], '5')
  await redis.flushall()
  await redis.quit()
})
// With continueExceeding, each *blocked* request re-arms the full window TTL,
// but *successful* requests must still report the shrinking remaining TTL in
// x-ratelimit-reset rather than always echoing the whole timeWindow.
test('Redis with continueExceeding should not always return the timeWindow as ttl', async (t) => {
  t.plan(19)
  const fastify = Fastify()
  const redis = await new Redis({ host: REDIS_HOST })
  await fastify.register(rateLimit, {
    max: 2,
    timeWindow: 3000,
    continueExceeding: true,
    redis
  })
  fastify.get('/', async () => 'hello!')
  let res
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '1')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '3')
  // After this sleep, we should not see `x-ratelimit-reset === 3` anymore
  await sleep(1000)
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '2')
  // Third request is blocked; continueExceeding resets the TTL back to 3s.
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 429)
  t.assert.deepStrictEqual(
    res.headers['content-type'],
    'application/json; charset=utf-8'
  )
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '3')
  t.assert.deepStrictEqual(res.headers['retry-after'], '3')
  t.assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 3 seconds'
    },
    JSON.parse(res.payload)
  )
  // Not using fake timers here as we use an external Redis that would not be effected by this
  await sleep(1000)
  // Still blocked, and the blocked request re-armed the TTL to 3s again.
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 429)
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '3')
  await redis.flushall()
  await redis.quit()
})
test('When use a custom nameSpace', async (t) => {
  // Added t.plan so the test fails if any assertion is skipped,
  // matching the other tests in this suite.
  t.plan(19)
  const fastify = Fastify()
  const redis = await new Redis({ host: REDIS_HOST })
  await fastify.register(rateLimit, {
    max: 2,
    timeWindow: 1000,
    redis,
    nameSpace: 'my-namespace:',
    keyGenerator: (req) => req.headers['x-my-header']
  })
  fastify.get('/', async () => 'hello!')
  // The custom keyGenerator buckets requests by this header value.
  const allowListHeader = {
    method: 'GET',
    url: '/',
    headers: {
      'x-my-header': 'custom name space'
    }
  }
  let res
  res = await fastify.inject(allowListHeader)
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '1')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '1')
  res = await fastify.inject(allowListHeader)
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '1')
  res = await fastify.inject(allowListHeader)
  t.assert.deepStrictEqual(res.statusCode, 429)
  t.assert.deepStrictEqual(
    res.headers['content-type'],
    'application/json; charset=utf-8'
  )
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '1')
  t.assert.deepStrictEqual(res.headers['retry-after'], '1')
  t.assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 1 second'
    },
    JSON.parse(res.payload)
  )
  // Not using fake timers here as we use an external Redis that would not be effected by this
  await sleep(1100)
  res = await fastify.inject(allowListHeader)
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '1')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '1')
  await redis.flushall()
  await redis.quit()
})
// With exponentialBackoff, each consecutive blocked request doubles the
// retry window: 1s on the first 429, 2s on the second, and so on.
test('With redis store and exponential backoff', async (t) => {
  t.plan(20)
  const fastify = Fastify()
  const redis = await new Redis({ host: REDIS_HOST })
  await fastify.register(rateLimit, {
    max: 2,
    timeWindow: 1000,
    redis,
    exponentialBackoff: true
  })
  fastify.get('/', async () => 'hello!')
  let res
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '1')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '1')
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '1')
  // First attempt over the limit should have the normal timeWindow (1000ms)
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 429)
  t.assert.deepStrictEqual(
    res.headers['content-type'],
    'application/json; charset=utf-8'
  )
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '1')
  t.assert.deepStrictEqual(res.headers['retry-after'], '1')
  t.assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 1 second'
    },
    JSON.parse(res.payload)
  )
  // Second attempt over the limit should have doubled timeWindow (2000ms)
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 429)
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(res.headers['retry-after'], '2')
  t.assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 2 seconds'
    },
    JSON.parse(res.payload)
  )
  await redis.flushall()
  await redis.quit()
})
})
describe('Route rate limit', () => {
// Route-level limiting: the plugin is registered with `global: false` and the
// limit comes from the route's `config.rateLimit`; unrelated keys in `config`
// (someOtherPlugin) must be ignored by the limiter.
test('With redis store', async t => {
  t.plan(19)
  const fastify = Fastify()
  const redis = new Redis({ host: REDIS_HOST })
  await fastify.register(rateLimit, {
    global: false,
    redis
  })
  fastify.get('/', {
    config: {
      rateLimit: {
        max: 2,
        timeWindow: 1000
      },
      someOtherPlugin: {
        someValue: 1
      }
    }
  }, async () => 'hello!')
  let res
  res = await fastify.inject('/')
  t.assert.strictEqual(res.statusCode, 200)
  t.assert.strictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.strictEqual(res.headers['x-ratelimit-remaining'], '1')
  t.assert.strictEqual(res.headers['x-ratelimit-reset'], '1')
  res = await fastify.inject('/')
  t.assert.strictEqual(res.statusCode, 200)
  t.assert.strictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.strictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.strictEqual(res.headers['x-ratelimit-reset'], '1')
  // Third request within the window is rejected with 429.
  res = await fastify.inject('/')
  t.assert.strictEqual(res.statusCode, 429)
  t.assert.strictEqual(res.headers['content-type'], 'application/json; charset=utf-8')
  t.assert.strictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.strictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.strictEqual(res.headers['x-ratelimit-reset'], '1')
  t.assert.strictEqual(res.headers['retry-after'], '1')
  t.assert.deepStrictEqual({
    statusCode: 429,
    error: 'Too Many Requests',
    message: 'Rate limit exceeded, retry in 1 second'
  }, JSON.parse(res.payload))
  // Not using fake timers here as we use an external Redis that would not be effected by this
  await sleep(1100)
  res = await fastify.inject('/')
  t.assert.strictEqual(res.statusCode, 200)
  t.assert.strictEqual(res.headers['x-ratelimit-limit'], '2')
  t.assert.strictEqual(res.headers['x-ratelimit-remaining'], '1')
  t.assert.strictEqual(res.headers['x-ratelimit-reset'], '1')
  await redis.flushall()
  await redis.quit()
})
test('Throw on redis error', async (t) => {
  t.plan(6)
  const fastify = Fastify()
  const redis = new Redis({ host: REDIS_HOST })
  await fastify.register(rateLimit, {
    redis,
    global: false
  })
  fastify.get(
    '/',
    {
      config: {
        rateLimit: {
          max: 2,
          timeWindow: 1000,
          skipOnError: false
        }
      }
    },
    async () => 'hello!'
  )
  // One successful, counted request while the store is healthy.
  const ok = await fastify.inject('/')
  t.assert.deepStrictEqual(ok.statusCode, 200)
  t.assert.deepStrictEqual(ok.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(ok.headers['x-ratelimit-remaining'], '1')
  t.assert.deepStrictEqual(ok.headers['x-ratelimit-reset'], '1')
  // Kill the connection: route-level skipOnError:false must surface a 500.
  await redis.flushall()
  await redis.quit()
  const failed = await fastify.inject('/')
  t.assert.deepStrictEqual(failed.statusCode, 500)
  t.assert.deepStrictEqual(
    failed.body,
    '{"statusCode":500,"error":"Internal Server Error","message":"Connection is closed."}'
  )
})
test('Skip on redis error', async (t) => {
  t.plan(9)
  const fastify = Fastify()
  const redis = new Redis({ host: REDIS_HOST })
  await fastify.register(rateLimit, {
    redis,
    global: false
  })
  fastify.get(
    '/',
    {
      config: {
        rateLimit: {
          max: 2,
          timeWindow: 1000,
          skipOnError: true
        }
      }
    },
    async () => 'hello!'
  )
  // First request is counted normally while Redis is reachable.
  const counted = await fastify.inject('/')
  t.assert.deepStrictEqual(counted.statusCode, 200)
  t.assert.deepStrictEqual(counted.headers['x-ratelimit-limit'], '2')
  t.assert.deepStrictEqual(counted.headers['x-ratelimit-remaining'], '1')
  // Once Redis is gone, route-level skipOnError makes the limiter fail
  // open: every request succeeds and reports an untouched quota.
  await redis.flushall()
  await redis.quit()
  for (let attempt = 0; attempt < 2; attempt++) {
    const res = await fastify.inject('/')
    t.assert.deepStrictEqual(res.statusCode, 200)
    t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '2')
    t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '2')
  }
})
test('When continue exceeding is on (Redis)', async (t) => {
  // Added t.plan so a silently skipped assertion fails the test,
  // matching the sibling tests in this suite.
  t.plan(5)
  const fastify = Fastify()
  const redis = await new Redis({ host: REDIS_HOST })
  await fastify.register(rateLimit, {
    global: false,
    redis
  })
  fastify.get(
    '/',
    {
      config: {
        rateLimit: {
          timeWindow: 5000,
          max: 1,
          continueExceeding: true
        }
      }
    },
    async () => 'hello!'
  )
  const first = await fastify.inject({
    url: '/',
    method: 'GET'
  })
  const second = await fastify.inject({
    url: '/',
    method: 'GET'
  })
  t.assert.deepStrictEqual(first.statusCode, 200)
  t.assert.deepStrictEqual(second.statusCode, 429)
  t.assert.deepStrictEqual(second.headers['x-ratelimit-limit'], '1')
  t.assert.deepStrictEqual(second.headers['x-ratelimit-remaining'], '0')
  // continueExceeding re-arms the full 5s window on the blocked request.
  t.assert.deepStrictEqual(second.headers['x-ratelimit-reset'], '5')
  await redis.flushall()
  await redis.quit()
})
test('When continue exceeding is off under route (Redis)', async (t) => {
  // Added t.plan so a silently skipped assertion fails the test,
  // matching the sibling tests in this suite.
  t.plan(9)
  const fastify = Fastify()
  const redis = await new Redis({ host: REDIS_HOST })
  await fastify.register(rateLimit, {
    global: false,
    continueExceeding: true,
    redis
  })
  // The route-level continueExceeding:false overrides the global true.
  fastify.get(
    '/',
    {
      config: {
        rateLimit: {
          timeWindow: 5000,
          max: 1,
          continueExceeding: false
        }
      }
    },
    async () => 'hello!'
  )
  const first = await fastify.inject({
    url: '/',
    method: 'GET'
  })
  const second = await fastify.inject({
    url: '/',
    method: 'GET'
  })
  await sleep(2000)
  const third = await fastify.inject({
    url: '/',
    method: 'GET'
  })
  t.assert.deepStrictEqual(first.statusCode, 200)
  t.assert.deepStrictEqual(second.statusCode, 429)
  t.assert.deepStrictEqual(second.headers['x-ratelimit-limit'], '1')
  t.assert.deepStrictEqual(second.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(second.headers['x-ratelimit-reset'], '5')
  // With continueExceeding off, the TTL keeps counting down (5s - 2s = 3s)
  // instead of being re-armed by the blocked request.
  t.assert.deepStrictEqual(third.statusCode, 429)
  t.assert.deepStrictEqual(third.headers['x-ratelimit-limit'], '1')
  t.assert.deepStrictEqual(third.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(third.headers['x-ratelimit-reset'], '3')
  await redis.flushall()
  await redis.quit()
})
// Route-level exponentialBackoff: every consecutive blocked request doubles
// the retry-after window (1s -> 2s -> 4s).
test('Route-specific exponential backoff with redis store', async (t) => {
  t.plan(17)
  const fastify = Fastify()
  const redis = await new Redis({ host: REDIS_HOST })
  await fastify.register(rateLimit, {
    global: false,
    redis
  })
  fastify.get('/', {
    config: {
      rateLimit: {
        max: 1,
        timeWindow: 1000,
        exponentialBackoff: true
      }
    }
  }, async () => 'hello!')
  let res
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 200)
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '1')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-reset'], '1')
  // First attempt over the limit should have the normal timeWindow (1000ms)
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 429)
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '1')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(res.headers['retry-after'], '1')
  t.assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 1 second'
    },
    JSON.parse(res.payload)
  )
  // Second attempt over the limit should have doubled timeWindow (2000ms)
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 429)
  t.assert.deepStrictEqual(res.headers['x-ratelimit-limit'], '1')
  t.assert.deepStrictEqual(res.headers['x-ratelimit-remaining'], '0')
  t.assert.deepStrictEqual(res.headers['retry-after'], '2')
  t.assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 2 seconds'
    },
    JSON.parse(res.payload)
  )
  // Third attempt over the limit should have quadrupled timeWindow (4000ms)
  res = await fastify.inject('/')
  t.assert.deepStrictEqual(res.statusCode, 429)
  t.assert.deepStrictEqual(res.headers['retry-after'], '4')
  t.assert.deepStrictEqual(
    {
      statusCode: 429,
      error: 'Too Many Requests',
      message: 'Rate limit exceeded, retry in 4 seconds'
    },
    JSON.parse(res.payload)
  )
  await redis.flushall()
  await redis.quit()
})
})

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,169 @@
/// <reference types='node' />
import {
ContextConfigDefault,
FastifyPluginCallback,
FastifyRequest,
FastifySchema,
preHandlerAsyncHookHandler,
RouteGenericInterface,
RouteOptions
} from 'fastify'
declare module 'fastify' {
  interface FastifyInstance<RawServer, RawRequest, RawReply, Logger, TypeProvider> {
    /**
     * Builds a standalone limit checker: the returned function evaluates a
     * request against the configured store without sending a response.
     * The union is discriminated on `isAllowed`; the rejected branch exposes
     * the full bookkeeping (quota, TTL, exceeded/banned flags).
     */
    createRateLimit(options?: fastifyRateLimit.CreateRateLimitOptions): (req: FastifyRequest) => Promise<
      | {
        isAllowed: true
        key: string
      }
      | {
        isAllowed: false
        key: string
        max: number
        timeWindow: number
        remaining: number
        ttl: number
        ttlInSeconds: number
        isExceeded: boolean
        isBanned: boolean
      }
    >
    /**
     * Returns a preHandler hook that enforces a rate limit; used by routes
     * and custom handlers (e.g. `setNotFoundHandler`) that opt in manually.
     */
    rateLimit<
      RouteGeneric extends RouteGenericInterface = RouteGenericInterface,
      ContextConfig = ContextConfigDefault,
      SchemaCompiler extends FastifySchema = FastifySchema
    >(options?: fastifyRateLimit.RateLimitOptions): preHandlerAsyncHookHandler<
      RawServer,
      RawRequest,
      RawReply,
      RouteGeneric,
      ContextConfig,
      SchemaCompiler,
      TypeProvider,
      Logger
    >;
  }
  interface FastifyContextConfig {
    // Per-route limiter configuration; `false` disables it for the route.
    rateLimit?: fastifyRateLimit.RateLimitOptions | false;
  }
}
type FastifyRateLimit = FastifyPluginCallback<fastifyRateLimit.RateLimitPluginOptions>
declare namespace fastifyRateLimit {
  export interface FastifyRateLimitOptions { }
  /**
   * Context handed to `errorResponseBuilder` when a request is rejected.
   * NOTE: lower-case name is part of the published API surface — keep as is.
   */
  export interface errorResponseBuilderContext {
    statusCode: number;
    ban: boolean;
    after: string;
    max: number;
    ttl: number;
  }
  export interface FastifyRateLimitStoreCtor {
    new(options: FastifyRateLimitOptions): FastifyRateLimitStore;
  }
  /**
   * Contract for pluggable stores: callback-style `incr` per key, and a
   * `child` factory producing a store scoped to one route.
   */
  export interface FastifyRateLimitStore {
    incr(
      key: string,
      callback: (
        error: Error | null,
        result?: { current: number; ttl: number }
      ) => void
    ): void;
    child(
      routeOptions: RouteOptions & { path: string; prefix: string }
    ): FastifyRateLimitStore;
  }
  // Toggle maps for the classic x-ratelimit-* headers ...
  interface DefaultAddHeaders {
    'x-ratelimit-limit'?: boolean;
    'x-ratelimit-remaining'?: boolean;
    'x-ratelimit-reset'?: boolean;
    'retry-after'?: boolean;
  }
  // ... and for the IETF draft-spec ratelimit-* variants.
  interface DraftSpecAddHeaders {
    'ratelimit-limit'?: boolean;
    'ratelimit-remaining'?: boolean;
    'ratelimit-reset'?: boolean;
    'retry-after'?: boolean;
  }
  interface DefaultAddHeadersOnExceeding {
    'x-ratelimit-limit'?: boolean;
    'x-ratelimit-remaining'?: boolean;
    'x-ratelimit-reset'?: boolean;
  }
  interface DraftSpecAddHeadersOnExceeding {
    'ratelimit-limit'?: boolean;
    'ratelimit-remaining'?: boolean;
    'ratelimit-reset'?: boolean;
  }
  /** Options accepted by `fastify.createRateLimit()`. */
  export interface CreateRateLimitOptions {
    store?: FastifyRateLimitStoreCtor;
    skipOnError?: boolean;
    // `max` / `timeWindow` may be static or computed per request+key.
    max?:
    | number
    | ((req: FastifyRequest, key: string) => number)
    | ((req: FastifyRequest, key: string) => Promise<number>);
    timeWindow?:
    | number
    | string
    | ((req: FastifyRequest, key: string) => number)
    | ((req: FastifyRequest, key: string) => Promise<number>);
    /**
     * @deprecated Use `allowList` property
     */
    whitelist?: string[] | ((req: FastifyRequest, key: string) => boolean);
    allowList?: string[] | ((req: FastifyRequest, key: string) => boolean | Promise<boolean>);
    keyGenerator?: (req: FastifyRequest) => string | number | Promise<string | number>;
    ban?: number;
  }
  // Lifecycle hook the limiter may attach to.
  export type RateLimitHook =
    | 'onRequest'
    | 'preParsing'
    | 'preValidation'
    | 'preHandler'
  /** Per-route options (superset of the standalone-checker options). */
  export interface RateLimitOptions extends CreateRateLimitOptions {
    hook?: RateLimitHook;
    cache?: number;
    continueExceeding?: boolean;
    onBanReach?: (req: FastifyRequest, key: string) => void;
    groupId?: string;
    errorResponseBuilder?: (
      req: FastifyRequest,
      context: errorResponseBuilderContext
    ) => object;
    enableDraftSpec?: boolean;
    onExceeding?: (req: FastifyRequest, key: string) => void;
    onExceeded?: (req: FastifyRequest, key: string) => void;
    exponentialBackoff?: boolean;
  }
  /** Options accepted at plugin registration time. */
  export interface RateLimitPluginOptions extends RateLimitOptions {
    global?: boolean;
    cache?: number;
    redis?: any;
    nameSpace?: string;
    addHeaders?: DefaultAddHeaders | DraftSpecAddHeaders;
    addHeadersOnExceeding?:
    | DefaultAddHeadersOnExceeding
    | DraftSpecAddHeadersOnExceeding;
  }
  export const fastifyRateLimit: FastifyRateLimit
  export { fastifyRateLimit as default }
}
declare function fastifyRateLimit (...params: Parameters<FastifyRateLimit>): ReturnType<FastifyRateLimit>
export = fastifyRateLimit

View File

@@ -0,0 +1,277 @@
import fastify, {
FastifyInstance,
FastifyRequest,
preHandlerAsyncHookHandler,
RequestGenericInterface,
RouteOptions
} from 'fastify'
import * as http2 from 'node:http2'
import IORedis from 'ioredis'
import pino from 'pino'
import fastifyRateLimit, {
CreateRateLimitOptions,
errorResponseBuilderContext,
FastifyRateLimitOptions,
FastifyRateLimitStore,
RateLimitPluginOptions
} from '..'
import { expectAssignable, expectType } from 'tsd'
// Minimal FastifyRateLimitStore implementation used only to type-check the
// `store` plugin option: `incr` is a no-op and `child` returns a casted stub.
class CustomStore implements FastifyRateLimitStore {
  options: FastifyRateLimitOptions
  constructor (options: FastifyRateLimitOptions) {
    this.options = options
  }
  incr (
    _key: string,
    _callback: (
      error: Error | null,
      result?: { current: number; ttl: number }
    ) => void
  ) {}
  child (_routeOptions: RouteOptions & { path: string; prefix: string }) {
    return <CustomStore>(<FastifyRateLimitOptions>{})
  }
}
const appWithImplicitHttp = fastify()
const options1: RateLimitPluginOptions = {
global: true,
max: 3,
timeWindow: 5000,
cache: 10000,
allowList: ['127.0.0.1'],
redis: new IORedis({ host: '127.0.0.1' }),
skipOnError: true,
ban: 10,
continueExceeding: false,
keyGenerator: (req: FastifyRequest<RequestGenericInterface>) => req.ip,
groupId: '42',
errorResponseBuilder: (
req: FastifyRequest<RequestGenericInterface>,
context: errorResponseBuilderContext
) => {
if (context.ban) {
return {
statusCode: 403,
error: 'Forbidden',
message: `You can not access this service as you have sent too many requests that exceed your rate limit. Your IP: ${req.ip} and Limit: ${context.max}`,
}
} else {
return {
statusCode: 429,
error: 'Too Many Requests',
message: `You hit the rate limit, please slow down! You can retry in ${context.after}`,
}
}
},
addHeadersOnExceeding: {
'x-ratelimit-limit': false,
'x-ratelimit-remaining': false,
'x-ratelimit-reset': false
},
addHeaders: {
'x-ratelimit-limit': false,
'x-ratelimit-remaining': false,
'x-ratelimit-reset': false,
'retry-after': false
},
onExceeding: (_req: FastifyRequest<RequestGenericInterface>, _key: string) => ({}),
onExceeded: (_req: FastifyRequest<RequestGenericInterface>, _key: string) => ({}),
onBanReach: (_req: FastifyRequest<RequestGenericInterface>, _key: string) => ({})
}
// tsd type-assertion fixtures: each `options*` constant exercises a different
// combination of RateLimitPluginOptions member signatures.

// Sync `max` callback, sync allowList callback, 'preParsing' hook.
const options2: RateLimitPluginOptions = {
  global: true,
  max: (_req: FastifyRequest<RequestGenericInterface>, _key: string) => 42,
  allowList: (_req: FastifyRequest<RequestGenericInterface>, _key: string) => false,
  timeWindow: 5000,
  hook: 'preParsing'
}

// Custom store implementation with the 'preValidation' hook.
const options3: RateLimitPluginOptions = {
  global: true,
  max: (_req: FastifyRequest<RequestGenericInterface>, _key: string) => 42,
  timeWindow: 5000,
  store: CustomStore,
  hook: 'preValidation'
}

// Async (Promise-returning) `max` callback.
const options4: RateLimitPluginOptions = {
  global: true,
  max: (_req: FastifyRequest<RequestGenericInterface>, _key: string) => Promise.resolve(42),
  timeWindow: 5000,
  store: CustomStore,
  hook: 'preHandler'
}

// Redis-backed store with a local cache size and a custom key namespace.
const options5: RateLimitPluginOptions = {
  max: 3,
  timeWindow: 5000,
  cache: 10000,
  redis: new IORedis({ host: '127.0.0.1' }),
  nameSpace: 'my-namespace'
}

// Async allowList and async keyGenerator callbacks.
const options6: RateLimitPluginOptions = {
  global: true,
  allowList: async (_req, _key) => true,
  keyGenerator: async (_req) => '',
  timeWindow: 5000,
  store: CustomStore,
  hook: 'preHandler'
}

// Sync `timeWindow` callback variant.
const options7: RateLimitPluginOptions = {
  global: true,
  max: (_req: FastifyRequest<RequestGenericInterface>, _key: string) => 42,
  timeWindow: (_req: FastifyRequest<RequestGenericInterface>, _key: string) => 5000,
  store: CustomStore,
  hook: 'preValidation'
}

// Async `timeWindow` callback variant.
const options8: RateLimitPluginOptions = {
  global: true,
  max: (_req: FastifyRequest<RequestGenericInterface>, _key: string) => 42,
  timeWindow: (_req: FastifyRequest<RequestGenericInterface>, _key: string) => Promise.resolve(5000),
  store: CustomStore,
  hook: 'preValidation'
}

// Async `max` plus sync `timeWindow`, with exponential backoff enabled.
const options9: RateLimitPluginOptions = {
  global: true,
  max: (_req: FastifyRequest<RequestGenericInterface>, _key: string) => Promise.resolve(42),
  timeWindow: (_req: FastifyRequest<RequestGenericInterface>, _key: string) => 5000,
  store: CustomStore,
  hook: 'preValidation',
  exponentialBackoff: true
}
// Registering the plugin with the various option shapes must type-check.
appWithImplicitHttp.register(fastifyRateLimit, options1)
appWithImplicitHttp.register(fastifyRateLimit, options2)
appWithImplicitHttp.register(fastifyRateLimit, options5)
appWithImplicitHttp.register(fastifyRateLimit, options9)

appWithImplicitHttp.register(fastifyRateLimit, options3).then(() => {
  // After registration the decorated `rateLimit()` factory must return a
  // preHandler hook for every supported options shape (or no argument).
  expectType<preHandlerAsyncHookHandler>(appWithImplicitHttp.rateLimit())
  expectType<preHandlerAsyncHookHandler>(appWithImplicitHttp.rateLimit(options1))
  expectType<preHandlerAsyncHookHandler>(appWithImplicitHttp.rateLimit(options2))
  expectType<preHandlerAsyncHookHandler>(appWithImplicitHttp.rateLimit(options3))
  expectType<preHandlerAsyncHookHandler>(appWithImplicitHttp.rateLimit(options4))
  expectType<preHandlerAsyncHookHandler>(appWithImplicitHttp.rateLimit(options5))
  expectType<preHandlerAsyncHookHandler>(appWithImplicitHttp.rateLimit(options6))
  expectType<preHandlerAsyncHookHandler>(appWithImplicitHttp.rateLimit(options7))
  expectType<preHandlerAsyncHookHandler>(appWithImplicitHttp.rateLimit(options8))
  expectType<preHandlerAsyncHookHandler>(appWithImplicitHttp.rateLimit(options9))
  // The following test is dependent on https://github.com/fastify/fastify/pull/2929
  // appWithImplicitHttp.setNotFoundHandler({
  //   preHandler: appWithImplicitHttp.rateLimit()
  // }, function (request:FastifyRequest<RequestGenericInterface>, reply: FastifyReply<ReplyGenericInterface>) {
  //   reply.status(404).send(new Error('Not found'))
  // })
})

// Per-route rate-limit configuration via the route-level `config` object.
appWithImplicitHttp.get('/', { config: { rateLimit: { max: 10, timeWindow: '60s' } } }, () => { return 'limited' })
// The plugin must also type-check on an HTTP/2 Fastify instance.
const appWithHttp2: FastifyInstance<
  http2.Http2Server,
  http2.Http2ServerRequest,
  http2.Http2ServerResponse
> = fastify({ http2: true })

appWithHttp2.register(fastifyRateLimit, options1)
appWithHttp2.register(fastifyRateLimit, options2)
appWithHttp2.register(fastifyRateLimit, options3)
appWithHttp2.register(fastifyRateLimit, options5)
appWithHttp2.register(fastifyRateLimit, options6)
appWithHttp2.register(fastifyRateLimit, options7)
appWithHttp2.register(fastifyRateLimit, options8)
appWithHttp2.register(fastifyRateLimit, options9)

// A route may opt out of rate limiting entirely with `rateLimit: false`.
appWithHttp2.get('/public', {
  config: {
    rateLimit: false
  }
}, (_request, reply) => {
  reply.send({ hello: 'from ... public' })
})

// The error-response builder context must accept all documented fields.
expectAssignable<errorResponseBuilderContext>({
  statusCode: 429,
  ban: true,
  after: '123',
  max: 1000,
  ttl: 123
})
// Registration and the `rateLimit()` preHandler must type-check when a
// custom pino logger instance and a type provider are attached.
const appWithCustomLogger = fastify({
  loggerInstance: pino(),
}).withTypeProvider()

appWithCustomLogger.register(fastifyRateLimit, options1)

appWithCustomLogger.route({
  method: 'GET',
  url: '/',
  preHandler: appWithCustomLogger.rateLimit({}),
  handler: () => {},
})
// Fixtures for the manual `createRateLimit()` checker API.
const options10: CreateRateLimitOptions = {
  store: CustomStore,
  skipOnError: true,
  max: 0,
  timeWindow: 5000,
  allowList: ['127.0.0.1'],
  keyGenerator: (req: FastifyRequest<RequestGenericInterface>) => req.ip,
  ban: 10
}

appWithImplicitHttp.register(fastifyRateLimit, { global: false })
const checkRateLimit = appWithImplicitHttp.createRateLimit(options10)

appWithImplicitHttp.route({
  method: 'GET',
  url: '/',
  handler: async (req, _reply) => {
    // The checker resolves to a discriminated union on `isAllowed`: the
    // "allowed" arm carries only the key, the "denied" arm the full details.
    const limit = await checkRateLimit(req)
    expectType<{
      isAllowed: true;
      key: string;
    } | {
      isAllowed: false;
      key: string;
      max: number;
      timeWindow: number;
      remaining: number;
      ttl: number;
      ttlInSeconds: number;
      isExceeded: boolean;
      isBanned: boolean;
    }>(limit)
  },
})

// Sync callbacks; keyGenerator may return a number.
const options11: CreateRateLimitOptions = {
  max: (_req: FastifyRequest<RequestGenericInterface>, _key: string) => 42,
  timeWindow: '10s',
  allowList: (_req: FastifyRequest<RequestGenericInterface>) => true,
  keyGenerator: (_req: FastifyRequest<RequestGenericInterface>) => 42,
}

// Fully async callback variants.
const options12: CreateRateLimitOptions = {
  max: (_req: FastifyRequest<RequestGenericInterface>, _key: string) => Promise.resolve(42),
  timeWindow: (_req: FastifyRequest<RequestGenericInterface>, _key: string) => 5000,
  allowList: (_req: FastifyRequest<RequestGenericInterface>) => Promise.resolve(true),
  keyGenerator: (_req: FastifyRequest<RequestGenericInterface>) => Promise.resolve(42),
}

// Minimal options: async timeWindow and async string keyGenerator only.
const options13: CreateRateLimitOptions = {
  timeWindow: (_req: FastifyRequest<RequestGenericInterface>, _key: string) => Promise.resolve(5000),
  keyGenerator: (_req: FastifyRequest<RequestGenericInterface>) => Promise.resolve('key'),
}

// `rateLimit()` must also accept CreateRateLimitOptions objects.
expectType<preHandlerAsyncHookHandler>(appWithImplicitHttp.rateLimit(options11))
expectType<preHandlerAsyncHookHandler>(appWithImplicitHttp.rateLimit(options12))
expectType<preHandlerAsyncHookHandler>(appWithImplicitHttp.rateLimit(options13))

View File

@@ -11,6 +11,7 @@
"@fastify/cors": "^11.2.0",
"@fastify/helmet": "^13.0.2",
"@fastify/jwt": "^10.0.0",
"@fastify/rate-limit": "^10.3.0",
"@fastify/swagger": "^9.6.1",
"@fastify/swagger-ui": "^5.2.4",
"@prisma/client": "^5.22.0",
@@ -681,6 +682,27 @@
"ipaddr.js": "^2.1.0"
}
},
"node_modules/@fastify/rate-limit": {
"version": "10.3.0",
"resolved": "https://registry.npmjs.org/@fastify/rate-limit/-/rate-limit-10.3.0.tgz",
"integrity": "sha512-eIGkG9XKQs0nyynatApA3EVrojHOuq4l6fhB4eeCk4PIOeadvOJz9/4w3vGI44Go17uaXOWEcPkaD8kuKm7g6Q==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/fastify"
},
{
"type": "opencollective",
"url": "https://opencollective.com/fastify"
}
],
"license": "MIT",
"dependencies": {
"@lukeed/ms": "^2.0.2",
"fastify-plugin": "^5.0.0",
"toad-cache": "^3.7.0"
}
},
"node_modules/@fastify/send": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@fastify/send/-/send-4.1.0.tgz",

View File

@@ -11,25 +11,27 @@
"start:worker": "node dist/worker.js",
"prisma:generate": "prisma generate",
"prisma:migrate": "prisma migrate dev",
"prisma:seed": "node --loader ts-node/esm prisma/seed.ts"
"prisma:seed": "node --loader ts-node/esm prisma/seed.ts",
"admin:reset": "tsx scripts/admin-reset.ts"
},
"dependencies": {
"@fastify/cors": "^11.2.0",
"@fastify/helmet": "^13.0.2",
"@fastify/jwt": "^10.0.0",
"@fastify/rate-limit": "^10.3.0",
"@fastify/swagger": "^9.6.1",
"@fastify/swagger-ui": "^5.2.4",
"@prisma/client": "^5.22.0",
"archiver": "^6.0.2",
"argon2": "^0.41.1",
"bullmq": "^5.48.1",
"fastify": "^5.7.1",
"fastify-plugin": "^5.0.1",
"googleapis": "^170.1.0",
"imapflow": "^1.0.180",
"ioredis": "^5.5.0",
"mailparser": "^3.7.1",
"nodemailer": "^6.9.16",
"googleapis": "^170.1.0",
"archiver": "^6.0.2",
"pino": "^9.5.0",
"pino-pretty": "^10.3.1",
"zod": "^3.24.2"

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "User" ADD COLUMN "passwordResetRequired" BOOLEAN NOT NULL DEFAULT false;

View File

@@ -87,6 +87,7 @@ model User {
password String
role UserRole @default(USER)
isActive Boolean @default(true)
passwordResetRequired Boolean @default(false)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt

View File

@@ -0,0 +1,47 @@
// One-off maintenance script: reset (or set) an admin user's password.
// Usage: npm run admin:reset -- <email> [newPassword]
// When no password is supplied, a random temporary one is generated and the
// user is flagged to change it at next login.
import argon2 from "argon2";
import { PrismaClient } from "@prisma/client";
import { randomBytes } from "node:crypto";

const prisma = new PrismaClient();

// CLI arguments: target email (required) and optional explicit new password.
const [email, passwordArg] = process.argv.slice(2);

if (!email) {
  process.stderr.write("Usage: npm run admin:reset -- <email> [newPassword]\n");
  process.exit(1);
}

const run = async () => {
  const user = await prisma.user.findUnique({ where: { email } });
  if (!user) {
    process.stderr.write("User not found.\n");
    process.exit(1);
  }
  // No explicit password given -> generate a URL-safe temporary one.
  const generated = !passwordArg;
  const password = passwordArg ?? randomBytes(12).toString("base64url");
  const hashed = await argon2.hash(password);
  await prisma.user.update({
    where: { id: user.id },
    data: {
      password: hashed,
      role: "ADMIN",                    // sets role to ADMIN
      isActive: true,                   // re-enables a disabled account
      passwordResetRequired: generated  // force change only for generated passwords
    }
  });
  if (generated) {
    // The temporary password is printed once and never stored in plaintext.
    process.stdout.write(`Temporary admin password for ${email}: ${password}\n`);
    process.stdout.write("Password reset required at next login.\n");
  } else {
    process.stdout.write(`Admin password updated for ${email}\n`);
  }
  await prisma.$disconnect();
};

run().catch((err) => {
  process.stderr.write(String(err) + "\n");
  process.exit(1);
});

View File

@@ -1,21 +1,26 @@
import { prisma } from "../db.js";
import { decryptSecret, encryptSecret } from "../security/crypto.js";
const sensitiveKeys = new Set(["google.client_secret"]);
export const getSetting = async (key: string) => {
const setting = await prisma.appSetting.findUnique({ where: { key } });
return setting?.value ?? null;
if (!setting) return null;
return sensitiveKeys.has(key) ? decryptSecret(setting.value) : setting.value;
};
export const setSetting = async (key: string, value: string) => {
const stored = sensitiveKeys.has(key) ? encryptSecret(value) : value;
return prisma.appSetting.upsert({
where: { key },
update: { value },
create: { key, value }
update: { value: stored },
create: { key, value: stored }
});
};
export const listSettings = async (keys: string[]) => {
const settings = await prisma.appSetting.findMany({ where: { key: { in: keys } } });
const map = new Map(settings.map((s) => [s.key, s.value]));
const map = new Map(settings.map((s) => [s.key, sensitiveKeys.has(s.key) ? decryptSecret(s.value) : s.value]));
return keys.reduce<Record<string, string | null>>((acc, key) => {
acc[key] = map.get(key) ?? null;
return acc;

View File

@@ -11,6 +11,9 @@ export default fp(async function adminPlugin(app: FastifyInstance) {
if (!user || user.role !== "ADMIN") {
return reply.code(403).send({ message: "Forbidden" });
}
if (user.passwordResetRequired) {
return reply.code(403).send({ message: "Password reset required" });
}
} catch (err) {
return reply.code(401).send({ message: "Unauthorized" });
}

View File

@@ -9,7 +9,19 @@ export default fp(async function authPlugin(app: FastifyInstance) {
const tokenFromQuery = (request.query as { token?: string } | undefined)?.token;
if (!authHeader && tokenFromQuery) {
const allowedQueryRoutes = new Set(["/jobs/:id/stream", "/jobs/exports/:id/stream"]);
if (!allowedQueryRoutes.has(request.routerPath ?? "")) {
return reply.code(401).send({ message: "Unauthorized" });
}
await request.jwtVerify({ token: tokenFromQuery });
if ((request.user as { scope?: string }).scope !== "sse") {
return reply.code(401).send({ message: "Unauthorized" });
}
if ((request.user as { jobId?: string }).jobId && (request.params as { id?: string }).id) {
if ((request.user as { jobId?: string }).jobId !== (request.params as { id?: string }).id) {
return reply.code(401).send({ message: "Unauthorized" });
}
}
} else {
await request.jwtVerify();
}
@@ -18,6 +30,13 @@ export default fp(async function authPlugin(app: FastifyInstance) {
if (!active) {
return reply.code(403).send({ message: "User disabled" });
}
if (active.passwordResetRequired) {
const allowed = new Set(["/auth/password", "/auth/logout"]);
const routePath = request.routerPath ?? request.url?.split("?")[0] ?? "";
if (!allowed.has(routePath)) {
return reply.code(403).send({ message: "Password reset required" });
}
}
} catch (err) {
reply.code(401).send({ message: "Unauthorized" });
return;

View File

@@ -2,6 +2,7 @@ import { FastifyInstance } from "fastify";
import argon2 from "argon2";
import { z } from "zod";
import { prisma } from "../db.js";
import { config } from "../config.js";
const registerSchema = z.object({
tenantName: z.string().min(2),
@@ -14,8 +15,14 @@ const loginSchema = z.object({
password: z.string().min(1)
});
const passwordSchema = z.object({
password: z.string().min(10)
});
export async function authRoutes(app: FastifyInstance) {
app.post("/register", async (request, reply) => {
app.post("/register", {
config: { rateLimit: { max: config.RATE_LIMIT_AUTH_MAX, timeWindow: config.RATE_LIMIT_AUTH_WINDOW } }
}, async (request, reply) => {
const input = registerSchema.parse(request.body);
const existing = await prisma.user.findUnique({ where: { email: input.email } });
@@ -40,10 +47,21 @@ export async function authRoutes(app: FastifyInstance) {
const token = app.jwt.sign({ sub: user.id, tenantId: user.tenantId });
return { token, user: { id: user.id, email: user.email, tenantId: user.tenantId, role: user.role } };
return {
token,
user: {
id: user.id,
email: user.email,
tenantId: user.tenantId,
role: user.role,
passwordResetRequired: user.passwordResetRequired
}
};
});
app.post("/login", async (request, reply) => {
app.post("/login", {
config: { rateLimit: { max: config.RATE_LIMIT_AUTH_MAX, timeWindow: config.RATE_LIMIT_AUTH_WINDOW } }
}, async (request, reply) => {
const input = loginSchema.parse(request.body);
const user = await prisma.user.findUnique({ where: { email: input.email }, include: { tenant: true } });
@@ -60,7 +78,32 @@ export async function authRoutes(app: FastifyInstance) {
}
const token = app.jwt.sign({ sub: user.id, tenantId: user.tenantId });
return { token, user: { id: user.id, email: user.email, tenantId: user.tenantId, role: user.role } };
return {
token,
user: {
id: user.id,
email: user.email,
tenantId: user.tenantId,
role: user.role,
passwordResetRequired: user.passwordResetRequired
}
};
});
app.post("/password", { preHandler: app.authenticate }, async (request, reply) => {
const input = passwordSchema.parse(request.body);
const user = await prisma.user.findUnique({ where: { id: request.user.sub } });
if (!user) {
return reply.code(404).send({ message: "User not found" });
}
const hashed = await argon2.hash(input.password);
await prisma.user.update({
where: { id: user.id },
data: { password: hashed, passwordResetRequired: false }
});
return { success: true };
});
app.post("/logout", async () => ({ success: true }));

View File

@@ -6,6 +6,18 @@ const envSchema = z.object({
DATABASE_URL: z.string().url(),
REDIS_URL: z.string().url(),
JWT_SECRET: z.string().min(12),
WEB_BASE_URL: z.string().url().optional(),
CORS_ORIGINS: z.string().optional(),
ENABLE_SWAGGER: z.coerce.boolean().default(false),
RATE_LIMIT_MAX: z.coerce.number().default(300),
RATE_LIMIT_WINDOW: z.string().default("1 minute"),
RATE_LIMIT_AUTH_MAX: z.coerce.number().default(10),
RATE_LIMIT_AUTH_WINDOW: z.string().default("1 minute"),
SSE_TOKEN_TTL_SECONDS: z.coerce.number().default(300),
OAUTH_STATE_TTL_SECONDS: z.coerce.number().default(600),
ALLOW_CUSTOM_MAIL_HOSTS: z.coerce.boolean().default(false),
BLOCK_PRIVATE_NETWORKS: z.coerce.boolean().default(true),
ENCRYPTION_KEY: z.string().optional(),
GOOGLE_CLIENT_ID: z.string().optional(),
GOOGLE_CLIENT_SECRET: z.string().optional(),
GOOGLE_REDIRECT_URI: z.string().optional(),
@@ -26,10 +38,22 @@ const parsed = envSchema.safeParse({
DATABASE_URL: process.env.DATABASE_URL,
REDIS_URL: process.env.REDIS_URL,
JWT_SECRET: process.env.JWT_SECRET,
WEB_BASE_URL: process.env.WEB_BASE_URL,
GOOGLE_CLIENT_ID: process.env.GOOGLE_CLIENT_ID,
GOOGLE_CLIENT_SECRET: process.env.GOOGLE_CLIENT_SECRET,
GOOGLE_REDIRECT_URI: process.env.GOOGLE_REDIRECT_URI,
TRUST_PROXY: process.env.TRUST_PROXY,
CORS_ORIGINS: process.env.CORS_ORIGINS,
ENABLE_SWAGGER: process.env.ENABLE_SWAGGER,
RATE_LIMIT_MAX: process.env.RATE_LIMIT_MAX,
RATE_LIMIT_WINDOW: process.env.RATE_LIMIT_WINDOW,
RATE_LIMIT_AUTH_MAX: process.env.RATE_LIMIT_AUTH_MAX,
RATE_LIMIT_AUTH_WINDOW: process.env.RATE_LIMIT_AUTH_WINDOW,
SSE_TOKEN_TTL_SECONDS: process.env.SSE_TOKEN_TTL_SECONDS,
OAUTH_STATE_TTL_SECONDS: process.env.OAUTH_STATE_TTL_SECONDS,
ALLOW_CUSTOM_MAIL_HOSTS: process.env.ALLOW_CUSTOM_MAIL_HOSTS,
BLOCK_PRIVATE_NETWORKS: process.env.BLOCK_PRIVATE_NETWORKS,
ENCRYPTION_KEY: process.env.ENCRYPTION_KEY,
SEED_ENABLED: process.env.SEED_ENABLED,
SEED_TENANT: process.env.SEED_TENANT,
SEED_TENANT_ID: process.env.SEED_TENANT_ID,
@@ -43,4 +67,23 @@ if (!parsed.success) {
throw new Error(`Invalid environment configuration: ${details}`);
}
export const config = parsed.data;
const data = parsed.data;
if (data.NODE_ENV !== "development") {
const forbidden = new Set(["dev-change-me", "change-me-now", "dev-insecure-change-me"]);
if (forbidden.has(data.JWT_SECRET)) {
throw new Error("Invalid environment configuration: JWT_SECRET must be changed for production");
}
if (!data.CORS_ORIGINS) {
throw new Error("Invalid environment configuration: CORS_ORIGINS must be set for production");
}
if (!data.ENCRYPTION_KEY || data.ENCRYPTION_KEY.length < 32 || forbidden.has(data.ENCRYPTION_KEY)) {
throw new Error("Invalid environment configuration: ENCRYPTION_KEY must be at least 32 characters for production");
}
if (data.SEED_ENABLED) {
if (!data.SEED_ADMIN_PASSWORD || forbidden.has(data.SEED_ADMIN_PASSWORD)) {
throw new Error("Invalid environment configuration: SEED_ADMIN_PASSWORD must be set and not default for production");
}
}
}
export const config = data;

View File

@@ -3,6 +3,7 @@ import { MailboxAccount } from "@prisma/client";
import { config } from "../config.js";
import { prisma } from "../db.js";
import { getSetting } from "../admin/settings.js";
import { decryptSecret, encryptSecret } from "../security/crypto.js";
const getOAuthClient = async () => {
const clientId = (await getSetting("google.client_id")) ?? config.GOOGLE_CLIENT_ID;
@@ -38,8 +39,8 @@ export const storeGmailTokens = async (accountId: string, tokens: { access_token
return prisma.mailboxAccount.update({
where: { id: accountId },
data: {
oauthAccessToken: tokens.access_token ?? undefined,
oauthRefreshToken: tokens.refresh_token ?? undefined,
oauthAccessToken: tokens.access_token ? encryptSecret(tokens.access_token) : undefined,
oauthRefreshToken: tokens.refresh_token ? encryptSecret(tokens.refresh_token) : undefined,
oauthExpiresAt: tokens.expiry_date ? new Date(tokens.expiry_date) : undefined
}
});
@@ -52,8 +53,8 @@ export const gmailClientForAccount = async (account: MailboxAccount) => {
const client = await getOAuthClient();
client.setCredentials({
refresh_token: account.oauthRefreshToken ?? undefined,
access_token: account.oauthAccessToken ?? undefined,
refresh_token: account.oauthRefreshToken ? decryptSecret(account.oauthRefreshToken) : undefined,
access_token: account.oauthAccessToken ? decryptSecret(account.oauthAccessToken) : undefined,
expiry_date: account.oauthExpiresAt?.getTime()
});
@@ -69,8 +70,12 @@ export const refreshGmailTokens = async (account: MailboxAccount) => {
await prisma.mailboxAccount.update({
where: { id: account.id },
data: {
oauthAccessToken: token?.token ?? account.oauthAccessToken ?? undefined,
oauthRefreshToken: client.credentials.refresh_token ?? account.oauthRefreshToken ?? undefined,
oauthAccessToken: token?.token
? encryptSecret(token.token)
: account.oauthAccessToken ?? undefined,
oauthRefreshToken: client.credentials.refresh_token
? encryptSecret(client.credentials.refresh_token)
: account.oauthRefreshToken ?? undefined,
oauthExpiresAt: expiresAt ?? undefined
}
});

View File

@@ -1,6 +1,7 @@
import { ImapFlow } from "imapflow";
import { simpleParser } from "mailparser";
import { MailboxAccount } from "@prisma/client";
import { decryptSecret } from "../security/crypto.js";
export const createImapClient = (account: MailboxAccount) => {
return new ImapFlow({
@@ -8,8 +9,8 @@ export const createImapClient = (account: MailboxAccount) => {
port: account.imapPort,
secure: account.imapTLS,
auth: account.oauthToken
? { user: account.email, accessToken: account.oauthToken }
: { user: account.email, pass: account.appPassword ?? "" }
? { user: account.email, accessToken: decryptSecret(account.oauthToken) }
: { user: account.email, pass: account.appPassword ? decryptSecret(account.appPassword) : "" }
});
};

View File

@@ -3,13 +3,12 @@ import { z } from "zod";
import { prisma } from "../db.js";
import { exchangeGmailCode, getGmailAuthUrl, storeGmailTokens, refreshGmailTokens, pingGmail } from "./gmail.js";
import { mapGmailError } from "./oauthErrors.js";
import { config } from "../config.js";
const urlSchema = z.object({ accountId: z.string() });
export async function oauthRoutes(app: FastifyInstance) {
app.addHook("preHandler", app.authenticate);
app.post("/gmail/url", async (request, reply) => {
app.post("/gmail/url", { preHandler: app.authenticate }, async (request, reply) => {
const input = urlSchema.parse(request.body);
const account = await prisma.mailboxAccount.findFirst({
where: { id: input.accountId, tenantId: request.user.tenantId, provider: "GMAIL" }
@@ -18,7 +17,10 @@ export async function oauthRoutes(app: FastifyInstance) {
return { url: null };
}
const state = `${account.id}:${request.user.tenantId}`;
const state = app.jwt.sign(
{ scope: "oauth", accountId: account.id, tenantId: request.user.tenantId },
{ expiresIn: `${config.OAUTH_STATE_TTL_SECONDS}s` }
);
try {
const url = await getGmailAuthUrl(state);
return { url };
@@ -33,7 +35,18 @@ export async function oauthRoutes(app: FastifyInstance) {
return reply.code(400).send({ message: "Missing code/state" });
}
const [accountId, tenantId] = query.state.split(":");
let decoded: { accountId?: string; tenantId?: string; scope?: string };
try {
decoded = app.jwt.verify(query.state) as { accountId?: string; tenantId?: string; scope?: string };
} catch {
return reply.code(400).send({ message: "Invalid state" });
}
if (decoded.scope !== "oauth" || !decoded.accountId || !decoded.tenantId) {
return reply.code(400).send({ message: "Invalid state" });
}
const accountId = decoded.accountId;
const tenantId = decoded.tenantId;
const account = await prisma.mailboxAccount.findFirst({
where: { id: accountId, tenantId, provider: "GMAIL" }
});
@@ -53,10 +66,14 @@ export async function oauthRoutes(app: FastifyInstance) {
expiry_date: tokens.expiry_date ?? undefined
});
return reply.redirect(`/oauth-success?accountId=${account.id}`);
const base = config.WEB_BASE_URL ?? "";
const target = base
? `${base}/oauth-success?accountId=${account.id}`
: `/oauth-success?accountId=${account.id}`;
return reply.redirect(target);
});
app.get("/gmail/status/:accountId", async (request, reply) => {
app.get("/gmail/status/:accountId", { preHandler: app.authenticate }, async (request, reply) => {
const params = request.params as { accountId: string };
const account = await prisma.mailboxAccount.findFirst({
where: { id: params.accountId, tenantId: request.user.tenantId, provider: "GMAIL" }
@@ -71,7 +88,7 @@ export async function oauthRoutes(app: FastifyInstance) {
};
});
app.get("/gmail/ping/:accountId", async (request, reply) => {
app.get("/gmail/ping/:accountId", { preHandler: app.authenticate }, async (request, reply) => {
const params = request.params as { accountId: string };
const account = await prisma.mailboxAccount.findFirst({
where: { id: params.accountId, tenantId: request.user.tenantId, provider: "GMAIL" }

View File

@@ -3,6 +3,9 @@ import { z } from "zod";
import { prisma } from "../db.js";
import { providerDefaults } from "./providers.js";
import { queueCleanupJob } from "../queue/queue.js";
import { config } from "../config.js";
import { encryptSecret } from "../security/crypto.js";
import { isPrivateHost } from "../security/ssrf.js";
const createAccountSchema = z.object({
email: z.string().email(),
@@ -53,21 +56,34 @@ export async function mailRoutes(app: FastifyInstance) {
const input = createAccountSchema.parse(request.body);
const defaults = providerDefaults[input.provider];
const allowCustom = config.ALLOW_CUSTOM_MAIL_HOSTS;
const imapHost = allowCustom ? (input.imapHost ?? defaults.imapHost) : defaults.imapHost;
const smtpHost = allowCustom ? (input.smtpHost ?? defaults.smtpHost) : defaults.smtpHost;
if (allowCustom && config.BLOCK_PRIVATE_NETWORKS) {
if (imapHost && await isPrivateHost(imapHost)) {
return reply.code(400).send({ message: "IMAP host is not allowed" });
}
if (smtpHost && await isPrivateHost(smtpHost)) {
return reply.code(400).send({ message: "SMTP host is not allowed" });
}
}
const account = await prisma.mailboxAccount.create({
data: {
tenantId: request.user.tenantId,
email: input.email,
provider: input.provider,
imapHost: input.imapHost ?? defaults.imapHost,
imapPort: input.imapPort ?? defaults.imapPort,
imapTLS: input.imapTLS ?? defaults.imapTLS,
smtpHost: input.smtpHost ?? defaults.smtpHost,
smtpPort: input.smtpPort ?? defaults.smtpPort,
smtpTLS: input.smtpTLS ?? defaults.smtpTLS,
oauthToken: input.oauthToken,
oauthRefreshToken: input.oauthRefreshToken,
oauthAccessToken: input.oauthAccessToken,
appPassword: input.appPassword
imapHost: imapHost,
imapPort: allowCustom ? (input.imapPort ?? defaults.imapPort) : defaults.imapPort,
imapTLS: allowCustom ? (input.imapTLS ?? defaults.imapTLS) : defaults.imapTLS,
smtpHost: smtpHost,
smtpPort: allowCustom ? (input.smtpPort ?? defaults.smtpPort) : defaults.smtpPort,
smtpTLS: allowCustom ? (input.smtpTLS ?? defaults.smtpTLS) : defaults.smtpTLS,
oauthToken: input.oauthToken ? encryptSecret(input.oauthToken) : undefined,
oauthRefreshToken: input.oauthRefreshToken ? encryptSecret(input.oauthRefreshToken) : undefined,
oauthAccessToken: input.oauthAccessToken ? encryptSecret(input.oauthAccessToken) : undefined,
appPassword: input.appPassword ? encryptSecret(input.appPassword) : undefined
}
});

View File

@@ -1,5 +1,8 @@
import nodemailer from "nodemailer";
import { MailboxAccount } from "@prisma/client";
import { isPrivateHost } from "../security/ssrf.js";
import { decryptSecret } from "../security/crypto.js";
import { config } from "../config.js";
const parseListUnsubscribe = (value: string) => {
const tokens = value
@@ -29,13 +32,47 @@ export const unsubscribeFromHeader = async (params: {
if (httpLinks.length > 0) {
const target = httpLinks[0];
let parsed: URL;
try {
parsed = new URL(target);
} catch {
return { status: "failed", message: "Invalid unsubscribe URL" };
}
if (!["http:", "https:"].includes(parsed.protocol)) {
return { status: "failed", message: "Unsupported URL scheme" };
}
if (config.BLOCK_PRIVATE_NETWORKS && await isPrivateHost(parsed.hostname)) {
return { status: "failed", message: "Blocked private network URL" };
}
const usePost = postHint.includes("one-click");
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), 8000);
const response = await fetch(target, {
method: usePost ? "POST" : "GET",
headers: usePost ? { "Content-Type": "application/x-www-form-urlencoded" } : undefined,
body: usePost ? "List-Unsubscribe=One-Click" : undefined
body: usePost ? "List-Unsubscribe=One-Click" : undefined,
redirect: "manual",
signal: controller.signal
});
clearTimeout(timeout);
if (response.status >= 300 && response.status < 400) {
const location = response.headers.get("location");
if (!location) {
return { status: "failed", message: `HTTP ${response.status}` };
}
try {
const redirected = new URL(location, parsed);
if (config.BLOCK_PRIVATE_NETWORKS && await isPrivateHost(redirected.hostname)) {
return { status: "failed", message: "Blocked private redirect" };
}
} catch {
return { status: "failed", message: "Invalid redirect URL" };
}
}
return { status: response.ok ? "ok" : "failed", message: `HTTP ${response.status}` };
}
@@ -56,7 +93,7 @@ export const unsubscribeFromHeader = async (params: {
secure: smtpTLS,
auth: {
user: params.account.email,
pass: params.account.appPassword
pass: decryptSecret(params.account.appPassword)
}
});

View File

@@ -2,6 +2,7 @@ import Fastify from "fastify";
import cors from "@fastify/cors";
import helmet from "@fastify/helmet";
import jwt from "@fastify/jwt";
import rateLimit from "@fastify/rate-limit";
import swagger from "@fastify/swagger";
import swaggerUi from "@fastify/swagger-ui";
import { config } from "./config.js";
@@ -27,22 +28,33 @@ const app = Fastify({
trustProxy: config.TRUST_PROXY
});
const corsOrigins = config.CORS_ORIGINS
? config.CORS_ORIGINS.split(",").map((origin) => origin.trim()).filter(Boolean)
: true;
await app.register(cors, {
origin: true,
origin: corsOrigins,
methods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
allowedHeaders: ["Authorization", "Content-Type"]
});
await app.register(helmet);
await app.register(rateLimit, {
global: true,
max: config.RATE_LIMIT_MAX,
timeWindow: config.RATE_LIMIT_WINDOW
});
await app.register(jwt, { secret: config.JWT_SECRET });
await app.register(authPlugin);
await app.register(adminPlugin);
await app.register(swagger, {
openapi: {
info: { title: "Simple Mail Cleaner API", version: "0.1.0" }
}
});
await app.register(swaggerUi, { routePrefix: "/docs" });
if (config.ENABLE_SWAGGER) {
await app.register(swagger, {
openapi: {
info: { title: "Simple Mail Cleaner API", version: "0.1.0" }
}
});
await app.register(swaggerUi, { routePrefix: "/docs" });
}
await app.register(healthRoutes, { prefix: "/health" });
await app.register(authRoutes, { prefix: "/auth" });

View File

@@ -1,5 +1,6 @@
import { FastifyInstance } from "fastify";
import { prisma } from "../db.js";
import { config } from "../config.js";
export async function queueRoutes(app: FastifyInstance) {
app.addHook("preHandler", app.authenticate);
@@ -46,6 +47,21 @@ export async function queueRoutes(app: FastifyInstance) {
return { events };
});
app.get("/:id/stream-token", async (request, reply) => {
const params = request.params as { id: string };
const job = await prisma.cleanupJob.findFirst({
where: { id: params.id, tenantId: request.user.tenantId }
});
if (!job) {
return reply.code(404).send({ message: "Job not found" });
}
const token = app.jwt.sign(
{ scope: "sse", jobId: job.id, tenantId: request.user.tenantId, sub: request.user.sub },
{ expiresIn: `${config.SSE_TOKEN_TTL_SECONDS}s` }
);
return { token };
});
app.get("/:id/stream", async (request, reply) => {
const params = request.params as { id: string };
@@ -60,7 +76,8 @@ export async function queueRoutes(app: FastifyInstance) {
reply.raw.writeHead(200, {
"Content-Type": "text/event-stream",
"Cache-Control": "no-cache",
Connection: "keep-alive"
Connection: "keep-alive",
"X-Accel-Buffering": "no"
});
let lastEventId = 0;
@@ -82,6 +99,23 @@ export async function queueRoutes(app: FastifyInstance) {
});
});
app.get("/exports/:id/stream-token", async (request, reply) => {
const params = request.params as { id: string };
const user = await prisma.user.findUnique({ where: { id: request.user.sub } });
if (!user || user.role !== "ADMIN") {
return reply.code(403).send({ message: "Forbidden" });
}
const exportJob = await prisma.exportJob.findUnique({ where: { id: params.id } });
if (!exportJob) {
return reply.code(404).send({ message: "Export job not found" });
}
const token = app.jwt.sign(
{ scope: "sse", jobId: exportJob.id, tenantId: user.tenantId, sub: user.id },
{ expiresIn: `${config.SSE_TOKEN_TTL_SECONDS}s` }
);
return { token };
});
app.get("/exports/:id/stream", async (request, reply) => {
const params = request.params as { id: string };
const user = await prisma.user.findUnique({ where: { id: request.user.sub } });
@@ -96,7 +130,8 @@ export async function queueRoutes(app: FastifyInstance) {
reply.raw.writeHead(200, {
"Content-Type": "text/event-stream",
"Cache-Control": "no-cache",
Connection: "keep-alive"
Connection: "keep-alive",
"X-Accel-Buffering": "no"
});
let lastStatus = "";

View File

@@ -0,0 +1,44 @@
import { createHash, randomBytes, createCipheriv, createDecipheriv } from "node:crypto";
import { config } from "../config.js";
// Derive a 32-byte AES-256 key by hashing the configured passphrase with
// SHA-256. Returns null when ENCRYPTION_KEY is unset/empty (encryption off).
const deriveKey = () => {
  const passphrase = config.ENCRYPTION_KEY;
  return passphrase ? createHash("sha256").update(passphrase).digest() : null;
};
const ENCRYPTION_PREFIX = "enc";
/**
 * Encrypt a secret with AES-256-GCM.
 * Output format: "enc:<iv b64>:<auth-tag b64>:<ciphertext b64>".
 * When no ENCRYPTION_KEY is configured the plaintext is returned unchanged.
 */
export const encryptSecret = (plain: string) => {
  const key = deriveKey();
  if (key === null) return plain;
  const nonce = randomBytes(12); // 96-bit IV, the recommended size for GCM
  const cipher = createCipheriv("aes-256-gcm", key, nonce);
  const ciphertext = Buffer.concat([cipher.update(plain, "utf8"), cipher.final()]);
  const parts = [nonce, cipher.getAuthTag(), ciphertext].map((b) => b.toString("base64"));
  return [ENCRYPTION_PREFIX, ...parts].join(":");
};
// Reverse of encryptSecret. Values without the "enc:" prefix are treated
// as legacy/unencrypted and returned unchanged, so rows written before
// ENCRYPTION_KEY was introduced keep working.
// Throws when a prefixed payload is malformed, the key is missing, or
// GCM authentication fails (tampered ciphertext).
export const decryptSecret = (value: string) => {
  if (!value.startsWith(`${ENCRYPTION_PREFIX}:`)) {
    return value;
  }
  const key = deriveKey();
  if (!key) {
    // An encrypted payload without a configured key is unrecoverable.
    throw new Error("ENCRYPTION_KEY is not configured");
  }
  // Layout: enc:<iv b64>:<tag b64>:<ciphertext b64>. Standard base64 never
  // contains ":", so a plain split is safe here.
  const [, ivB64, tagB64, dataB64] = value.split(":");
  if (!ivB64 || !tagB64 || !dataB64) {
    throw new Error("Invalid encrypted payload");
  }
  const iv = Buffer.from(ivB64, "base64");
  const tag = Buffer.from(tagB64, "base64");
  const data = Buffer.from(dataB64, "base64");
  const decipher = createDecipheriv("aes-256-gcm", key, iv);
  // Setting the auth tag makes final() throw if the ciphertext or tag
  // was modified.
  decipher.setAuthTag(tag);
  const decrypted = Buffer.concat([decipher.update(data), decipher.final()]);
  return decrypted.toString("utf8");
};

View File

@@ -0,0 +1,45 @@
import { lookup } from "node:dns/promises";
import net from "node:net";
/**
 * True when a dotted-quad IPv4 address is in a non-public range and must
 * not be reached by server-initiated requests (SSRF guard).
 *
 * Covers: 0.0.0.0/8, 10.0.0.0/8, 127.0.0.0/8, 100.64.0.0/10 (CGNAT,
 * RFC 6598), 169.254.0.0/16 (link-local), 172.16.0.0/12, 192.168.0.0/16.
 * Malformed input returns false; callers are expected to pass addresses
 * already validated by net.isIP() or returned by DNS resolution.
 */
const isPrivateIpv4 = (ip: string) => {
  const parts = ip.split(".").map((part) => Number(part));
  if (parts.length !== 4 || parts.some((part) => Number.isNaN(part))) return false;
  const [a, b] = parts;
  if (a === 0) return true; // 0.0.0.0/8 "this network"
  if (a === 10) return true; // RFC 1918
  if (a === 127) return true; // loopback
  if (a === 100 && b >= 64 && b <= 127) return true; // 100.64.0.0/10 shared/CGNAT
  if (a === 169 && b === 254) return true; // link-local
  if (a === 172 && b >= 16 && b <= 31) return true; // RFC 1918
  if (a === 192 && b === 168) return true; // RFC 1918
  return false;
};
/**
 * True when an IPv6 address is in a non-public range (SSRF guard).
 *
 * Fixes over the naive prefix checks:
 * - "::1" is matched exactly; startsWith("::1") also flagged unrelated
 *   addresses such as "::1bad:…".
 * - The unspecified address "::" is treated as private.
 * - IPv4-mapped addresses (::ffff:a.b.c.d) are classified by their
 *   embedded IPv4 address; otherwise "::ffff:127.0.0.1" would bypass
 *   the private-network block entirely.
 * NOTE(review): fc/fd/fe80 prefix checks assume the compressed textual
 * form DNS lookups normally return — confirm if raw user literals in
 * expanded form must also be handled.
 */
const isPrivateIpv6 = (ip: string) => {
  const normalized = ip.toLowerCase();
  if (normalized === "::1" || normalized === "::") return true; // loopback / unspecified
  if (
    normalized.startsWith("fc") || // unique-local fc00::/7
    normalized.startsWith("fd") ||
    normalized.startsWith("fe80") // link-local
  ) {
    return true;
  }
  // IPv4-mapped form smuggles an IPv4 target through the IPv6 path.
  const mapped = normalized.match(/^::ffff:(\d{1,3})\.(\d{1,3})\.\d{1,3}\.\d{1,3}$/);
  if (mapped) {
    const a = Number(mapped[1]);
    const b = Number(mapped[2]);
    if (a === 0 || a === 10 || a === 127) return true;
    if (a === 100 && b >= 64 && b <= 127) return true;
    if (a === 169 && b === 254) return true;
    if (a === 172 && b >= 16 && b <= 31) return true;
    if (a === 192 && b === 168) return true;
  }
  return false;
};
// Decide whether a hostname ultimately points at a private / internal
// network target. Fail-closed: empty hostnames, unresolvable names, and
// unknown address families all return true (treated as private).
// NOTE(review): the DNS result checked here may differ from the address
// used at connect time (DNS rebinding window) — confirm whether the
// caller pins the resolved address.
export const isPrivateHost = async (hostname: string) => {
  if (!hostname) return true;
  const lower = hostname.toLowerCase();
  // Reserved local-name suffixes are private by definition.
  if (lower === "localhost" || lower.endsWith(".localhost") || lower.endsWith(".local")) {
    return true;
  }
  // IP literals: classify directly without a DNS round-trip.
  const ipType = net.isIP(hostname);
  if (ipType === 4) return isPrivateIpv4(hostname);
  if (ipType === 6) return isPrivateIpv6(hostname);
  try {
    // Resolve every A/AAAA record; one private address is enough to block.
    const results = await lookup(hostname, { all: true });
    return results.some((result) => {
      if (result.family === 4) return isPrivateIpv4(result.address);
      if (result.family === 6) return isPrivateIpv6(result.address);
      return true;
    });
  } catch {
    // Resolution failure: block rather than risk an internal target.
    return true;
  }
};

View File

@@ -11,7 +11,13 @@ export async function tenantRoutes(app: FastifyInstance) {
});
return {
user: user ? { id: user.id, email: user.email, role: user.role, isActive: user.isActive } : null,
user: user ? {
id: user.id,
email: user.email,
role: user.role,
isActive: user.isActive,
passwordResetRequired: user.passwordResetRequired
} : null,
tenant: user?.tenant ? { id: user.tenant.id, name: user.tenant.name, isActive: user.tenant.isActive } : null
};
});

View File

@@ -9,13 +9,9 @@ services:
POSTGRES_DB: mailcleaner
volumes:
- pgdata:/var/lib/postgresql/data
ports:
- "5432:5432"
redis:
image: redis:7
ports:
- "6379:6379"
api:
build:
@@ -24,7 +20,7 @@ services:
env_file:
- ./.env
environment:
NODE_ENV: development
NODE_ENV: ${NODE_ENV:-development}
PORT: ${API_PORT:-8000}
DATABASE_URL: ${DATABASE_URL:-postgresql://mailcleaner:mailcleaner@postgres:5432/mailcleaner}
REDIS_URL: ${REDIS_URL:-redis://redis:6379}
@@ -42,11 +38,15 @@ services:
depends_on:
- postgres
- redis
command: ["sh", "-c", "npm run prisma:generate && npm run dev"]
command: ["sh", "-c", "if [ \"${AUTO_MIGRATE:-false}\" = \"true\" ]; then npx prisma migrate deploy; fi; npm run prisma:generate && npm run dev"]
ports:
- "${API_PORT:-8000}:${API_PORT:-8000}"
- "${BIND_IP:-127.0.0.1}:${API_PORT:-8000}:${API_PORT:-8000}"
volumes:
- ./backend:/app
- api_node_modules:/app/node_modules
networks:
- default
- npm
worker:
build:
@@ -55,7 +55,7 @@ services:
env_file:
- ./.env
environment:
NODE_ENV: development
NODE_ENV: ${NODE_ENV:-development}
DATABASE_URL: ${DATABASE_URL:-postgresql://mailcleaner:mailcleaner@postgres:5432/mailcleaner}
REDIS_URL: ${REDIS_URL:-redis://redis:6379}
JWT_SECRET: ${JWT_SECRET:-dev-change-me}
@@ -75,6 +75,7 @@ services:
command: ["sh", "-c", "npm run prisma:generate && npm run worker:dev"]
volumes:
- ./backend:/app
- worker_node_modules:/app/node_modules
web:
build:
@@ -89,9 +90,19 @@ services:
depends_on:
- api
ports:
- "${WEB_PORT:-3000}:3000"
- "${BIND_IP:-127.0.0.1}:${WEB_PORT:-3000}:3000"
volumes:
- ./frontend:/app
networks:
- default
- npm
volumes:
pgdata:
api_node_modules:
worker_node_modules:
networks:
npm:
external: true
name: nginx-proxy-manager_default

View File

@@ -1,58 +1,58 @@
{
"hash": "41bfd1b1",
"configHash": "93f7377b",
"hash": "6daf73ca",
"configHash": "e680e5b2",
"lockfileHash": "914c8d77",
"browserHash": "2951be62",
"browserHash": "0cec7f04",
"optimized": {
"react": {
"src": "../../react/index.js",
"file": "react.js",
"fileHash": "c765b11a",
"fileHash": "de4c7070",
"needsInterop": true
},
"react-dom": {
"src": "../../react-dom/index.js",
"file": "react-dom.js",
"fileHash": "4de0ccdf",
"fileHash": "485c623a",
"needsInterop": true
},
"react/jsx-dev-runtime": {
"src": "../../react/jsx-dev-runtime.js",
"file": "react_jsx-dev-runtime.js",
"fileHash": "64e4a1af",
"fileHash": "5fec023e",
"needsInterop": true
},
"react/jsx-runtime": {
"src": "../../react/jsx-runtime.js",
"file": "react_jsx-runtime.js",
"fileHash": "c81f8d21",
"fileHash": "da804d6c",
"needsInterop": true
},
"i18next": {
"src": "../../i18next/dist/esm/i18next.js",
"file": "i18next.js",
"fileHash": "5517618c",
"fileHash": "9ce4e80e",
"needsInterop": false
},
"react-dom/client": {
"src": "../../react-dom/client.js",
"file": "react-dom_client.js",
"fileHash": "b0f8b97d",
"fileHash": "e5b05483",
"needsInterop": true
},
"react-i18next": {
"src": "../../react-i18next/dist/es/index.js",
"file": "react-i18next.js",
"fileHash": "4379788b",
"fileHash": "980b006c",
"needsInterop": false
}
},
"chunks": {
"chunk-BVI7NZOO": {
"file": "chunk-BVI7NZOO.js"
"chunk-WJR2STNY": {
"file": "chunk-WJR2STNY.js"
},
"chunk-4HAMFFQC": {
"file": "chunk-4HAMFFQC.js"
"chunk-FXM4O2JN": {
"file": "chunk-FXM4O2JN.js"
},
"chunk-EQCVQC35": {
"file": "chunk-EQCVQC35.js"

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

305
frontend/node_modules/.vite/deps/chunk-FXM4O2JN.js generated vendored Normal file
View File

@@ -0,0 +1,305 @@
import {
__commonJS
} from "./chunk-EQCVQC35.js";
// node_modules/react/cjs/react.production.min.js
var require_react_production_min = __commonJS({
"node_modules/react/cjs/react.production.min.js"(exports) {
"use strict";
var l = Symbol.for("react.element");
var n = Symbol.for("react.portal");
var p = Symbol.for("react.fragment");
var q = Symbol.for("react.strict_mode");
var r = Symbol.for("react.profiler");
var t = Symbol.for("react.provider");
var u = Symbol.for("react.context");
var v = Symbol.for("react.forward_ref");
var w = Symbol.for("react.suspense");
var x = Symbol.for("react.memo");
var y = Symbol.for("react.lazy");
var z = Symbol.iterator;
function A(a) {
if (null === a || "object" !== typeof a) return null;
a = z && a[z] || a["@@iterator"];
return "function" === typeof a ? a : null;
}
var B = { isMounted: function() {
return false;
}, enqueueForceUpdate: function() {
}, enqueueReplaceState: function() {
}, enqueueSetState: function() {
} };
var C = Object.assign;
var D = {};
function E(a, b, e) {
this.props = a;
this.context = b;
this.refs = D;
this.updater = e || B;
}
E.prototype.isReactComponent = {};
E.prototype.setState = function(a, b) {
if ("object" !== typeof a && "function" !== typeof a && null != a) throw Error("setState(...): takes an object of state variables to update or a function which returns an object of state variables.");
this.updater.enqueueSetState(this, a, b, "setState");
};
E.prototype.forceUpdate = function(a) {
this.updater.enqueueForceUpdate(this, a, "forceUpdate");
};
function F() {
}
F.prototype = E.prototype;
function G(a, b, e) {
this.props = a;
this.context = b;
this.refs = D;
this.updater = e || B;
}
var H = G.prototype = new F();
H.constructor = G;
C(H, E.prototype);
H.isPureReactComponent = true;
var I = Array.isArray;
var J = Object.prototype.hasOwnProperty;
var K = { current: null };
var L = { key: true, ref: true, __self: true, __source: true };
function M(a, b, e) {
var d, c = {}, k = null, h = null;
if (null != b) for (d in void 0 !== b.ref && (h = b.ref), void 0 !== b.key && (k = "" + b.key), b) J.call(b, d) && !L.hasOwnProperty(d) && (c[d] = b[d]);
var g = arguments.length - 2;
if (1 === g) c.children = e;
else if (1 < g) {
for (var f = Array(g), m = 0; m < g; m++) f[m] = arguments[m + 2];
c.children = f;
}
if (a && a.defaultProps) for (d in g = a.defaultProps, g) void 0 === c[d] && (c[d] = g[d]);
return { $$typeof: l, type: a, key: k, ref: h, props: c, _owner: K.current };
}
function N(a, b) {
return { $$typeof: l, type: a.type, key: b, ref: a.ref, props: a.props, _owner: a._owner };
}
function O(a) {
return "object" === typeof a && null !== a && a.$$typeof === l;
}
function escape(a) {
var b = { "=": "=0", ":": "=2" };
return "$" + a.replace(/[=:]/g, function(a2) {
return b[a2];
});
}
var P = /\/+/g;
function Q(a, b) {
return "object" === typeof a && null !== a && null != a.key ? escape("" + a.key) : b.toString(36);
}
function R(a, b, e, d, c) {
var k = typeof a;
if ("undefined" === k || "boolean" === k) a = null;
var h = false;
if (null === a) h = true;
else switch (k) {
case "string":
case "number":
h = true;
break;
case "object":
switch (a.$$typeof) {
case l:
case n:
h = true;
}
}
if (h) return h = a, c = c(h), a = "" === d ? "." + Q(h, 0) : d, I(c) ? (e = "", null != a && (e = a.replace(P, "$&/") + "/"), R(c, b, e, "", function(a2) {
return a2;
})) : null != c && (O(c) && (c = N(c, e + (!c.key || h && h.key === c.key ? "" : ("" + c.key).replace(P, "$&/") + "/") + a)), b.push(c)), 1;
h = 0;
d = "" === d ? "." : d + ":";
if (I(a)) for (var g = 0; g < a.length; g++) {
k = a[g];
var f = d + Q(k, g);
h += R(k, b, e, f, c);
}
else if (f = A(a), "function" === typeof f) for (a = f.call(a), g = 0; !(k = a.next()).done; ) k = k.value, f = d + Q(k, g++), h += R(k, b, e, f, c);
else if ("object" === k) throw b = String(a), Error("Objects are not valid as a React child (found: " + ("[object Object]" === b ? "object with keys {" + Object.keys(a).join(", ") + "}" : b) + "). If you meant to render a collection of children, use an array instead.");
return h;
}
function S(a, b, e) {
if (null == a) return a;
var d = [], c = 0;
R(a, d, "", "", function(a2) {
return b.call(e, a2, c++);
});
return d;
}
function T(a) {
if (-1 === a._status) {
var b = a._result;
b = b();
b.then(function(b2) {
if (0 === a._status || -1 === a._status) a._status = 1, a._result = b2;
}, function(b2) {
if (0 === a._status || -1 === a._status) a._status = 2, a._result = b2;
});
-1 === a._status && (a._status = 0, a._result = b);
}
if (1 === a._status) return a._result.default;
throw a._result;
}
var U = { current: null };
var V = { transition: null };
var W = { ReactCurrentDispatcher: U, ReactCurrentBatchConfig: V, ReactCurrentOwner: K };
function X() {
throw Error("act(...) is not supported in production builds of React.");
}
exports.Children = { map: S, forEach: function(a, b, e) {
S(a, function() {
b.apply(this, arguments);
}, e);
}, count: function(a) {
var b = 0;
S(a, function() {
b++;
});
return b;
}, toArray: function(a) {
return S(a, function(a2) {
return a2;
}) || [];
}, only: function(a) {
if (!O(a)) throw Error("React.Children.only expected to receive a single React element child.");
return a;
} };
exports.Component = E;
exports.Fragment = p;
exports.Profiler = r;
exports.PureComponent = G;
exports.StrictMode = q;
exports.Suspense = w;
exports.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED = W;
exports.act = X;
exports.cloneElement = function(a, b, e) {
if (null === a || void 0 === a) throw Error("React.cloneElement(...): The argument must be a React element, but you passed " + a + ".");
var d = C({}, a.props), c = a.key, k = a.ref, h = a._owner;
if (null != b) {
void 0 !== b.ref && (k = b.ref, h = K.current);
void 0 !== b.key && (c = "" + b.key);
if (a.type && a.type.defaultProps) var g = a.type.defaultProps;
for (f in b) J.call(b, f) && !L.hasOwnProperty(f) && (d[f] = void 0 === b[f] && void 0 !== g ? g[f] : b[f]);
}
var f = arguments.length - 2;
if (1 === f) d.children = e;
else if (1 < f) {
g = Array(f);
for (var m = 0; m < f; m++) g[m] = arguments[m + 2];
d.children = g;
}
return { $$typeof: l, type: a.type, key: c, ref: k, props: d, _owner: h };
};
exports.createContext = function(a) {
a = { $$typeof: u, _currentValue: a, _currentValue2: a, _threadCount: 0, Provider: null, Consumer: null, _defaultValue: null, _globalName: null };
a.Provider = { $$typeof: t, _context: a };
return a.Consumer = a;
};
exports.createElement = M;
exports.createFactory = function(a) {
var b = M.bind(null, a);
b.type = a;
return b;
};
exports.createRef = function() {
return { current: null };
};
exports.forwardRef = function(a) {
return { $$typeof: v, render: a };
};
exports.isValidElement = O;
exports.lazy = function(a) {
return { $$typeof: y, _payload: { _status: -1, _result: a }, _init: T };
};
exports.memo = function(a, b) {
return { $$typeof: x, type: a, compare: void 0 === b ? null : b };
};
exports.startTransition = function(a) {
var b = V.transition;
V.transition = {};
try {
a();
} finally {
V.transition = b;
}
};
exports.unstable_act = X;
exports.useCallback = function(a, b) {
return U.current.useCallback(a, b);
};
exports.useContext = function(a) {
return U.current.useContext(a);
};
exports.useDebugValue = function() {
};
exports.useDeferredValue = function(a) {
return U.current.useDeferredValue(a);
};
exports.useEffect = function(a, b) {
return U.current.useEffect(a, b);
};
exports.useId = function() {
return U.current.useId();
};
exports.useImperativeHandle = function(a, b, e) {
return U.current.useImperativeHandle(a, b, e);
};
exports.useInsertionEffect = function(a, b) {
return U.current.useInsertionEffect(a, b);
};
exports.useLayoutEffect = function(a, b) {
return U.current.useLayoutEffect(a, b);
};
exports.useMemo = function(a, b) {
return U.current.useMemo(a, b);
};
exports.useReducer = function(a, b, e) {
return U.current.useReducer(a, b, e);
};
exports.useRef = function(a) {
return U.current.useRef(a);
};
exports.useState = function(a) {
return U.current.useState(a);
};
exports.useSyncExternalStore = function(a, b, e) {
return U.current.useSyncExternalStore(a, b, e);
};
exports.useTransition = function() {
return U.current.useTransition();
};
exports.version = "18.3.1";
}
});
// node_modules/react/index.js
var require_react = __commonJS({
"node_modules/react/index.js"(exports, module) {
if (true) {
module.exports = require_react_production_min();
} else {
module.exports = null;
}
}
});
export {
require_react
};
/*! Bundled license information:
react/cjs/react.production.min.js:
(**
* @license React
* react.production.min.js
*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
*/
//# sourceMappingURL=chunk-FXM4O2JN.js.map

File diff suppressed because one or more lines are too long

6935
frontend/node_modules/.vite/deps/chunk-WJR2STNY.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

View File

@@ -1,7 +1,7 @@
import {
require_react_dom
} from "./chunk-BVI7NZOO.js";
import "./chunk-4HAMFFQC.js";
} from "./chunk-WJR2STNY.js";
import "./chunk-FXM4O2JN.js";
import "./chunk-EQCVQC35.js";
export default require_react_dom();
//# sourceMappingURL=react-dom.js.map

View File

@@ -1,7 +1,7 @@
import {
require_react_dom
} from "./chunk-BVI7NZOO.js";
import "./chunk-4HAMFFQC.js";
} from "./chunk-WJR2STNY.js";
import "./chunk-FXM4O2JN.js";
import {
__commonJS
} from "./chunk-EQCVQC35.js";
@@ -10,7 +10,7 @@ import {
var require_client = __commonJS({
"node_modules/react-dom/client.js"(exports) {
var m = require_react_dom();
if (false) {
if (true) {
exports.createRoot = m.createRoot;
exports.hydrateRoot = m.hydrateRoot;
} else {

View File

@@ -2,6 +2,6 @@
"version": 3,
"sources": ["../../react-dom/client.js"],
"sourcesContent": ["'use strict';\n\nvar m = require('react-dom');\nif (process.env.NODE_ENV === 'production') {\n exports.createRoot = m.createRoot;\n exports.hydrateRoot = m.hydrateRoot;\n} else {\n var i = m.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED;\n exports.createRoot = function(c, o) {\n i.usingClientEntryPoint = true;\n try {\n return m.createRoot(c, o);\n } finally {\n i.usingClientEntryPoint = false;\n }\n };\n exports.hydrateRoot = function(c, h, o) {\n i.usingClientEntryPoint = true;\n try {\n return m.hydrateRoot(c, h, o);\n } finally {\n i.usingClientEntryPoint = false;\n }\n };\n}\n"],
"mappings": ";;;;;;;;;AAAA;AAAA;AAEA,QAAI,IAAI;AACR,QAAI,OAAuC;AACzC,cAAQ,aAAa,EAAE;AACvB,cAAQ,cAAc,EAAE;AAAA,IAC1B,OAAO;AACD,UAAI,EAAE;AACV,cAAQ,aAAa,SAAS,GAAG,GAAG;AAClC,UAAE,wBAAwB;AAC1B,YAAI;AACF,iBAAO,EAAE,WAAW,GAAG,CAAC;AAAA,QAC1B,UAAE;AACA,YAAE,wBAAwB;AAAA,QAC5B;AAAA,MACF;AACA,cAAQ,cAAc,SAAS,GAAG,GAAG,GAAG;AACtC,UAAE,wBAAwB;AAC1B,YAAI;AACF,iBAAO,EAAE,YAAY,GAAG,GAAG,CAAC;AAAA,QAC9B,UAAE;AACA,YAAE,wBAAwB;AAAA,QAC5B;AAAA,MACF;AAAA,IACF;AAjBM;AAAA;AAAA;",
"mappings": ";;;;;;;;;AAAA;AAAA;AAEA,QAAI,IAAI;AACR,QAAI,MAAuC;AACzC,cAAQ,aAAa,EAAE;AACvB,cAAQ,cAAc,EAAE;AAAA,IAC1B,OAAO;AACD,UAAI,EAAE;AACV,cAAQ,aAAa,SAAS,GAAG,GAAG;AAClC,UAAE,wBAAwB;AAC1B,YAAI;AACF,iBAAO,EAAE,WAAW,GAAG,CAAC;AAAA,QAC1B,UAAE;AACA,YAAE,wBAAwB;AAAA,QAC5B;AAAA,MACF;AACA,cAAQ,cAAc,SAAS,GAAG,GAAG,GAAG;AACtC,UAAE,wBAAwB;AAC1B,YAAI;AACF,iBAAO,EAAE,YAAY,GAAG,GAAG,CAAC;AAAA,QAC9B,UAAE;AACA,YAAE,wBAAwB;AAAA,QAC5B;AAAA,MACF;AAAA,IACF;AAjBM;AAAA;AAAA;",
"names": []
}

View File

@@ -1,6 +1,6 @@
import {
require_react
} from "./chunk-4HAMFFQC.js";
} from "./chunk-FXM4O2JN.js";
import {
__commonJS,
__publicField,

View File

@@ -1,6 +1,6 @@
import {
require_react
} from "./chunk-4HAMFFQC.js";
} from "./chunk-FXM4O2JN.js";
import "./chunk-EQCVQC35.js";
export default require_react();
//# sourceMappingURL=react.js.map

View File

@@ -1,908 +1,34 @@
import {
require_react
} from "./chunk-4HAMFFQC.js";
import {
__commonJS
} from "./chunk-EQCVQC35.js";
// node_modules/react/cjs/react-jsx-dev-runtime.development.js
var require_react_jsx_dev_runtime_development = __commonJS({
"node_modules/react/cjs/react-jsx-dev-runtime.development.js"(exports) {
// node_modules/react/cjs/react-jsx-dev-runtime.production.min.js
var require_react_jsx_dev_runtime_production_min = __commonJS({
"node_modules/react/cjs/react-jsx-dev-runtime.production.min.js"(exports) {
"use strict";
if (true) {
(function() {
"use strict";
var React = require_react();
var REACT_ELEMENT_TYPE = Symbol.for("react.element");
var REACT_PORTAL_TYPE = Symbol.for("react.portal");
var REACT_FRAGMENT_TYPE = Symbol.for("react.fragment");
var REACT_STRICT_MODE_TYPE = Symbol.for("react.strict_mode");
var REACT_PROFILER_TYPE = Symbol.for("react.profiler");
var REACT_PROVIDER_TYPE = Symbol.for("react.provider");
var REACT_CONTEXT_TYPE = Symbol.for("react.context");
var REACT_FORWARD_REF_TYPE = Symbol.for("react.forward_ref");
var REACT_SUSPENSE_TYPE = Symbol.for("react.suspense");
var REACT_SUSPENSE_LIST_TYPE = Symbol.for("react.suspense_list");
var REACT_MEMO_TYPE = Symbol.for("react.memo");
var REACT_LAZY_TYPE = Symbol.for("react.lazy");
var REACT_OFFSCREEN_TYPE = Symbol.for("react.offscreen");
var MAYBE_ITERATOR_SYMBOL = Symbol.iterator;
var FAUX_ITERATOR_SYMBOL = "@@iterator";
function getIteratorFn(maybeIterable) {
if (maybeIterable === null || typeof maybeIterable !== "object") {
return null;
}
var maybeIterator = MAYBE_ITERATOR_SYMBOL && maybeIterable[MAYBE_ITERATOR_SYMBOL] || maybeIterable[FAUX_ITERATOR_SYMBOL];
if (typeof maybeIterator === "function") {
return maybeIterator;
}
return null;
}
var ReactSharedInternals = React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED;
function error(format) {
{
{
for (var _len2 = arguments.length, args = new Array(_len2 > 1 ? _len2 - 1 : 0), _key2 = 1; _key2 < _len2; _key2++) {
args[_key2 - 1] = arguments[_key2];
}
printWarning("error", format, args);
}
}
}
function printWarning(level, format, args) {
{
var ReactDebugCurrentFrame2 = ReactSharedInternals.ReactDebugCurrentFrame;
var stack = ReactDebugCurrentFrame2.getStackAddendum();
if (stack !== "") {
format += "%s";
args = args.concat([stack]);
}
var argsWithFormat = args.map(function(item) {
return String(item);
});
argsWithFormat.unshift("Warning: " + format);
Function.prototype.apply.call(console[level], console, argsWithFormat);
}
}
var enableScopeAPI = false;
var enableCacheElement = false;
var enableTransitionTracing = false;
var enableLegacyHidden = false;
var enableDebugTracing = false;
var REACT_MODULE_REFERENCE;
{
REACT_MODULE_REFERENCE = Symbol.for("react.module.reference");
}
function isValidElementType(type) {
if (typeof type === "string" || typeof type === "function") {
return true;
}
if (type === REACT_FRAGMENT_TYPE || type === REACT_PROFILER_TYPE || enableDebugTracing || type === REACT_STRICT_MODE_TYPE || type === REACT_SUSPENSE_TYPE || type === REACT_SUSPENSE_LIST_TYPE || enableLegacyHidden || type === REACT_OFFSCREEN_TYPE || enableScopeAPI || enableCacheElement || enableTransitionTracing) {
return true;
}
if (typeof type === "object" && type !== null) {
if (type.$$typeof === REACT_LAZY_TYPE || type.$$typeof === REACT_MEMO_TYPE || type.$$typeof === REACT_PROVIDER_TYPE || type.$$typeof === REACT_CONTEXT_TYPE || type.$$typeof === REACT_FORWARD_REF_TYPE || // This needs to include all possible module reference object
// types supported by any Flight configuration anywhere since
// we don't know which Flight build this will end up being used
// with.
type.$$typeof === REACT_MODULE_REFERENCE || type.getModuleId !== void 0) {
return true;
}
}
return false;
}
function getWrappedName(outerType, innerType, wrapperName) {
var displayName = outerType.displayName;
if (displayName) {
return displayName;
}
var functionName = innerType.displayName || innerType.name || "";
return functionName !== "" ? wrapperName + "(" + functionName + ")" : wrapperName;
}
function getContextName(type) {
return type.displayName || "Context";
}
function getComponentNameFromType(type) {
if (type == null) {
return null;
}
{
if (typeof type.tag === "number") {
error("Received an unexpected object in getComponentNameFromType(). This is likely a bug in React. Please file an issue.");
}
}
if (typeof type === "function") {
return type.displayName || type.name || null;
}
if (typeof type === "string") {
return type;
}
switch (type) {
case REACT_FRAGMENT_TYPE:
return "Fragment";
case REACT_PORTAL_TYPE:
return "Portal";
case REACT_PROFILER_TYPE:
return "Profiler";
case REACT_STRICT_MODE_TYPE:
return "StrictMode";
case REACT_SUSPENSE_TYPE:
return "Suspense";
case REACT_SUSPENSE_LIST_TYPE:
return "SuspenseList";
}
if (typeof type === "object") {
switch (type.$$typeof) {
case REACT_CONTEXT_TYPE:
var context = type;
return getContextName(context) + ".Consumer";
case REACT_PROVIDER_TYPE:
var provider = type;
return getContextName(provider._context) + ".Provider";
case REACT_FORWARD_REF_TYPE:
return getWrappedName(type, type.render, "ForwardRef");
case REACT_MEMO_TYPE:
var outerName = type.displayName || null;
if (outerName !== null) {
return outerName;
}
return getComponentNameFromType(type.type) || "Memo";
case REACT_LAZY_TYPE: {
var lazyComponent = type;
var payload = lazyComponent._payload;
var init = lazyComponent._init;
try {
return getComponentNameFromType(init(payload));
} catch (x) {
return null;
}
}
}
}
return null;
}
var assign = Object.assign;
var disabledDepth = 0;
var prevLog;
var prevInfo;
var prevWarn;
var prevError;
var prevGroup;
var prevGroupCollapsed;
var prevGroupEnd;
function disabledLog() {
}
disabledLog.__reactDisabledLog = true;
function disableLogs() {
{
if (disabledDepth === 0) {
prevLog = console.log;
prevInfo = console.info;
prevWarn = console.warn;
prevError = console.error;
prevGroup = console.group;
prevGroupCollapsed = console.groupCollapsed;
prevGroupEnd = console.groupEnd;
var props = {
configurable: true,
enumerable: true,
value: disabledLog,
writable: true
};
Object.defineProperties(console, {
info: props,
log: props,
warn: props,
error: props,
group: props,
groupCollapsed: props,
groupEnd: props
});
}
disabledDepth++;
}
}
function reenableLogs() {
{
disabledDepth--;
if (disabledDepth === 0) {
var props = {
configurable: true,
enumerable: true,
writable: true
};
Object.defineProperties(console, {
log: assign({}, props, {
value: prevLog
}),
info: assign({}, props, {
value: prevInfo
}),
warn: assign({}, props, {
value: prevWarn
}),
error: assign({}, props, {
value: prevError
}),
group: assign({}, props, {
value: prevGroup
}),
groupCollapsed: assign({}, props, {
value: prevGroupCollapsed
}),
groupEnd: assign({}, props, {
value: prevGroupEnd
})
});
}
if (disabledDepth < 0) {
error("disabledDepth fell below zero. This is a bug in React. Please file an issue.");
}
}
}
var ReactCurrentDispatcher = ReactSharedInternals.ReactCurrentDispatcher;
var prefix;
function describeBuiltInComponentFrame(name, source, ownerFn) {
{
if (prefix === void 0) {
try {
throw Error();
} catch (x) {
var match = x.stack.trim().match(/\n( *(at )?)/);
prefix = match && match[1] || "";
}
}
return "\n" + prefix + name;
}
}
var reentry = false;
var componentFrameCache;
{
var PossiblyWeakMap = typeof WeakMap === "function" ? WeakMap : Map;
componentFrameCache = new PossiblyWeakMap();
}
function describeNativeComponentFrame(fn, construct) {
if (!fn || reentry) {
return "";
}
{
var frame = componentFrameCache.get(fn);
if (frame !== void 0) {
return frame;
}
}
var control;
reentry = true;
var previousPrepareStackTrace = Error.prepareStackTrace;
Error.prepareStackTrace = void 0;
var previousDispatcher;
{
previousDispatcher = ReactCurrentDispatcher.current;
ReactCurrentDispatcher.current = null;
disableLogs();
}
try {
if (construct) {
var Fake = function() {
throw Error();
};
Object.defineProperty(Fake.prototype, "props", {
set: function() {
throw Error();
}
});
if (typeof Reflect === "object" && Reflect.construct) {
try {
Reflect.construct(Fake, []);
} catch (x) {
control = x;
}
Reflect.construct(fn, [], Fake);
} else {
try {
Fake.call();
} catch (x) {
control = x;
}
fn.call(Fake.prototype);
}
} else {
try {
throw Error();
} catch (x) {
control = x;
}
fn();
}
} catch (sample) {
if (sample && control && typeof sample.stack === "string") {
var sampleLines = sample.stack.split("\n");
var controlLines = control.stack.split("\n");
var s = sampleLines.length - 1;
var c = controlLines.length - 1;
while (s >= 1 && c >= 0 && sampleLines[s] !== controlLines[c]) {
c--;
}
for (; s >= 1 && c >= 0; s--, c--) {
if (sampleLines[s] !== controlLines[c]) {
if (s !== 1 || c !== 1) {
do {
s--;
c--;
if (c < 0 || sampleLines[s] !== controlLines[c]) {
var _frame = "\n" + sampleLines[s].replace(" at new ", " at ");
if (fn.displayName && _frame.includes("<anonymous>")) {
_frame = _frame.replace("<anonymous>", fn.displayName);
}
{
if (typeof fn === "function") {
componentFrameCache.set(fn, _frame);
}
}
return _frame;
}
} while (s >= 1 && c >= 0);
}
break;
}
}
}
} finally {
reentry = false;
{
ReactCurrentDispatcher.current = previousDispatcher;
reenableLogs();
}
Error.prepareStackTrace = previousPrepareStackTrace;
}
var name = fn ? fn.displayName || fn.name : "";
var syntheticFrame = name ? describeBuiltInComponentFrame(name) : "";
{
if (typeof fn === "function") {
componentFrameCache.set(fn, syntheticFrame);
}
}
return syntheticFrame;
}
function describeFunctionComponentFrame(fn, source, ownerFn) {
{
return describeNativeComponentFrame(fn, false);
}
}
function shouldConstruct(Component) {
var prototype = Component.prototype;
return !!(prototype && prototype.isReactComponent);
}
function describeUnknownElementTypeFrameInDEV(type, source, ownerFn) {
if (type == null) {
return "";
}
if (typeof type === "function") {
{
return describeNativeComponentFrame(type, shouldConstruct(type));
}
}
if (typeof type === "string") {
return describeBuiltInComponentFrame(type);
}
switch (type) {
case REACT_SUSPENSE_TYPE:
return describeBuiltInComponentFrame("Suspense");
case REACT_SUSPENSE_LIST_TYPE:
return describeBuiltInComponentFrame("SuspenseList");
}
if (typeof type === "object") {
switch (type.$$typeof) {
case REACT_FORWARD_REF_TYPE:
return describeFunctionComponentFrame(type.render);
case REACT_MEMO_TYPE:
return describeUnknownElementTypeFrameInDEV(type.type, source, ownerFn);
case REACT_LAZY_TYPE: {
var lazyComponent = type;
var payload = lazyComponent._payload;
var init = lazyComponent._init;
try {
return describeUnknownElementTypeFrameInDEV(init(payload), source, ownerFn);
} catch (x) {
}
}
}
}
return "";
}
var hasOwnProperty = Object.prototype.hasOwnProperty;
var loggedTypeFailures = {};
var ReactDebugCurrentFrame = ReactSharedInternals.ReactDebugCurrentFrame;
function setCurrentlyValidatingElement(element) {
{
if (element) {
var owner = element._owner;
var stack = describeUnknownElementTypeFrameInDEV(element.type, element._source, owner ? owner.type : null);
ReactDebugCurrentFrame.setExtraStackFrame(stack);
} else {
ReactDebugCurrentFrame.setExtraStackFrame(null);
}
}
}
function checkPropTypes(typeSpecs, values, location, componentName, element) {
{
var has = Function.call.bind(hasOwnProperty);
for (var typeSpecName in typeSpecs) {
if (has(typeSpecs, typeSpecName)) {
var error$1 = void 0;
try {
if (typeof typeSpecs[typeSpecName] !== "function") {
var err = Error((componentName || "React class") + ": " + location + " type `" + typeSpecName + "` is invalid; it must be a function, usually from the `prop-types` package, but received `" + typeof typeSpecs[typeSpecName] + "`.This often happens because of typos such as `PropTypes.function` instead of `PropTypes.func`.");
err.name = "Invariant Violation";
throw err;
}
error$1 = typeSpecs[typeSpecName](values, typeSpecName, componentName, location, null, "SECRET_DO_NOT_PASS_THIS_OR_YOU_WILL_BE_FIRED");
} catch (ex) {
error$1 = ex;
}
if (error$1 && !(error$1 instanceof Error)) {
setCurrentlyValidatingElement(element);
error("%s: type specification of %s `%s` is invalid; the type checker function must return `null` or an `Error` but returned a %s. You may have forgotten to pass an argument to the type checker creator (arrayOf, instanceOf, objectOf, oneOf, oneOfType, and shape all require an argument).", componentName || "React class", location, typeSpecName, typeof error$1);
setCurrentlyValidatingElement(null);
}
if (error$1 instanceof Error && !(error$1.message in loggedTypeFailures)) {
loggedTypeFailures[error$1.message] = true;
setCurrentlyValidatingElement(element);
error("Failed %s type: %s", location, error$1.message);
setCurrentlyValidatingElement(null);
}
}
}
}
}
var isArrayImpl = Array.isArray;
function isArray(a) {
return isArrayImpl(a);
}
function typeName(value) {
{
var hasToStringTag = typeof Symbol === "function" && Symbol.toStringTag;
var type = hasToStringTag && value[Symbol.toStringTag] || value.constructor.name || "Object";
return type;
}
}
function willCoercionThrow(value) {
{
try {
testStringCoercion(value);
return false;
} catch (e) {
return true;
}
}
}
function testStringCoercion(value) {
return "" + value;
}
function checkKeyStringCoercion(value) {
{
if (willCoercionThrow(value)) {
error("The provided key is an unsupported type %s. This value must be coerced to a string before before using it here.", typeName(value));
return testStringCoercion(value);
}
}
}
var ReactCurrentOwner = ReactSharedInternals.ReactCurrentOwner;
var RESERVED_PROPS = {
key: true,
ref: true,
__self: true,
__source: true
};
var specialPropKeyWarningShown;
var specialPropRefWarningShown;
var didWarnAboutStringRefs;
{
didWarnAboutStringRefs = {};
}
function hasValidRef(config) {
{
if (hasOwnProperty.call(config, "ref")) {
var getter = Object.getOwnPropertyDescriptor(config, "ref").get;
if (getter && getter.isReactWarning) {
return false;
}
}
}
return config.ref !== void 0;
}
function hasValidKey(config) {
{
if (hasOwnProperty.call(config, "key")) {
var getter = Object.getOwnPropertyDescriptor(config, "key").get;
if (getter && getter.isReactWarning) {
return false;
}
}
}
return config.key !== void 0;
}
function warnIfStringRefCannotBeAutoConverted(config, self) {
{
if (typeof config.ref === "string" && ReactCurrentOwner.current && self && ReactCurrentOwner.current.stateNode !== self) {
var componentName = getComponentNameFromType(ReactCurrentOwner.current.type);
if (!didWarnAboutStringRefs[componentName]) {
error('Component "%s" contains the string ref "%s". Support for string refs will be removed in a future major release. This case cannot be automatically converted to an arrow function. We ask you to manually fix this case by using useRef() or createRef() instead. Learn more about using refs safely here: https://reactjs.org/link/strict-mode-string-ref', getComponentNameFromType(ReactCurrentOwner.current.type), config.ref);
didWarnAboutStringRefs[componentName] = true;
}
}
}
}
function defineKeyPropWarningGetter(props, displayName) {
{
var warnAboutAccessingKey = function() {
if (!specialPropKeyWarningShown) {
specialPropKeyWarningShown = true;
error("%s: `key` is not a prop. Trying to access it will result in `undefined` being returned. If you need to access the same value within the child component, you should pass it as a different prop. (https://reactjs.org/link/special-props)", displayName);
}
};
warnAboutAccessingKey.isReactWarning = true;
Object.defineProperty(props, "key", {
get: warnAboutAccessingKey,
configurable: true
});
}
}
function defineRefPropWarningGetter(props, displayName) {
{
var warnAboutAccessingRef = function() {
if (!specialPropRefWarningShown) {
specialPropRefWarningShown = true;
error("%s: `ref` is not a prop. Trying to access it will result in `undefined` being returned. If you need to access the same value within the child component, you should pass it as a different prop. (https://reactjs.org/link/special-props)", displayName);
}
};
warnAboutAccessingRef.isReactWarning = true;
Object.defineProperty(props, "ref", {
get: warnAboutAccessingRef,
configurable: true
});
}
}
var ReactElement = function(type, key, ref, self, source, owner, props) {
var element = {
// This tag allows us to uniquely identify this as a React Element
$$typeof: REACT_ELEMENT_TYPE,
// Built-in properties that belong on the element
type,
key,
ref,
props,
// Record the component responsible for creating this element.
_owner: owner
};
{
element._store = {};
Object.defineProperty(element._store, "validated", {
configurable: false,
enumerable: false,
writable: true,
value: false
});
Object.defineProperty(element, "_self", {
configurable: false,
enumerable: false,
writable: false,
value: self
});
Object.defineProperty(element, "_source", {
configurable: false,
enumerable: false,
writable: false,
value: source
});
if (Object.freeze) {
Object.freeze(element.props);
Object.freeze(element);
}
}
return element;
};
function jsxDEV(type, config, maybeKey, source, self) {
{
var propName;
var props = {};
var key = null;
var ref = null;
if (maybeKey !== void 0) {
{
checkKeyStringCoercion(maybeKey);
}
key = "" + maybeKey;
}
if (hasValidKey(config)) {
{
checkKeyStringCoercion(config.key);
}
key = "" + config.key;
}
if (hasValidRef(config)) {
ref = config.ref;
warnIfStringRefCannotBeAutoConverted(config, self);
}
for (propName in config) {
if (hasOwnProperty.call(config, propName) && !RESERVED_PROPS.hasOwnProperty(propName)) {
props[propName] = config[propName];
}
}
if (type && type.defaultProps) {
var defaultProps = type.defaultProps;
for (propName in defaultProps) {
if (props[propName] === void 0) {
props[propName] = defaultProps[propName];
}
}
}
if (key || ref) {
var displayName = typeof type === "function" ? type.displayName || type.name || "Unknown" : type;
if (key) {
defineKeyPropWarningGetter(props, displayName);
}
if (ref) {
defineRefPropWarningGetter(props, displayName);
}
}
return ReactElement(type, key, ref, self, source, ReactCurrentOwner.current, props);
}
}
var ReactCurrentOwner$1 = ReactSharedInternals.ReactCurrentOwner;
var ReactDebugCurrentFrame$1 = ReactSharedInternals.ReactDebugCurrentFrame;
function setCurrentlyValidatingElement$1(element) {
{
if (element) {
var owner = element._owner;
var stack = describeUnknownElementTypeFrameInDEV(element.type, element._source, owner ? owner.type : null);
ReactDebugCurrentFrame$1.setExtraStackFrame(stack);
} else {
ReactDebugCurrentFrame$1.setExtraStackFrame(null);
}
}
}
var propTypesMisspellWarningShown;
{
propTypesMisspellWarningShown = false;
}
function isValidElement(object) {
{
return typeof object === "object" && object !== null && object.$$typeof === REACT_ELEMENT_TYPE;
}
}
function getDeclarationErrorAddendum() {
{
if (ReactCurrentOwner$1.current) {
var name = getComponentNameFromType(ReactCurrentOwner$1.current.type);
if (name) {
return "\n\nCheck the render method of `" + name + "`.";
}
}
return "";
}
}
function getSourceInfoErrorAddendum(source) {
{
if (source !== void 0) {
var fileName = source.fileName.replace(/^.*[\\\/]/, "");
var lineNumber = source.lineNumber;
return "\n\nCheck your code at " + fileName + ":" + lineNumber + ".";
}
return "";
}
}
var ownerHasKeyUseWarning = {};
function getCurrentComponentErrorInfo(parentType) {
{
var info = getDeclarationErrorAddendum();
if (!info) {
var parentName = typeof parentType === "string" ? parentType : parentType.displayName || parentType.name;
if (parentName) {
info = "\n\nCheck the top-level render call using <" + parentName + ">.";
}
}
return info;
}
}
function validateExplicitKey(element, parentType) {
{
if (!element._store || element._store.validated || element.key != null) {
return;
}
element._store.validated = true;
var currentComponentErrorInfo = getCurrentComponentErrorInfo(parentType);
if (ownerHasKeyUseWarning[currentComponentErrorInfo]) {
return;
}
ownerHasKeyUseWarning[currentComponentErrorInfo] = true;
var childOwner = "";
if (element && element._owner && element._owner !== ReactCurrentOwner$1.current) {
childOwner = " It was passed a child from " + getComponentNameFromType(element._owner.type) + ".";
}
setCurrentlyValidatingElement$1(element);
error('Each child in a list should have a unique "key" prop.%s%s See https://reactjs.org/link/warning-keys for more information.', currentComponentErrorInfo, childOwner);
setCurrentlyValidatingElement$1(null);
}
}
function validateChildKeys(node, parentType) {
{
if (typeof node !== "object") {
return;
}
if (isArray(node)) {
for (var i = 0; i < node.length; i++) {
var child = node[i];
if (isValidElement(child)) {
validateExplicitKey(child, parentType);
}
}
} else if (isValidElement(node)) {
if (node._store) {
node._store.validated = true;
}
} else if (node) {
var iteratorFn = getIteratorFn(node);
if (typeof iteratorFn === "function") {
if (iteratorFn !== node.entries) {
var iterator = iteratorFn.call(node);
var step;
while (!(step = iterator.next()).done) {
if (isValidElement(step.value)) {
validateExplicitKey(step.value, parentType);
}
}
}
}
}
}
}
function validatePropTypes(element) {
{
var type = element.type;
if (type === null || type === void 0 || typeof type === "string") {
return;
}
var propTypes;
if (typeof type === "function") {
propTypes = type.propTypes;
} else if (typeof type === "object" && (type.$$typeof === REACT_FORWARD_REF_TYPE || // Note: Memo only checks outer props here.
// Inner props are checked in the reconciler.
type.$$typeof === REACT_MEMO_TYPE)) {
propTypes = type.propTypes;
} else {
return;
}
if (propTypes) {
var name = getComponentNameFromType(type);
checkPropTypes(propTypes, element.props, "prop", name, element);
} else if (type.PropTypes !== void 0 && !propTypesMisspellWarningShown) {
propTypesMisspellWarningShown = true;
var _name = getComponentNameFromType(type);
error("Component %s declared `PropTypes` instead of `propTypes`. Did you misspell the property assignment?", _name || "Unknown");
}
if (typeof type.getDefaultProps === "function" && !type.getDefaultProps.isReactClassApproved) {
error("getDefaultProps is only used on classic React.createClass definitions. Use a static property named `defaultProps` instead.");
}
}
}
function validateFragmentProps(fragment) {
{
var keys = Object.keys(fragment.props);
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
if (key !== "children" && key !== "key") {
setCurrentlyValidatingElement$1(fragment);
error("Invalid prop `%s` supplied to `React.Fragment`. React.Fragment can only have `key` and `children` props.", key);
setCurrentlyValidatingElement$1(null);
break;
}
}
if (fragment.ref !== null) {
setCurrentlyValidatingElement$1(fragment);
error("Invalid attribute `ref` supplied to `React.Fragment`.");
setCurrentlyValidatingElement$1(null);
}
}
}
var didWarnAboutKeySpread = {};
function jsxWithValidation(type, props, key, isStaticChildren, source, self) {
{
var validType = isValidElementType(type);
if (!validType) {
var info = "";
if (type === void 0 || typeof type === "object" && type !== null && Object.keys(type).length === 0) {
info += " You likely forgot to export your component from the file it's defined in, or you might have mixed up default and named imports.";
}
var sourceInfo = getSourceInfoErrorAddendum(source);
if (sourceInfo) {
info += sourceInfo;
} else {
info += getDeclarationErrorAddendum();
}
var typeString;
if (type === null) {
typeString = "null";
} else if (isArray(type)) {
typeString = "array";
} else if (type !== void 0 && type.$$typeof === REACT_ELEMENT_TYPE) {
typeString = "<" + (getComponentNameFromType(type.type) || "Unknown") + " />";
info = " Did you accidentally export a JSX literal instead of a component?";
} else {
typeString = typeof type;
}
error("React.jsx: type is invalid -- expected a string (for built-in components) or a class/function (for composite components) but got: %s.%s", typeString, info);
}
var element = jsxDEV(type, props, key, source, self);
if (element == null) {
return element;
}
if (validType) {
var children = props.children;
if (children !== void 0) {
if (isStaticChildren) {
if (isArray(children)) {
for (var i = 0; i < children.length; i++) {
validateChildKeys(children[i], type);
}
if (Object.freeze) {
Object.freeze(children);
}
} else {
error("React.jsx: Static children should always be an array. You are likely explicitly calling React.jsxs or React.jsxDEV. Use the Babel transform instead.");
}
} else {
validateChildKeys(children, type);
}
}
}
{
if (hasOwnProperty.call(props, "key")) {
var componentName = getComponentNameFromType(type);
var keys = Object.keys(props).filter(function(k) {
return k !== "key";
});
var beforeExample = keys.length > 0 ? "{key: someKey, " + keys.join(": ..., ") + ": ...}" : "{key: someKey}";
if (!didWarnAboutKeySpread[componentName + beforeExample]) {
var afterExample = keys.length > 0 ? "{" + keys.join(": ..., ") + ": ...}" : "{}";
error('A props object containing a "key" prop is being spread into JSX:\n let props = %s;\n <%s {...props} />\nReact keys must be passed directly to JSX without using spread:\n let props = %s;\n <%s key={someKey} {...props} />', beforeExample, componentName, afterExample, componentName);
didWarnAboutKeySpread[componentName + beforeExample] = true;
}
}
}
if (type === REACT_FRAGMENT_TYPE) {
validateFragmentProps(element);
} else {
validatePropTypes(element);
}
return element;
}
}
var jsxDEV$1 = jsxWithValidation;
exports.Fragment = REACT_FRAGMENT_TYPE;
exports.jsxDEV = jsxDEV$1;
})();
}
var a = Symbol.for("react.fragment");
exports.Fragment = a;
exports.jsxDEV = void 0;
}
});
// node_modules/react/jsx-dev-runtime.js
var require_jsx_dev_runtime = __commonJS({
"node_modules/react/jsx-dev-runtime.js"(exports, module) {
if (false) {
module.exports = null;
if (true) {
module.exports = require_react_jsx_dev_runtime_production_min();
} else {
module.exports = require_react_jsx_dev_runtime_development();
module.exports = null;
}
}
});
export default require_jsx_dev_runtime();
/*! Bundled license information:
react/cjs/react-jsx-dev-runtime.development.js:
react/cjs/react-jsx-dev-runtime.production.min.js:
(**
* @license React
* react-jsx-dev-runtime.development.js
* react-jsx-dev-runtime.production.min.js
*
* Copyright (c) Facebook, Inc. and its affiliates.
*

File diff suppressed because one or more lines are too long

View File

@@ -1,920 +1,52 @@
import {
require_react
} from "./chunk-4HAMFFQC.js";
} from "./chunk-FXM4O2JN.js";
import {
__commonJS
} from "./chunk-EQCVQC35.js";
// node_modules/react/cjs/react-jsx-runtime.development.js
var require_react_jsx_runtime_development = __commonJS({
"node_modules/react/cjs/react-jsx-runtime.development.js"(exports) {
// node_modules/react/cjs/react-jsx-runtime.production.min.js
var require_react_jsx_runtime_production_min = __commonJS({
"node_modules/react/cjs/react-jsx-runtime.production.min.js"(exports) {
"use strict";
if (true) {
(function() {
"use strict";
var React = require_react();
var REACT_ELEMENT_TYPE = Symbol.for("react.element");
var REACT_PORTAL_TYPE = Symbol.for("react.portal");
var REACT_FRAGMENT_TYPE = Symbol.for("react.fragment");
var REACT_STRICT_MODE_TYPE = Symbol.for("react.strict_mode");
var REACT_PROFILER_TYPE = Symbol.for("react.profiler");
var REACT_PROVIDER_TYPE = Symbol.for("react.provider");
var REACT_CONTEXT_TYPE = Symbol.for("react.context");
var REACT_FORWARD_REF_TYPE = Symbol.for("react.forward_ref");
var REACT_SUSPENSE_TYPE = Symbol.for("react.suspense");
var REACT_SUSPENSE_LIST_TYPE = Symbol.for("react.suspense_list");
var REACT_MEMO_TYPE = Symbol.for("react.memo");
var REACT_LAZY_TYPE = Symbol.for("react.lazy");
var REACT_OFFSCREEN_TYPE = Symbol.for("react.offscreen");
var MAYBE_ITERATOR_SYMBOL = Symbol.iterator;
var FAUX_ITERATOR_SYMBOL = "@@iterator";
function getIteratorFn(maybeIterable) {
if (maybeIterable === null || typeof maybeIterable !== "object") {
return null;
}
var maybeIterator = MAYBE_ITERATOR_SYMBOL && maybeIterable[MAYBE_ITERATOR_SYMBOL] || maybeIterable[FAUX_ITERATOR_SYMBOL];
if (typeof maybeIterator === "function") {
return maybeIterator;
}
return null;
}
var ReactSharedInternals = React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED;
function error(format) {
{
{
for (var _len2 = arguments.length, args = new Array(_len2 > 1 ? _len2 - 1 : 0), _key2 = 1; _key2 < _len2; _key2++) {
args[_key2 - 1] = arguments[_key2];
}
printWarning("error", format, args);
}
}
}
function printWarning(level, format, args) {
{
var ReactDebugCurrentFrame2 = ReactSharedInternals.ReactDebugCurrentFrame;
var stack = ReactDebugCurrentFrame2.getStackAddendum();
if (stack !== "") {
format += "%s";
args = args.concat([stack]);
}
var argsWithFormat = args.map(function(item) {
return String(item);
});
argsWithFormat.unshift("Warning: " + format);
Function.prototype.apply.call(console[level], console, argsWithFormat);
}
}
var enableScopeAPI = false;
var enableCacheElement = false;
var enableTransitionTracing = false;
var enableLegacyHidden = false;
var enableDebugTracing = false;
var REACT_MODULE_REFERENCE;
{
REACT_MODULE_REFERENCE = Symbol.for("react.module.reference");
}
function isValidElementType(type) {
if (typeof type === "string" || typeof type === "function") {
return true;
}
if (type === REACT_FRAGMENT_TYPE || type === REACT_PROFILER_TYPE || enableDebugTracing || type === REACT_STRICT_MODE_TYPE || type === REACT_SUSPENSE_TYPE || type === REACT_SUSPENSE_LIST_TYPE || enableLegacyHidden || type === REACT_OFFSCREEN_TYPE || enableScopeAPI || enableCacheElement || enableTransitionTracing) {
return true;
}
if (typeof type === "object" && type !== null) {
if (type.$$typeof === REACT_LAZY_TYPE || type.$$typeof === REACT_MEMO_TYPE || type.$$typeof === REACT_PROVIDER_TYPE || type.$$typeof === REACT_CONTEXT_TYPE || type.$$typeof === REACT_FORWARD_REF_TYPE || // This needs to include all possible module reference object
// types supported by any Flight configuration anywhere since
// we don't know which Flight build this will end up being used
// with.
type.$$typeof === REACT_MODULE_REFERENCE || type.getModuleId !== void 0) {
return true;
}
}
return false;
}
function getWrappedName(outerType, innerType, wrapperName) {
var displayName = outerType.displayName;
if (displayName) {
return displayName;
}
var functionName = innerType.displayName || innerType.name || "";
return functionName !== "" ? wrapperName + "(" + functionName + ")" : wrapperName;
}
function getContextName(type) {
return type.displayName || "Context";
}
function getComponentNameFromType(type) {
if (type == null) {
return null;
}
{
if (typeof type.tag === "number") {
error("Received an unexpected object in getComponentNameFromType(). This is likely a bug in React. Please file an issue.");
}
}
if (typeof type === "function") {
return type.displayName || type.name || null;
}
if (typeof type === "string") {
return type;
}
switch (type) {
case REACT_FRAGMENT_TYPE:
return "Fragment";
case REACT_PORTAL_TYPE:
return "Portal";
case REACT_PROFILER_TYPE:
return "Profiler";
case REACT_STRICT_MODE_TYPE:
return "StrictMode";
case REACT_SUSPENSE_TYPE:
return "Suspense";
case REACT_SUSPENSE_LIST_TYPE:
return "SuspenseList";
}
if (typeof type === "object") {
switch (type.$$typeof) {
case REACT_CONTEXT_TYPE:
var context = type;
return getContextName(context) + ".Consumer";
case REACT_PROVIDER_TYPE:
var provider = type;
return getContextName(provider._context) + ".Provider";
case REACT_FORWARD_REF_TYPE:
return getWrappedName(type, type.render, "ForwardRef");
case REACT_MEMO_TYPE:
var outerName = type.displayName || null;
if (outerName !== null) {
return outerName;
}
return getComponentNameFromType(type.type) || "Memo";
case REACT_LAZY_TYPE: {
var lazyComponent = type;
var payload = lazyComponent._payload;
var init = lazyComponent._init;
try {
return getComponentNameFromType(init(payload));
} catch (x) {
return null;
}
}
}
}
return null;
}
var assign = Object.assign;
var disabledDepth = 0;
var prevLog;
var prevInfo;
var prevWarn;
var prevError;
var prevGroup;
var prevGroupCollapsed;
var prevGroupEnd;
function disabledLog() {
}
disabledLog.__reactDisabledLog = true;
function disableLogs() {
{
if (disabledDepth === 0) {
prevLog = console.log;
prevInfo = console.info;
prevWarn = console.warn;
prevError = console.error;
prevGroup = console.group;
prevGroupCollapsed = console.groupCollapsed;
prevGroupEnd = console.groupEnd;
var props = {
configurable: true,
enumerable: true,
value: disabledLog,
writable: true
};
Object.defineProperties(console, {
info: props,
log: props,
warn: props,
error: props,
group: props,
groupCollapsed: props,
groupEnd: props
});
}
disabledDepth++;
}
}
function reenableLogs() {
{
disabledDepth--;
if (disabledDepth === 0) {
var props = {
configurable: true,
enumerable: true,
writable: true
};
Object.defineProperties(console, {
log: assign({}, props, {
value: prevLog
}),
info: assign({}, props, {
value: prevInfo
}),
warn: assign({}, props, {
value: prevWarn
}),
error: assign({}, props, {
value: prevError
}),
group: assign({}, props, {
value: prevGroup
}),
groupCollapsed: assign({}, props, {
value: prevGroupCollapsed
}),
groupEnd: assign({}, props, {
value: prevGroupEnd
})
});
}
if (disabledDepth < 0) {
error("disabledDepth fell below zero. This is a bug in React. Please file an issue.");
}
}
}
var ReactCurrentDispatcher = ReactSharedInternals.ReactCurrentDispatcher;
var prefix;
function describeBuiltInComponentFrame(name, source, ownerFn) {
{
if (prefix === void 0) {
try {
throw Error();
} catch (x) {
var match = x.stack.trim().match(/\n( *(at )?)/);
prefix = match && match[1] || "";
}
}
return "\n" + prefix + name;
}
}
var reentry = false;
var componentFrameCache;
{
var PossiblyWeakMap = typeof WeakMap === "function" ? WeakMap : Map;
componentFrameCache = new PossiblyWeakMap();
}
function describeNativeComponentFrame(fn, construct) {
if (!fn || reentry) {
return "";
}
{
var frame = componentFrameCache.get(fn);
if (frame !== void 0) {
return frame;
}
}
var control;
reentry = true;
var previousPrepareStackTrace = Error.prepareStackTrace;
Error.prepareStackTrace = void 0;
var previousDispatcher;
{
previousDispatcher = ReactCurrentDispatcher.current;
ReactCurrentDispatcher.current = null;
disableLogs();
}
try {
if (construct) {
var Fake = function() {
throw Error();
};
Object.defineProperty(Fake.prototype, "props", {
set: function() {
throw Error();
}
});
if (typeof Reflect === "object" && Reflect.construct) {
try {
Reflect.construct(Fake, []);
} catch (x) {
control = x;
}
Reflect.construct(fn, [], Fake);
} else {
try {
Fake.call();
} catch (x) {
control = x;
}
fn.call(Fake.prototype);
}
} else {
try {
throw Error();
} catch (x) {
control = x;
}
fn();
}
} catch (sample) {
if (sample && control && typeof sample.stack === "string") {
var sampleLines = sample.stack.split("\n");
var controlLines = control.stack.split("\n");
var s = sampleLines.length - 1;
var c = controlLines.length - 1;
while (s >= 1 && c >= 0 && sampleLines[s] !== controlLines[c]) {
c--;
}
for (; s >= 1 && c >= 0; s--, c--) {
if (sampleLines[s] !== controlLines[c]) {
if (s !== 1 || c !== 1) {
do {
s--;
c--;
if (c < 0 || sampleLines[s] !== controlLines[c]) {
var _frame = "\n" + sampleLines[s].replace(" at new ", " at ");
if (fn.displayName && _frame.includes("<anonymous>")) {
_frame = _frame.replace("<anonymous>", fn.displayName);
}
{
if (typeof fn === "function") {
componentFrameCache.set(fn, _frame);
}
}
return _frame;
}
} while (s >= 1 && c >= 0);
}
break;
}
}
}
} finally {
reentry = false;
{
ReactCurrentDispatcher.current = previousDispatcher;
reenableLogs();
}
Error.prepareStackTrace = previousPrepareStackTrace;
}
var name = fn ? fn.displayName || fn.name : "";
var syntheticFrame = name ? describeBuiltInComponentFrame(name) : "";
{
if (typeof fn === "function") {
componentFrameCache.set(fn, syntheticFrame);
}
}
return syntheticFrame;
}
function describeFunctionComponentFrame(fn, source, ownerFn) {
{
return describeNativeComponentFrame(fn, false);
}
}
function shouldConstruct(Component) {
var prototype = Component.prototype;
return !!(prototype && prototype.isReactComponent);
}
function describeUnknownElementTypeFrameInDEV(type, source, ownerFn) {
if (type == null) {
return "";
}
if (typeof type === "function") {
{
return describeNativeComponentFrame(type, shouldConstruct(type));
}
}
if (typeof type === "string") {
return describeBuiltInComponentFrame(type);
}
switch (type) {
case REACT_SUSPENSE_TYPE:
return describeBuiltInComponentFrame("Suspense");
case REACT_SUSPENSE_LIST_TYPE:
return describeBuiltInComponentFrame("SuspenseList");
}
if (typeof type === "object") {
switch (type.$$typeof) {
case REACT_FORWARD_REF_TYPE:
return describeFunctionComponentFrame(type.render);
case REACT_MEMO_TYPE:
return describeUnknownElementTypeFrameInDEV(type.type, source, ownerFn);
case REACT_LAZY_TYPE: {
var lazyComponent = type;
var payload = lazyComponent._payload;
var init = lazyComponent._init;
try {
return describeUnknownElementTypeFrameInDEV(init(payload), source, ownerFn);
} catch (x) {
}
}
}
}
return "";
}
var hasOwnProperty = Object.prototype.hasOwnProperty;
var loggedTypeFailures = {};
var ReactDebugCurrentFrame = ReactSharedInternals.ReactDebugCurrentFrame;
function setCurrentlyValidatingElement(element) {
{
if (element) {
var owner = element._owner;
var stack = describeUnknownElementTypeFrameInDEV(element.type, element._source, owner ? owner.type : null);
ReactDebugCurrentFrame.setExtraStackFrame(stack);
} else {
ReactDebugCurrentFrame.setExtraStackFrame(null);
}
}
}
function checkPropTypes(typeSpecs, values, location, componentName, element) {
{
var has = Function.call.bind(hasOwnProperty);
for (var typeSpecName in typeSpecs) {
if (has(typeSpecs, typeSpecName)) {
var error$1 = void 0;
try {
if (typeof typeSpecs[typeSpecName] !== "function") {
var err = Error((componentName || "React class") + ": " + location + " type `" + typeSpecName + "` is invalid; it must be a function, usually from the `prop-types` package, but received `" + typeof typeSpecs[typeSpecName] + "`.This often happens because of typos such as `PropTypes.function` instead of `PropTypes.func`.");
err.name = "Invariant Violation";
throw err;
}
error$1 = typeSpecs[typeSpecName](values, typeSpecName, componentName, location, null, "SECRET_DO_NOT_PASS_THIS_OR_YOU_WILL_BE_FIRED");
} catch (ex) {
error$1 = ex;
}
if (error$1 && !(error$1 instanceof Error)) {
setCurrentlyValidatingElement(element);
error("%s: type specification of %s `%s` is invalid; the type checker function must return `null` or an `Error` but returned a %s. You may have forgotten to pass an argument to the type checker creator (arrayOf, instanceOf, objectOf, oneOf, oneOfType, and shape all require an argument).", componentName || "React class", location, typeSpecName, typeof error$1);
setCurrentlyValidatingElement(null);
}
if (error$1 instanceof Error && !(error$1.message in loggedTypeFailures)) {
loggedTypeFailures[error$1.message] = true;
setCurrentlyValidatingElement(element);
error("Failed %s type: %s", location, error$1.message);
setCurrentlyValidatingElement(null);
}
}
}
}
}
var isArrayImpl = Array.isArray;
function isArray(a) {
return isArrayImpl(a);
}
function typeName(value) {
{
var hasToStringTag = typeof Symbol === "function" && Symbol.toStringTag;
var type = hasToStringTag && value[Symbol.toStringTag] || value.constructor.name || "Object";
return type;
}
}
function willCoercionThrow(value) {
{
try {
testStringCoercion(value);
return false;
} catch (e) {
return true;
}
}
}
function testStringCoercion(value) {
return "" + value;
}
function checkKeyStringCoercion(value) {
{
if (willCoercionThrow(value)) {
error("The provided key is an unsupported type %s. This value must be coerced to a string before before using it here.", typeName(value));
return testStringCoercion(value);
}
}
}
var ReactCurrentOwner = ReactSharedInternals.ReactCurrentOwner;
var RESERVED_PROPS = {
key: true,
ref: true,
__self: true,
__source: true
};
var specialPropKeyWarningShown;
var specialPropRefWarningShown;
var didWarnAboutStringRefs;
{
didWarnAboutStringRefs = {};
}
function hasValidRef(config) {
{
if (hasOwnProperty.call(config, "ref")) {
var getter = Object.getOwnPropertyDescriptor(config, "ref").get;
if (getter && getter.isReactWarning) {
return false;
}
}
}
return config.ref !== void 0;
}
function hasValidKey(config) {
{
if (hasOwnProperty.call(config, "key")) {
var getter = Object.getOwnPropertyDescriptor(config, "key").get;
if (getter && getter.isReactWarning) {
return false;
}
}
}
return config.key !== void 0;
}
function warnIfStringRefCannotBeAutoConverted(config, self) {
{
if (typeof config.ref === "string" && ReactCurrentOwner.current && self && ReactCurrentOwner.current.stateNode !== self) {
var componentName = getComponentNameFromType(ReactCurrentOwner.current.type);
if (!didWarnAboutStringRefs[componentName]) {
error('Component "%s" contains the string ref "%s". Support for string refs will be removed in a future major release. This case cannot be automatically converted to an arrow function. We ask you to manually fix this case by using useRef() or createRef() instead. Learn more about using refs safely here: https://reactjs.org/link/strict-mode-string-ref', getComponentNameFromType(ReactCurrentOwner.current.type), config.ref);
didWarnAboutStringRefs[componentName] = true;
}
}
}
}
function defineKeyPropWarningGetter(props, displayName) {
{
var warnAboutAccessingKey = function() {
if (!specialPropKeyWarningShown) {
specialPropKeyWarningShown = true;
error("%s: `key` is not a prop. Trying to access it will result in `undefined` being returned. If you need to access the same value within the child component, you should pass it as a different prop. (https://reactjs.org/link/special-props)", displayName);
}
};
warnAboutAccessingKey.isReactWarning = true;
Object.defineProperty(props, "key", {
get: warnAboutAccessingKey,
configurable: true
});
}
}
function defineRefPropWarningGetter(props, displayName) {
{
var warnAboutAccessingRef = function() {
if (!specialPropRefWarningShown) {
specialPropRefWarningShown = true;
error("%s: `ref` is not a prop. Trying to access it will result in `undefined` being returned. If you need to access the same value within the child component, you should pass it as a different prop. (https://reactjs.org/link/special-props)", displayName);
}
};
warnAboutAccessingRef.isReactWarning = true;
Object.defineProperty(props, "ref", {
get: warnAboutAccessingRef,
configurable: true
});
}
}
var ReactElement = function(type, key, ref, self, source, owner, props) {
var element = {
// This tag allows us to uniquely identify this as a React Element
$$typeof: REACT_ELEMENT_TYPE,
// Built-in properties that belong on the element
type,
key,
ref,
props,
// Record the component responsible for creating this element.
_owner: owner
};
{
element._store = {};
Object.defineProperty(element._store, "validated", {
configurable: false,
enumerable: false,
writable: true,
value: false
});
Object.defineProperty(element, "_self", {
configurable: false,
enumerable: false,
writable: false,
value: self
});
Object.defineProperty(element, "_source", {
configurable: false,
enumerable: false,
writable: false,
value: source
});
if (Object.freeze) {
Object.freeze(element.props);
Object.freeze(element);
}
}
return element;
};
function jsxDEV(type, config, maybeKey, source, self) {
{
var propName;
var props = {};
var key = null;
var ref = null;
if (maybeKey !== void 0) {
{
checkKeyStringCoercion(maybeKey);
}
key = "" + maybeKey;
}
if (hasValidKey(config)) {
{
checkKeyStringCoercion(config.key);
}
key = "" + config.key;
}
if (hasValidRef(config)) {
ref = config.ref;
warnIfStringRefCannotBeAutoConverted(config, self);
}
for (propName in config) {
if (hasOwnProperty.call(config, propName) && !RESERVED_PROPS.hasOwnProperty(propName)) {
props[propName] = config[propName];
}
}
if (type && type.defaultProps) {
var defaultProps = type.defaultProps;
for (propName in defaultProps) {
if (props[propName] === void 0) {
props[propName] = defaultProps[propName];
}
}
}
if (key || ref) {
var displayName = typeof type === "function" ? type.displayName || type.name || "Unknown" : type;
if (key) {
defineKeyPropWarningGetter(props, displayName);
}
if (ref) {
defineRefPropWarningGetter(props, displayName);
}
}
return ReactElement(type, key, ref, self, source, ReactCurrentOwner.current, props);
}
}
var ReactCurrentOwner$1 = ReactSharedInternals.ReactCurrentOwner;
var ReactDebugCurrentFrame$1 = ReactSharedInternals.ReactDebugCurrentFrame;
function setCurrentlyValidatingElement$1(element) {
{
if (element) {
var owner = element._owner;
var stack = describeUnknownElementTypeFrameInDEV(element.type, element._source, owner ? owner.type : null);
ReactDebugCurrentFrame$1.setExtraStackFrame(stack);
} else {
ReactDebugCurrentFrame$1.setExtraStackFrame(null);
}
}
}
var propTypesMisspellWarningShown;
{
propTypesMisspellWarningShown = false;
}
function isValidElement(object) {
{
return typeof object === "object" && object !== null && object.$$typeof === REACT_ELEMENT_TYPE;
}
}
function getDeclarationErrorAddendum() {
{
if (ReactCurrentOwner$1.current) {
var name = getComponentNameFromType(ReactCurrentOwner$1.current.type);
if (name) {
return "\n\nCheck the render method of `" + name + "`.";
}
}
return "";
}
}
function getSourceInfoErrorAddendum(source) {
{
if (source !== void 0) {
var fileName = source.fileName.replace(/^.*[\\\/]/, "");
var lineNumber = source.lineNumber;
return "\n\nCheck your code at " + fileName + ":" + lineNumber + ".";
}
return "";
}
}
var ownerHasKeyUseWarning = {};
function getCurrentComponentErrorInfo(parentType) {
{
var info = getDeclarationErrorAddendum();
if (!info) {
var parentName = typeof parentType === "string" ? parentType : parentType.displayName || parentType.name;
if (parentName) {
info = "\n\nCheck the top-level render call using <" + parentName + ">.";
}
}
return info;
}
}
function validateExplicitKey(element, parentType) {
{
if (!element._store || element._store.validated || element.key != null) {
return;
}
element._store.validated = true;
var currentComponentErrorInfo = getCurrentComponentErrorInfo(parentType);
if (ownerHasKeyUseWarning[currentComponentErrorInfo]) {
return;
}
ownerHasKeyUseWarning[currentComponentErrorInfo] = true;
var childOwner = "";
if (element && element._owner && element._owner !== ReactCurrentOwner$1.current) {
childOwner = " It was passed a child from " + getComponentNameFromType(element._owner.type) + ".";
}
setCurrentlyValidatingElement$1(element);
error('Each child in a list should have a unique "key" prop.%s%s See https://reactjs.org/link/warning-keys for more information.', currentComponentErrorInfo, childOwner);
setCurrentlyValidatingElement$1(null);
}
}
function validateChildKeys(node, parentType) {
{
if (typeof node !== "object") {
return;
}
if (isArray(node)) {
for (var i = 0; i < node.length; i++) {
var child = node[i];
if (isValidElement(child)) {
validateExplicitKey(child, parentType);
}
}
} else if (isValidElement(node)) {
if (node._store) {
node._store.validated = true;
}
} else if (node) {
var iteratorFn = getIteratorFn(node);
if (typeof iteratorFn === "function") {
if (iteratorFn !== node.entries) {
var iterator = iteratorFn.call(node);
var step;
while (!(step = iterator.next()).done) {
if (isValidElement(step.value)) {
validateExplicitKey(step.value, parentType);
}
}
}
}
}
}
}
function validatePropTypes(element) {
{
var type = element.type;
if (type === null || type === void 0 || typeof type === "string") {
return;
}
var propTypes;
if (typeof type === "function") {
propTypes = type.propTypes;
} else if (typeof type === "object" && (type.$$typeof === REACT_FORWARD_REF_TYPE || // Note: Memo only checks outer props here.
// Inner props are checked in the reconciler.
type.$$typeof === REACT_MEMO_TYPE)) {
propTypes = type.propTypes;
} else {
return;
}
if (propTypes) {
var name = getComponentNameFromType(type);
checkPropTypes(propTypes, element.props, "prop", name, element);
} else if (type.PropTypes !== void 0 && !propTypesMisspellWarningShown) {
propTypesMisspellWarningShown = true;
var _name = getComponentNameFromType(type);
error("Component %s declared `PropTypes` instead of `propTypes`. Did you misspell the property assignment?", _name || "Unknown");
}
if (typeof type.getDefaultProps === "function" && !type.getDefaultProps.isReactClassApproved) {
error("getDefaultProps is only used on classic React.createClass definitions. Use a static property named `defaultProps` instead.");
}
}
}
function validateFragmentProps(fragment) {
{
var keys = Object.keys(fragment.props);
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
if (key !== "children" && key !== "key") {
setCurrentlyValidatingElement$1(fragment);
error("Invalid prop `%s` supplied to `React.Fragment`. React.Fragment can only have `key` and `children` props.", key);
setCurrentlyValidatingElement$1(null);
break;
}
}
if (fragment.ref !== null) {
setCurrentlyValidatingElement$1(fragment);
error("Invalid attribute `ref` supplied to `React.Fragment`.");
setCurrentlyValidatingElement$1(null);
}
}
}
var didWarnAboutKeySpread = {};
function jsxWithValidation(type, props, key, isStaticChildren, source, self) {
{
var validType = isValidElementType(type);
if (!validType) {
var info = "";
if (type === void 0 || typeof type === "object" && type !== null && Object.keys(type).length === 0) {
info += " You likely forgot to export your component from the file it's defined in, or you might have mixed up default and named imports.";
}
var sourceInfo = getSourceInfoErrorAddendum(source);
if (sourceInfo) {
info += sourceInfo;
} else {
info += getDeclarationErrorAddendum();
}
var typeString;
if (type === null) {
typeString = "null";
} else if (isArray(type)) {
typeString = "array";
} else if (type !== void 0 && type.$$typeof === REACT_ELEMENT_TYPE) {
typeString = "<" + (getComponentNameFromType(type.type) || "Unknown") + " />";
info = " Did you accidentally export a JSX literal instead of a component?";
} else {
typeString = typeof type;
}
error("React.jsx: type is invalid -- expected a string (for built-in components) or a class/function (for composite components) but got: %s.%s", typeString, info);
}
var element = jsxDEV(type, props, key, source, self);
if (element == null) {
return element;
}
if (validType) {
var children = props.children;
if (children !== void 0) {
if (isStaticChildren) {
if (isArray(children)) {
for (var i = 0; i < children.length; i++) {
validateChildKeys(children[i], type);
}
if (Object.freeze) {
Object.freeze(children);
}
} else {
error("React.jsx: Static children should always be an array. You are likely explicitly calling React.jsxs or React.jsxDEV. Use the Babel transform instead.");
}
} else {
validateChildKeys(children, type);
}
}
}
{
if (hasOwnProperty.call(props, "key")) {
var componentName = getComponentNameFromType(type);
var keys = Object.keys(props).filter(function(k) {
return k !== "key";
});
var beforeExample = keys.length > 0 ? "{key: someKey, " + keys.join(": ..., ") + ": ...}" : "{key: someKey}";
if (!didWarnAboutKeySpread[componentName + beforeExample]) {
var afterExample = keys.length > 0 ? "{" + keys.join(": ..., ") + ": ...}" : "{}";
error('A props object containing a "key" prop is being spread into JSX:\n let props = %s;\n <%s {...props} />\nReact keys must be passed directly to JSX without using spread:\n let props = %s;\n <%s key={someKey} {...props} />', beforeExample, componentName, afterExample, componentName);
didWarnAboutKeySpread[componentName + beforeExample] = true;
}
}
}
if (type === REACT_FRAGMENT_TYPE) {
validateFragmentProps(element);
} else {
validatePropTypes(element);
}
return element;
}
}
function jsxWithValidationStatic(type, props, key) {
{
return jsxWithValidation(type, props, key, true);
}
}
function jsxWithValidationDynamic(type, props, key) {
{
return jsxWithValidation(type, props, key, false);
}
}
var jsx = jsxWithValidationDynamic;
var jsxs = jsxWithValidationStatic;
exports.Fragment = REACT_FRAGMENT_TYPE;
exports.jsx = jsx;
exports.jsxs = jsxs;
})();
var f = require_react();
var k = Symbol.for("react.element");
var l = Symbol.for("react.fragment");
var m = Object.prototype.hasOwnProperty;
var n = f.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED.ReactCurrentOwner;
var p = { key: true, ref: true, __self: true, __source: true };
function q(c, a, g) {
var b, d = {}, e = null, h = null;
void 0 !== g && (e = "" + g);
void 0 !== a.key && (e = "" + a.key);
void 0 !== a.ref && (h = a.ref);
for (b in a) m.call(a, b) && !p.hasOwnProperty(b) && (d[b] = a[b]);
if (c && c.defaultProps) for (b in a = c.defaultProps, a) void 0 === d[b] && (d[b] = a[b]);
return { $$typeof: k, type: c, key: e, ref: h, props: d, _owner: n.current };
}
exports.Fragment = l;
exports.jsx = q;
exports.jsxs = q;
}
});
// node_modules/react/jsx-runtime.js
var require_jsx_runtime = __commonJS({
"node_modules/react/jsx-runtime.js"(exports, module) {
if (false) {
module.exports = null;
if (true) {
module.exports = require_react_jsx_runtime_production_min();
} else {
module.exports = require_react_jsx_runtime_development();
module.exports = null;
}
}
});
export default require_jsx_runtime();
/*! Bundled license information:
react/cjs/react-jsx-runtime.development.js:
react/cjs/react-jsx-runtime.production.min.js:
(**
* @license React
* react-jsx-runtime.development.js
* react-jsx-runtime.production.min.js
*
* Copyright (c) Facebook, Inc. and its affiliates.
*

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,34 @@
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __commonJS = (cb, mod) => function __require() {
return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
mod
));
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
export {
__commonJS,
__toESM,
__publicField
};
//# sourceMappingURL=chunk-EQCVQC35.js.map

View File

@@ -0,0 +1,7 @@
{
"version": 3,
"sources": [],
"sourcesContent": [],
"mappings": "",
"names": []
}

View File

@@ -0,0 +1,305 @@
import {
__commonJS
} from "./chunk-EQCVQC35.js";
// node_modules/react/cjs/react.production.min.js
var require_react_production_min = __commonJS({
"node_modules/react/cjs/react.production.min.js"(exports) {
"use strict";
var l = Symbol.for("react.element");
var n = Symbol.for("react.portal");
var p = Symbol.for("react.fragment");
var q = Symbol.for("react.strict_mode");
var r = Symbol.for("react.profiler");
var t = Symbol.for("react.provider");
var u = Symbol.for("react.context");
var v = Symbol.for("react.forward_ref");
var w = Symbol.for("react.suspense");
var x = Symbol.for("react.memo");
var y = Symbol.for("react.lazy");
var z = Symbol.iterator;
function A(a) {
if (null === a || "object" !== typeof a) return null;
a = z && a[z] || a["@@iterator"];
return "function" === typeof a ? a : null;
}
var B = { isMounted: function() {
return false;
}, enqueueForceUpdate: function() {
}, enqueueReplaceState: function() {
}, enqueueSetState: function() {
} };
var C = Object.assign;
var D = {};
function E(a, b, e) {
this.props = a;
this.context = b;
this.refs = D;
this.updater = e || B;
}
E.prototype.isReactComponent = {};
E.prototype.setState = function(a, b) {
if ("object" !== typeof a && "function" !== typeof a && null != a) throw Error("setState(...): takes an object of state variables to update or a function which returns an object of state variables.");
this.updater.enqueueSetState(this, a, b, "setState");
};
E.prototype.forceUpdate = function(a) {
this.updater.enqueueForceUpdate(this, a, "forceUpdate");
};
function F() {
}
F.prototype = E.prototype;
function G(a, b, e) {
this.props = a;
this.context = b;
this.refs = D;
this.updater = e || B;
}
var H = G.prototype = new F();
H.constructor = G;
C(H, E.prototype);
H.isPureReactComponent = true;
var I = Array.isArray;
var J = Object.prototype.hasOwnProperty;
var K = { current: null };
var L = { key: true, ref: true, __self: true, __source: true };
function M(a, b, e) {
var d, c = {}, k = null, h = null;
if (null != b) for (d in void 0 !== b.ref && (h = b.ref), void 0 !== b.key && (k = "" + b.key), b) J.call(b, d) && !L.hasOwnProperty(d) && (c[d] = b[d]);
var g = arguments.length - 2;
if (1 === g) c.children = e;
else if (1 < g) {
for (var f = Array(g), m = 0; m < g; m++) f[m] = arguments[m + 2];
c.children = f;
}
if (a && a.defaultProps) for (d in g = a.defaultProps, g) void 0 === c[d] && (c[d] = g[d]);
return { $$typeof: l, type: a, key: k, ref: h, props: c, _owner: K.current };
}
function N(a, b) {
return { $$typeof: l, type: a.type, key: b, ref: a.ref, props: a.props, _owner: a._owner };
}
function O(a) {
return "object" === typeof a && null !== a && a.$$typeof === l;
}
function escape(a) {
var b = { "=": "=0", ":": "=2" };
return "$" + a.replace(/[=:]/g, function(a2) {
return b[a2];
});
}
var P = /\/+/g;
function Q(a, b) {
return "object" === typeof a && null !== a && null != a.key ? escape("" + a.key) : b.toString(36);
}
function R(a, b, e, d, c) {
var k = typeof a;
if ("undefined" === k || "boolean" === k) a = null;
var h = false;
if (null === a) h = true;
else switch (k) {
case "string":
case "number":
h = true;
break;
case "object":
switch (a.$$typeof) {
case l:
case n:
h = true;
}
}
if (h) return h = a, c = c(h), a = "" === d ? "." + Q(h, 0) : d, I(c) ? (e = "", null != a && (e = a.replace(P, "$&/") + "/"), R(c, b, e, "", function(a2) {
return a2;
})) : null != c && (O(c) && (c = N(c, e + (!c.key || h && h.key === c.key ? "" : ("" + c.key).replace(P, "$&/") + "/") + a)), b.push(c)), 1;
h = 0;
d = "" === d ? "." : d + ":";
if (I(a)) for (var g = 0; g < a.length; g++) {
k = a[g];
var f = d + Q(k, g);
h += R(k, b, e, f, c);
}
else if (f = A(a), "function" === typeof f) for (a = f.call(a), g = 0; !(k = a.next()).done; ) k = k.value, f = d + Q(k, g++), h += R(k, b, e, f, c);
else if ("object" === k) throw b = String(a), Error("Objects are not valid as a React child (found: " + ("[object Object]" === b ? "object with keys {" + Object.keys(a).join(", ") + "}" : b) + "). If you meant to render a collection of children, use an array instead.");
return h;
}
function S(a, b, e) {
if (null == a) return a;
var d = [], c = 0;
R(a, d, "", "", function(a2) {
return b.call(e, a2, c++);
});
return d;
}
function T(a) {
if (-1 === a._status) {
var b = a._result;
b = b();
b.then(function(b2) {
if (0 === a._status || -1 === a._status) a._status = 1, a._result = b2;
}, function(b2) {
if (0 === a._status || -1 === a._status) a._status = 2, a._result = b2;
});
-1 === a._status && (a._status = 0, a._result = b);
}
if (1 === a._status) return a._result.default;
throw a._result;
}
var U = { current: null };
var V = { transition: null };
var W = { ReactCurrentDispatcher: U, ReactCurrentBatchConfig: V, ReactCurrentOwner: K };
function X() {
throw Error("act(...) is not supported in production builds of React.");
}
exports.Children = { map: S, forEach: function(a, b, e) {
S(a, function() {
b.apply(this, arguments);
}, e);
}, count: function(a) {
var b = 0;
S(a, function() {
b++;
});
return b;
}, toArray: function(a) {
return S(a, function(a2) {
return a2;
}) || [];
}, only: function(a) {
if (!O(a)) throw Error("React.Children.only expected to receive a single React element child.");
return a;
} };
exports.Component = E;
exports.Fragment = p;
exports.Profiler = r;
exports.PureComponent = G;
exports.StrictMode = q;
exports.Suspense = w;
exports.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED = W;
exports.act = X;
exports.cloneElement = function(a, b, e) {
if (null === a || void 0 === a) throw Error("React.cloneElement(...): The argument must be a React element, but you passed " + a + ".");
var d = C({}, a.props), c = a.key, k = a.ref, h = a._owner;
if (null != b) {
void 0 !== b.ref && (k = b.ref, h = K.current);
void 0 !== b.key && (c = "" + b.key);
if (a.type && a.type.defaultProps) var g = a.type.defaultProps;
for (f in b) J.call(b, f) && !L.hasOwnProperty(f) && (d[f] = void 0 === b[f] && void 0 !== g ? g[f] : b[f]);
}
var f = arguments.length - 2;
if (1 === f) d.children = e;
else if (1 < f) {
g = Array(f);
for (var m = 0; m < f; m++) g[m] = arguments[m + 2];
d.children = g;
}
return { $$typeof: l, type: a.type, key: c, ref: k, props: d, _owner: h };
};
exports.createContext = function(a) {
a = { $$typeof: u, _currentValue: a, _currentValue2: a, _threadCount: 0, Provider: null, Consumer: null, _defaultValue: null, _globalName: null };
a.Provider = { $$typeof: t, _context: a };
return a.Consumer = a;
};
exports.createElement = M;
exports.createFactory = function(a) {
var b = M.bind(null, a);
b.type = a;
return b;
};
exports.createRef = function() {
return { current: null };
};
exports.forwardRef = function(a) {
return { $$typeof: v, render: a };
};
exports.isValidElement = O;
exports.lazy = function(a) {
return { $$typeof: y, _payload: { _status: -1, _result: a }, _init: T };
};
exports.memo = function(a, b) {
return { $$typeof: x, type: a, compare: void 0 === b ? null : b };
};
exports.startTransition = function(a) {
var b = V.transition;
V.transition = {};
try {
a();
} finally {
V.transition = b;
}
};
exports.unstable_act = X;
exports.useCallback = function(a, b) {
return U.current.useCallback(a, b);
};
exports.useContext = function(a) {
return U.current.useContext(a);
};
exports.useDebugValue = function() {
};
exports.useDeferredValue = function(a) {
return U.current.useDeferredValue(a);
};
exports.useEffect = function(a, b) {
return U.current.useEffect(a, b);
};
exports.useId = function() {
return U.current.useId();
};
exports.useImperativeHandle = function(a, b, e) {
return U.current.useImperativeHandle(a, b, e);
};
exports.useInsertionEffect = function(a, b) {
return U.current.useInsertionEffect(a, b);
};
exports.useLayoutEffect = function(a, b) {
return U.current.useLayoutEffect(a, b);
};
exports.useMemo = function(a, b) {
return U.current.useMemo(a, b);
};
exports.useReducer = function(a, b, e) {
return U.current.useReducer(a, b, e);
};
exports.useRef = function(a) {
return U.current.useRef(a);
};
exports.useState = function(a) {
return U.current.useState(a);
};
exports.useSyncExternalStore = function(a, b, e) {
return U.current.useSyncExternalStore(a, b, e);
};
exports.useTransition = function() {
return U.current.useTransition();
};
exports.version = "18.3.1";
}
});
// node_modules/react/index.js
var require_react = __commonJS({
"node_modules/react/index.js"(exports, module) {
if (true) {
module.exports = require_react_production_min();
} else {
module.exports = null;
}
}
});
export {
require_react
};
/*! Bundled license information:
react/cjs/react.production.min.js:
(**
* @license React
* react.production.min.js
*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
*/
//# sourceMappingURL=chunk-FXM4O2JN.js.map

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

2392
frontend/node_modules/.vite/deps_temp_67299285/i18next.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,3 @@
{
"type": "module"
}

View File

@@ -0,0 +1,7 @@
import {
require_react_dom
} from "./chunk-WJR2STNY.js";
import "./chunk-FXM4O2JN.js";
import "./chunk-EQCVQC35.js";
export default require_react_dom();
//# sourceMappingURL=react-dom.js.map

View File

@@ -0,0 +1,7 @@
{
"version": 3,
"sources": [],
"sourcesContent": [],
"mappings": "",
"names": []
}

View File

@@ -0,0 +1,39 @@
import {
require_react_dom
} from "./chunk-WJR2STNY.js";
import "./chunk-FXM4O2JN.js";
import {
__commonJS
} from "./chunk-EQCVQC35.js";
// node_modules/react-dom/client.js
var require_client = __commonJS({
"node_modules/react-dom/client.js"(exports) {
var m = require_react_dom();
if (true) {
exports.createRoot = m.createRoot;
exports.hydrateRoot = m.hydrateRoot;
} else {
i = m.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED;
exports.createRoot = function(c, o) {
i.usingClientEntryPoint = true;
try {
return m.createRoot(c, o);
} finally {
i.usingClientEntryPoint = false;
}
};
exports.hydrateRoot = function(c, h, o) {
i.usingClientEntryPoint = true;
try {
return m.hydrateRoot(c, h, o);
} finally {
i.usingClientEntryPoint = false;
}
};
}
var i;
}
});
export default require_client();
//# sourceMappingURL=react-dom_client.js.map

View File

@@ -0,0 +1,7 @@
{
"version": 3,
"sources": ["../../react-dom/client.js"],
"sourcesContent": ["'use strict';\n\nvar m = require('react-dom');\nif (process.env.NODE_ENV === 'production') {\n exports.createRoot = m.createRoot;\n exports.hydrateRoot = m.hydrateRoot;\n} else {\n var i = m.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED;\n exports.createRoot = function(c, o) {\n i.usingClientEntryPoint = true;\n try {\n return m.createRoot(c, o);\n } finally {\n i.usingClientEntryPoint = false;\n }\n };\n exports.hydrateRoot = function(c, h, o) {\n i.usingClientEntryPoint = true;\n try {\n return m.hydrateRoot(c, h, o);\n } finally {\n i.usingClientEntryPoint = false;\n }\n };\n}\n"],
"mappings": ";;;;;;;;;AAAA;AAAA;AAEA,QAAI,IAAI;AACR,QAAI,MAAuC;AACzC,cAAQ,aAAa,EAAE;AACvB,cAAQ,cAAc,EAAE;AAAA,IAC1B,OAAO;AACD,UAAI,EAAE;AACV,cAAQ,aAAa,SAAS,GAAG,GAAG;AAClC,UAAE,wBAAwB;AAC1B,YAAI;AACF,iBAAO,EAAE,WAAW,GAAG,CAAC;AAAA,QAC1B,UAAE;AACA,YAAE,wBAAwB;AAAA,QAC5B;AAAA,MACF;AACA,cAAQ,cAAc,SAAS,GAAG,GAAG,GAAG;AACtC,UAAE,wBAAwB;AAC1B,YAAI;AACF,iBAAO,EAAE,YAAY,GAAG,GAAG,CAAC;AAAA,QAC9B,UAAE;AACA,YAAE,wBAAwB;AAAA,QAC5B;AAAA,MACF;AAAA,IACF;AAjBM;AAAA;AAAA;",
"names": []
}

View File

@@ -0,0 +1,840 @@
import {
require_react
} from "./chunk-FXM4O2JN.js";
import {
__commonJS,
__publicField,
__toESM
} from "./chunk-EQCVQC35.js";
// node_modules/void-elements/index.js
var require_void_elements = __commonJS({
"node_modules/void-elements/index.js"(exports, module) {
module.exports = {
"area": true,
"base": true,
"br": true,
"col": true,
"embed": true,
"hr": true,
"img": true,
"input": true,
"link": true,
"meta": true,
"param": true,
"source": true,
"track": true,
"wbr": true
};
}
});
// node_modules/react-i18next/dist/es/Trans.js
var import_react3 = __toESM(require_react(), 1);
// node_modules/react-i18next/dist/es/TransWithoutContext.js
var import_react = __toESM(require_react(), 1);
// node_modules/html-parse-stringify/dist/html-parse-stringify.module.js
var import_void_elements = __toESM(require_void_elements());
var t = /\s([^'"/\s><]+?)[\s/>]|([^\s=]+)=\s?(".*?"|'.*?')/g;
function n(n2) {
var r2 = { type: "tag", name: "", voidElement: false, attrs: {}, children: [] }, i2 = n2.match(/<\/?([^\s]+?)[/\s>]/);
if (i2 && (r2.name = i2[1], (import_void_elements.default[i2[1]] || "/" === n2.charAt(n2.length - 2)) && (r2.voidElement = true), r2.name.startsWith("!--"))) {
var s2 = n2.indexOf("-->");
return { type: "comment", comment: -1 !== s2 ? n2.slice(4, s2) : "" };
}
for (var a2 = new RegExp(t), c2 = null; null !== (c2 = a2.exec(n2)); ) if (c2[0].trim()) if (c2[1]) {
var o = c2[1].trim(), l = [o, ""];
o.indexOf("=") > -1 && (l = o.split("=")), r2.attrs[l[0]] = l[1], a2.lastIndex--;
} else c2[2] && (r2.attrs[c2[2]] = c2[3].trim().substring(1, c2[3].length - 1));
return r2;
}
var r = /<[a-zA-Z0-9\-\!\/](?:"[^"]*"|'[^']*'|[^'">])*>/g;
var i = /^\s*$/;
var s = /* @__PURE__ */ Object.create(null);
function a(e2, t2) {
switch (t2.type) {
case "text":
return e2 + t2.content;
case "tag":
return e2 += "<" + t2.name + (t2.attrs ? function(e3) {
var t3 = [];
for (var n2 in e3) t3.push(n2 + '="' + e3[n2] + '"');
return t3.length ? " " + t3.join(" ") : "";
}(t2.attrs) : "") + (t2.voidElement ? "/>" : ">"), t2.voidElement ? e2 : e2 + t2.children.reduce(a, "") + "</" + t2.name + ">";
case "comment":
return e2 + "<!--" + t2.comment + "-->";
}
}
var c = { parse: function(e2, t2) {
t2 || (t2 = {}), t2.components || (t2.components = s);
var a2, c2 = [], o = [], l = -1, m = false;
if (0 !== e2.indexOf("<")) {
var u = e2.indexOf("<");
c2.push({ type: "text", content: -1 === u ? e2 : e2.substring(0, u) });
}
return e2.replace(r, function(r2, s2) {
if (m) {
if (r2 !== "</" + a2.name + ">") return;
m = false;
}
var u2, f = "/" !== r2.charAt(1), h = r2.startsWith("<!--"), p = s2 + r2.length, d = e2.charAt(p);
if (h) {
var v = n(r2);
return l < 0 ? (c2.push(v), c2) : ((u2 = o[l]).children.push(v), c2);
}
if (f && (l++, "tag" === (a2 = n(r2)).type && t2.components[a2.name] && (a2.type = "component", m = true), a2.voidElement || m || !d || "<" === d || a2.children.push({ type: "text", content: e2.slice(p, e2.indexOf("<", p)) }), 0 === l && c2.push(a2), (u2 = o[l - 1]) && u2.children.push(a2), o[l] = a2), (!f || a2.voidElement) && (l > -1 && (a2.voidElement || a2.name === r2.slice(2, -1)) && (l--, a2 = -1 === l ? c2 : o[l]), !m && "<" !== d && d)) {
u2 = -1 === l ? c2 : o[l].children;
var x = e2.indexOf("<", p), g = e2.slice(p, -1 === x ? void 0 : x);
i.test(g) && (g = " "), (x > -1 && l + u2.length >= 0 || " " !== g) && u2.push({ type: "text", content: g });
}
}), c2;
}, stringify: function(e2) {
return e2.reduce(function(e3, t2) {
return e3 + a("", t2);
}, "");
} };
var html_parse_stringify_module_default = c;
// node_modules/react-i18next/dist/es/utils.js
function warn() {
if (console && console.warn) {
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
if (isString(args[0])) args[0] = `react-i18next:: ${args[0]}`;
console.warn(...args);
}
}
var alreadyWarned = {};
function warnOnce() {
for (var _len2 = arguments.length, args = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
args[_key2] = arguments[_key2];
}
if (isString(args[0]) && alreadyWarned[args[0]]) return;
if (isString(args[0])) alreadyWarned[args[0]] = /* @__PURE__ */ new Date();
warn(...args);
}
var loadedClb = (i18n, cb) => () => {
if (i18n.isInitialized) {
cb();
} else {
const initialized = () => {
setTimeout(() => {
i18n.off("initialized", initialized);
}, 0);
cb();
};
i18n.on("initialized", initialized);
}
};
var loadNamespaces = (i18n, ns, cb) => {
i18n.loadNamespaces(ns, loadedClb(i18n, cb));
};
var loadLanguages = (i18n, lng, ns, cb) => {
if (isString(ns)) ns = [ns];
ns.forEach((n2) => {
if (i18n.options.ns.indexOf(n2) < 0) i18n.options.ns.push(n2);
});
i18n.loadLanguages(lng, loadedClb(i18n, cb));
};
var oldI18nextHasLoadedNamespace = function(ns, i18n) {
let options = arguments.length > 2 && arguments[2] !== void 0 ? arguments[2] : {};
const lng = i18n.languages[0];
const fallbackLng = i18n.options ? i18n.options.fallbackLng : false;
const lastLng = i18n.languages[i18n.languages.length - 1];
if (lng.toLowerCase() === "cimode") return true;
const loadNotPending = (l, n2) => {
const loadState = i18n.services.backendConnector.state[`${l}|${n2}`];
return loadState === -1 || loadState === 2;
};
if (options.bindI18n && options.bindI18n.indexOf("languageChanging") > -1 && i18n.services.backendConnector.backend && i18n.isLanguageChangingTo && !loadNotPending(i18n.isLanguageChangingTo, ns)) return false;
if (i18n.hasResourceBundle(lng, ns)) return true;
if (!i18n.services.backendConnector.backend || i18n.options.resources && !i18n.options.partialBundledLanguages) return true;
if (loadNotPending(lng, ns) && (!fallbackLng || loadNotPending(lastLng, ns))) return true;
return false;
};
var hasLoadedNamespace = function(ns, i18n) {
let options = arguments.length > 2 && arguments[2] !== void 0 ? arguments[2] : {};
if (!i18n.languages || !i18n.languages.length) {
warnOnce("i18n.languages were undefined or empty", i18n.languages);
return true;
}
const isNewerI18next = i18n.options.ignoreJSONStructure !== void 0;
if (!isNewerI18next) {
return oldI18nextHasLoadedNamespace(ns, i18n, options);
}
return i18n.hasLoadedNamespace(ns, {
lng: options.lng,
precheck: (i18nInstance2, loadNotPending) => {
if (options.bindI18n && options.bindI18n.indexOf("languageChanging") > -1 && i18nInstance2.services.backendConnector.backend && i18nInstance2.isLanguageChangingTo && !loadNotPending(i18nInstance2.isLanguageChangingTo, ns)) return false;
}
});
};
var getDisplayName = (Component) => Component.displayName || Component.name || (isString(Component) && Component.length > 0 ? Component : "Unknown");
var isString = (obj) => typeof obj === "string";
var isObject = (obj) => typeof obj === "object" && obj !== null;
// node_modules/react-i18next/dist/es/unescape.js
var matchHtmlEntity = /&(?:amp|#38|lt|#60|gt|#62|apos|#39|quot|#34|nbsp|#160|copy|#169|reg|#174|hellip|#8230|#x2F|#47);/g;
var htmlEntities = {
"&amp;": "&",
"&#38;": "&",
"&lt;": "<",
"&#60;": "<",
"&gt;": ">",
"&#62;": ">",
"&apos;": "'",
"&#39;": "'",
"&quot;": '"',
"&#34;": '"',
"&nbsp;": " ",
"&#160;": " ",
"&copy;": "©",
"&#169;": "©",
"&reg;": "®",
"&#174;": "®",
"&hellip;": "…",
"&#8230;": "…",
"&#x2F;": "/",
"&#47;": "/"
};
var unescapeHtmlEntity = (m) => htmlEntities[m];
var unescape = (text) => text.replace(matchHtmlEntity, unescapeHtmlEntity);
// node_modules/react-i18next/dist/es/defaults.js
var defaultOptions = {
bindI18n: "languageChanged",
bindI18nStore: "",
transEmptyNodeValue: "",
transSupportBasicHtmlNodes: true,
transWrapTextNodes: "",
transKeepBasicHtmlNodesFor: ["br", "strong", "i", "p"],
useSuspense: true,
unescape
};
var setDefaults = function() {
let options = arguments.length > 0 && arguments[0] !== void 0 ? arguments[0] : {};
defaultOptions = {
...defaultOptions,
...options
};
};
var getDefaults = () => defaultOptions;
// node_modules/react-i18next/dist/es/i18nInstance.js
var i18nInstance;
var setI18n = (instance) => {
i18nInstance = instance;
};
var getI18n = () => i18nInstance;
// node_modules/react-i18next/dist/es/TransWithoutContext.js
var hasChildren = (node, checkLength) => {
if (!node) return false;
const base = node.props ? node.props.children : node.children;
if (checkLength) return base.length > 0;
return !!base;
};
var getChildren = (node) => {
if (!node) return [];
const children = node.props ? node.props.children : node.children;
return node.props && node.props.i18nIsDynamicList ? getAsArray(children) : children;
};
var hasValidReactChildren = (children) => Array.isArray(children) && children.every(import_react.isValidElement);
var getAsArray = (data) => Array.isArray(data) ? data : [data];
var mergeProps = (source, target) => {
const newTarget = {
...target
};
newTarget.props = Object.assign(source.props, target.props);
return newTarget;
};
var nodesToString = (children, i18nOptions) => {
if (!children) return "";
let stringNode = "";
const childrenArray = getAsArray(children);
const keepArray = i18nOptions.transSupportBasicHtmlNodes && i18nOptions.transKeepBasicHtmlNodesFor ? i18nOptions.transKeepBasicHtmlNodesFor : [];
childrenArray.forEach((child, childIndex) => {
if (isString(child)) {
stringNode += `${child}`;
} else if ((0, import_react.isValidElement)(child)) {
const {
props,
type
} = child;
const childPropsCount = Object.keys(props).length;
const shouldKeepChild = keepArray.indexOf(type) > -1;
const childChildren = props.children;
if (!childChildren && shouldKeepChild && !childPropsCount) {
stringNode += `<${type}/>`;
} else if (!childChildren && (!shouldKeepChild || childPropsCount) || props.i18nIsDynamicList) {
stringNode += `<${childIndex}></${childIndex}>`;
} else if (shouldKeepChild && childPropsCount === 1 && isString(childChildren)) {
stringNode += `<${type}>${childChildren}</${type}>`;
} else {
const content = nodesToString(childChildren, i18nOptions);
stringNode += `<${childIndex}>${content}</${childIndex}>`;
}
} else if (child === null) {
warn(`Trans: the passed in value is invalid - seems you passed in a null child.`);
} else if (isObject(child)) {
const {
format,
...clone
} = child;
const keys = Object.keys(clone);
if (keys.length === 1) {
const value = format ? `${keys[0]}, ${format}` : keys[0];
stringNode += `{{${value}}}`;
} else {
warn(`react-i18next: the passed in object contained more than one variable - the object should look like {{ value, format }} where format is optional.`, child);
}
} else {
warn(`Trans: the passed in value is invalid - seems you passed in a variable like {number} - please pass in variables for interpolation as full objects like {{number}}.`, child);
}
});
return stringNode;
};
// Rebuilds React elements from a translated string: parses the translation
// into a small HTML AST and maps numbered tags (<0>, <1>, ...) back onto the
// original children, kept basic HTML tags onto real DOM elements, and
// interpolates text nodes through i18next's interpolator.
var renderNodes = (children, targetString, i18n, i18nOptions, combinedTOpts, shouldUnescape) => {
  if (targetString === "") return [];
  const keepArray = i18nOptions.transKeepBasicHtmlNodesFor || [];
  // True when the translation contains kept HTML tags even though no React
  // children were passed (e.g. a plain t() result containing "<br").
  const emptyChildrenButNeedsHandling = targetString && new RegExp(keepArray.map((keep) => `<${keep}`).join("|")).test(targetString);
  if (!children && !emptyChildrenButNeedsHandling && !shouldUnescape) return [targetString];
  // Interpolation variables gathered from plain-object children (recursive).
  const data = {};
  const getData = (childs) => {
    const childrenArray = getAsArray(childs);
    childrenArray.forEach((child) => {
      if (isString(child)) return;
      if (hasChildren(child)) getData(getChildren(child));
      else if (isObject(child) && !(0, import_react.isValidElement)(child)) Object.assign(data, child);
    });
  };
  getData(children);
  // Wrap in a synthetic root <0> so the parser always yields one root node.
  const ast = html_parse_stringify_module_default.parse(`<0>${targetString}</0>`);
  const opts = {
    ...data,
    ...combinedTOpts
  };
  // Maps a matched React element's children against the AST node's children.
  const renderInner = (child, node, rootReactNode) => {
    const childs = getChildren(child);
    const mappedChildren = mapAST(childs, node.children, rootReactNode);
    // Dynamic lists (i18nIsDynamicList) and valid children with an empty
    // mapping keep the original children untouched.
    return hasValidReactChildren(childs) && mappedChildren.length === 0 || child.props && child.props.i18nIsDynamicList ? childs : mappedChildren;
  };
  // Appends the translated element to `mem`, cloning (dummy root) or
  // recreating the element with a stable key.
  const pushTranslatedJSX = (child, inner, mem, i2, isVoid) => {
    if (child.dummy) {
      child.children = inner;
      mem.push((0, import_react.cloneElement)(child, {
        key: i2
      }, isVoid ? void 0 : inner));
    } else {
      mem.push(...import_react.Children.map([child], (c2) => {
        const props = {
          ...c2.props
        };
        // Internal marker prop must not leak onto rendered elements.
        delete props.i18nIsDynamicList;
        return (0, import_react.createElement)(c2.type, {
          ...props,
          key: i2,
          ref: c2.ref
        }, isVoid ? null : inner);
      }));
    }
  };
  // Walks the AST and the React children in parallel, accumulating the final
  // array of React nodes / strings.
  const mapAST = (reactNode, astNode, rootReactNode) => {
    const reactNodes = getAsArray(reactNode);
    const astNodes = getAsArray(astNode);
    return astNodes.reduce((mem, node, i2) => {
      // Interpolated content of the tag's first text child, if present.
      const translationContent = node.children && node.children[0] && node.children[0].content && i18n.services.interpolator.interpolate(node.children[0].content, opts, i18n.language);
      if (node.type === "tag") {
        // Numeric tag names index into the passed children; named tags may
        // match keys of a components object passed as the single root child.
        let tmp = reactNodes[parseInt(node.name, 10)];
        if (rootReactNode.length === 1 && !tmp) tmp = rootReactNode[0][node.name];
        if (!tmp) tmp = {};
        const child = Object.keys(node.attrs).length !== 0 ? mergeProps({
          props: node.attrs
        }, tmp) : tmp;
        const isElement = (0, import_react.isValidElement)(child);
        const isValidTranslationWithChildren = isElement && hasChildren(node, true) && !node.voidElement;
        const isEmptyTransWithHTML = emptyChildrenButNeedsHandling && isObject(child) && child.dummy && !isElement;
        const isKnownComponent = isObject(children) && Object.hasOwnProperty.call(children, node.name);
        if (isString(child)) {
          const value = i18n.services.interpolator.interpolate(child, opts, i18n.language);
          mem.push(value);
        } else if (hasChildren(child) || isValidTranslationWithChildren) {
          const inner = renderInner(child, node, rootReactNode);
          pushTranslatedJSX(child, inner, mem, i2);
        } else if (isEmptyTransWithHTML) {
          // No children passed but translation contains kept HTML: build real
          // elements from the AST alone.
          const inner = mapAST(reactNodes, node.children, rootReactNode);
          pushTranslatedJSX(child, inner, mem, i2);
        } else if (Number.isNaN(parseFloat(node.name))) {
          // Non-numeric tag: named component, kept basic HTML tag, or a
          // literal-text fallback.
          if (isKnownComponent) {
            const inner = renderInner(child, node, rootReactNode);
            pushTranslatedJSX(child, inner, mem, i2, node.voidElement);
          } else if (i18nOptions.transSupportBasicHtmlNodes && keepArray.indexOf(node.name) > -1) {
            if (node.voidElement) {
              mem.push((0, import_react.createElement)(node.name, {
                key: `${node.name}-${i2}`
              }));
            } else {
              const inner = mapAST(reactNodes, node.children, rootReactNode);
              mem.push((0, import_react.createElement)(node.name, {
                key: `${node.name}-${i2}`
              }, inner));
            }
          } else if (node.voidElement) {
            mem.push(`<${node.name} />`);
          } else {
            const inner = mapAST(reactNodes, node.children, rootReactNode);
            mem.push(`<${node.name}>${inner}</${node.name}>`);
          }
        } else if (isObject(child) && !isElement) {
          // Interpolation object: emit its already-interpolated text content.
          const content = node.children[0] ? translationContent : null;
          if (content) mem.push(content);
        } else {
          pushTranslatedJSX(child, translationContent, mem, i2, node.children.length !== 1 || !translationContent);
        }
      } else if (node.type === "text") {
        const wrapTextNodes = i18nOptions.transWrapTextNodes;
        const content = shouldUnescape ? i18nOptions.unescape(i18n.services.interpolator.interpolate(node.content, opts, i18n.language)) : i18n.services.interpolator.interpolate(node.content, opts, i18n.language);
        if (wrapTextNodes) {
          // Optionally wrap raw text nodes in a configured element type.
          mem.push((0, import_react.createElement)(wrapTextNodes, {
            key: `${node.name}-${i2}`
          }, content));
        } else {
          mem.push(content);
        }
      }
      return mem;
    }, []);
  };
  // Start from a dummy root wrapping the original children; unwrap at the end.
  const result = mapAST([{
    dummy: true,
    children: children || []
  }], ast, getAsArray(children || []));
  return getChildren(result[0]);
};
// Context-free Trans implementation (exported as TransWithoutContext):
// serializes children into an i18next string, translates it, then rebuilds
// the React tree from the translated string via renderNodes.
function Trans(_ref) {
  let {
    children,
    count,
    parent,
    i18nKey,
    context,
    tOptions = {},
    values,
    defaults,
    components,
    ns,
    i18n: i18nFromProps,
    t: tFromProps,
    shouldUnescape,
    ...additionalProps
  } = _ref;
  const i18n = i18nFromProps || getI18n();
  if (!i18n) {
    // Without an instance we cannot translate; render children untranslated.
    warnOnce("You will need to pass in an i18next instance by using i18nextReactModule");
    return children;
  }
  const t2 = tFromProps || i18n.t.bind(i18n) || ((k) => k);
  const reactI18nextOptions = {
    ...getDefaults(),
    ...i18n.options && i18n.options.react
  };
  let namespaces = ns || t2.ns || i18n.options && i18n.options.defaultNS;
  namespaces = isString(namespaces) ? [namespaces] : namespaces || ["translation"];
  // String form of the children, e.g. "Hello <1>{{name}}</1>"; used as the
  // default value and (absent i18nKey) as the lookup key.
  const nodeAsString = nodesToString(children, reactI18nextOptions);
  const defaultValue = defaults || nodeAsString || reactI18nextOptions.transEmptyNodeValue || i18nKey;
  const {
    hashTransKey
  } = reactI18nextOptions;
  const key = i18nKey || (hashTransKey ? hashTransKey(nodeAsString || defaultValue) : nodeAsString || defaultValue);
  if (i18n.options && i18n.options.interpolation && i18n.options.interpolation.defaultVariables) {
    // NOTE: spread order means defaultVariables override explicit values here.
    values = values && Object.keys(values).length > 0 ? {
      ...values,
      ...i18n.options.interpolation.defaultVariables
    } : {
      ...i18n.options.interpolation.defaultVariables
    };
  }
  // When no values/count are supplied, switch to improbable interpolation
  // markers so literal "{{...}}" in the children survives translation intact.
  const interpolationOverride = values || count !== void 0 || !children ? tOptions.interpolation : {
    interpolation: {
      ...tOptions.interpolation,
      prefix: "#$?",
      suffix: "?$#"
    }
  };
  const combinedTOpts = {
    ...tOptions,
    context: context || tOptions.context,
    count,
    ...values,
    ...interpolationOverride,
    defaultValue,
    ns: namespaces
  };
  const translation = key ? t2(key, combinedTOpts) : defaultValue;
  if (components) {
    // Components that the translation references self-closed ("<comp/>") but
    // that were passed with children are wrapped so their children survive.
    Object.keys(components).forEach((c2) => {
      const comp = components[c2];
      if (typeof comp.type === "function" || !comp.props || !comp.props.children || translation.indexOf(`${c2}/>`) < 0 && translation.indexOf(`${c2} />`) < 0) return;
      function Componentized() {
        return (0, import_react.createElement)(import_react.Fragment, null, comp);
      }
      components[c2] = (0, import_react.createElement)(Componentized);
    });
  }
  const content = renderNodes(components || children, translation, i18n, reactI18nextOptions, combinedTOpts, shouldUnescape);
  // Optionally wrap the result in a parent element (defaultTransParent).
  const useAsParent = parent !== void 0 ? parent : reactI18nextOptions.defaultTransParent;
  return useAsParent ? (0, import_react.createElement)(useAsParent, additionalProps, content) : content;
}
// node_modules/react-i18next/dist/es/context.js
var import_react2 = __toESM(require_react(), 1);
// node_modules/react-i18next/dist/es/initReactI18next.js
// i18next plugin (type "3rdParty"): on i18next.use(...).init(), captures the
// react options as defaults and stores the instance for getI18n().
var initReactI18next = {
  type: "3rdParty",
  init: (instance) => {
    setDefaults(instance.options.react);
    setI18n(instance);
  }
};
// node_modules/react-i18next/dist/es/context.js
// React context carrying { i18n, defaultNS }; written by I18nextProvider and
// read by Trans2, useTranslation, and useSSR below.
var I18nContext = (0, import_react2.createContext)();
// Tracks which namespaces components actually used, so SSR code
// (getInitialProps) can ship only the translation bundles that are needed.
var ReportNamespaces = class {
  constructor() {
    // esbuild helper installs the arrow as an own field (lexical `this`).
    __publicField(this, "getUsedNamespaces", () => Object.keys(this.usedNamespaces));
    this.usedNamespaces = {};
  }
  // Record each namespace as used; repeated calls are idempotent.
  addUsedNamespaces(namespaces) {
    for (const ns of namespaces) {
      if (!this.usedNamespaces[ns]) this.usedNamespaces[ns] = true;
    }
  }
};
// Wraps a component's getInitialProps so the returned props also include the
// serialized i18n store and language produced by getInitialProps().
var composeInitialProps = (ForComponent) => async (ctx) => {
  let componentProps = {};
  if (ForComponent.getInitialProps) {
    componentProps = await ForComponent.getInitialProps(ctx);
  }
  return {
    ...componentProps,
    ...getInitialProps()
  };
};
// Collects, for every loaded language and each used namespace, the resource
// bundle into a serializable object for SSR hydration (see useSSR).
var getInitialProps = () => {
  const i18n = getI18n();
  const namespaces = i18n.reportNamespaces ? i18n.reportNamespaces.getUsedNamespaces() : [];
  const initialI18nStore = {};
  for (const lng of i18n.languages) {
    const bundles = {};
    for (const ns of namespaces) {
      // Fall back to an empty bundle when nothing is loaded for (lng, ns).
      bundles[ns] = i18n.getResourceBundle(lng, ns) || {};
    }
    initialI18nStore[lng] = bundles;
  }
  return {
    initialI18nStore,
    initialLanguage: i18n.language
  };
};
// node_modules/react-i18next/dist/es/Trans.js
// Context-aware <Trans>: resolves the i18n instance and default namespace
// from I18nContext, then delegates to the context-free Trans implementation.
function Trans2(_ref) {
  const {
    children,
    count,
    parent,
    i18nKey,
    context,
    tOptions = {},
    values,
    defaults,
    components,
    ns,
    i18n: i18nFromProps,
    t: tFromProps,
    shouldUnescape,
    ...additionalProps
  } = _ref;
  const contextValue = (0, import_react3.useContext)(I18nContext) || {};
  const i18n = i18nFromProps || contextValue.i18n || getI18n();
  const t2 = tFromProps || i18n && i18n.t.bind(i18n);
  // Namespace precedence: prop > bound t > context default > instance default.
  const resolvedNs = ns || t2 && t2.ns || contextValue.defaultNS || i18n && i18n.options && i18n.options.defaultNS;
  return Trans({
    children,
    count,
    parent,
    i18nKey,
    context,
    tOptions,
    values,
    defaults,
    components,
    ns: resolvedNs,
    i18n,
    t: tFromProps,
    shouldUnescape,
    ...additionalProps
  });
}
// node_modules/react-i18next/dist/es/useTranslation.js
var import_react4 = __toESM(require_react(), 1);
// Hook returning the value seen on the previous render; while `ignore` is
// truthy the stored value is frozen instead of being updated.
var usePrevious = (value, ignore) => {
  const prevRef = (0, import_react4.useRef)();
  (0, import_react4.useEffect)(() => {
    if (!ignore) prevRef.current = value;
  }, [value, ignore]);
  return prevRef.current;
};
// Creates a fresh fixed t function bound to language/namespace/keyPrefix.
var alwaysNewT = (i18n, language, namespace, keyPrefix) => {
  return i18n.getFixedT(language, namespace, keyPrefix);
};
// Memoized t: useCallback keeps the returned t reference stable across renders
// while (i18n, language, namespace, keyPrefix) are unchanged. Note that
// alwaysNewT(...) is still evaluated on every render; only the returned
// reference is stabilized.
var useMemoizedT = (i18n, language, namespace, keyPrefix) => (0, import_react4.useCallback)(alwaysNewT(i18n, language, namespace, keyPrefix), [i18n, language, namespace, keyPrefix]);
// React hook resolving translations: returns an array [t, i18n, ready] that
// also carries .t/.i18n/.ready properties. Missing namespaces are loaded
// asynchronously; in suspense mode a Promise is thrown until they are ready.
var useTranslation = function(ns) {
  let props = arguments.length > 1 && arguments[1] !== void 0 ? arguments[1] : {};
  const {
    i18n: i18nFromProps
  } = props;
  const {
    i18n: i18nFromContext,
    defaultNS: defaultNSFromContext
  } = (0, import_react4.useContext)(I18nContext) || {};
  const i18n = i18nFromProps || i18nFromContext || getI18n();
  if (i18n && !i18n.reportNamespaces) i18n.reportNamespaces = new ReportNamespaces();
  if (!i18n) {
    warnOnce("You will need to pass in an i18next instance by using initReactI18next");
    // Fallback t echoing the defaultValue or the (last) key, so UIs render.
    const notReadyT = (k, optsOrDefaultValue) => {
      if (isString(optsOrDefaultValue)) return optsOrDefaultValue;
      if (isObject(optsOrDefaultValue) && isString(optsOrDefaultValue.defaultValue)) return optsOrDefaultValue.defaultValue;
      return Array.isArray(k) ? k[k.length - 1] : k;
    };
    const retNotReady = [notReadyT, {}, false];
    retNotReady.t = notReadyT;
    retNotReady.i18n = {};
    retNotReady.ready = false;
    return retNotReady;
  }
  if (i18n.options.react && i18n.options.react.wait !== void 0) warnOnce("It seems you are still using the old wait option, you may migrate to the new useSuspense behaviour.");
  const i18nOptions = {
    ...getDefaults(),
    ...i18n.options.react,
    ...props
  };
  const {
    useSuspense,
    keyPrefix
  } = i18nOptions;
  let namespaces = ns || defaultNSFromContext || i18n.options && i18n.options.defaultNS;
  namespaces = isString(namespaces) ? [namespaces] : namespaces || ["translation"];
  if (i18n.reportNamespaces.addUsedNamespaces) i18n.reportNamespaces.addUsedNamespaces(namespaces);
  // Ready once i18next is initialized and every requested namespace loaded.
  const ready = (i18n.isInitialized || i18n.initializedStoreOnce) && namespaces.every((n2) => hasLoadedNamespace(n2, i18n, i18nOptions));
  const memoGetT = useMemoizedT(i18n, props.lng || null, i18nOptions.nsMode === "fallback" ? namespaces : namespaces[0], keyPrefix);
  const getT = () => memoGetT;
  const getNewT = () => alwaysNewT(i18n, props.lng || null, i18nOptions.nsMode === "fallback" ? namespaces : namespaces[0], keyPrefix);
  const [t2, setT] = (0, import_react4.useState)(getT);
  // Namespace/lng fingerprint used to detect changes across renders.
  let joinedNS = namespaces.join();
  if (props.lng) joinedNS = `${props.lng}${joinedNS}`;
  const previousJoinedNS = usePrevious(joinedNS);
  const isMounted = (0, import_react4.useRef)(true);
  (0, import_react4.useEffect)(() => {
    const {
      bindI18n,
      bindI18nStore
    } = i18nOptions;
    isMounted.current = true;
    // Outside suspense mode, kick off loading of missing namespaces and swap
    // in a fresh t when they arrive (guarded against unmount).
    if (!ready && !useSuspense) {
      if (props.lng) {
        loadLanguages(i18n, props.lng, namespaces, () => {
          if (isMounted.current) setT(getNewT);
        });
      } else {
        loadNamespaces(i18n, namespaces, () => {
          if (isMounted.current) setT(getNewT);
        });
      }
    }
    if (ready && previousJoinedNS && previousJoinedNS !== joinedNS && isMounted.current) {
      setT(getNewT);
    }
    const boundReset = () => {
      if (isMounted.current) setT(getNewT);
    };
    // Re-render on configured i18next / store events (space-separated lists).
    if (bindI18n && i18n) i18n.on(bindI18n, boundReset);
    if (bindI18nStore && i18n) i18n.store.on(bindI18nStore, boundReset);
    return () => {
      isMounted.current = false;
      if (bindI18n && i18n) bindI18n.split(" ").forEach((e2) => i18n.off(e2, boundReset));
      if (bindI18nStore && i18n) bindI18nStore.split(" ").forEach((e2) => i18n.store.off(e2, boundReset));
    };
  }, [i18n, joinedNS]);
  (0, import_react4.useEffect)(() => {
    // Refresh t when keyPrefix changes after the instance became ready.
    if (isMounted.current && ready) {
      setT(getT);
    }
  }, [i18n, keyPrefix, ready]);
  const ret = [t2, i18n, ready];
  ret.t = t2;
  ret.i18n = i18n;
  ret.ready = ready;
  if (ready) return ret;
  if (!ready && !useSuspense) return ret;
  // Suspense path: throw a Promise resolving once namespaces are loaded.
  throw new Promise((resolve) => {
    if (props.lng) {
      loadLanguages(i18n, props.lng, namespaces, () => resolve());
    } else {
      loadNamespaces(i18n, namespaces, () => resolve());
    }
  });
};
// node_modules/react-i18next/dist/es/withTranslation.js
var import_react5 = __toESM(require_react(), 1);
// HOC factory: injects t / i18n / tReady props via useTranslation. With
// options.withRef the ref is forwarded to the wrapped component; otherwise an
// incoming forwardedRef prop is passed through unchanged.
var withTranslation = (ns, options = {}) => function Extend(WrappedComponent) {
  function I18nextWithTranslation({ forwardedRef, ...rest }) {
    const [t2, i18n, ready] = useTranslation(ns, {
      ...rest,
      keyPrefix: options.keyPrefix
    });
    const passDownProps = {
      ...rest,
      t: t2,
      i18n,
      tReady: ready
    };
    if (forwardedRef) {
      if (options.withRef) passDownProps.ref = forwardedRef;
      else passDownProps.forwardedRef = forwardedRef;
    }
    return (0, import_react5.createElement)(WrappedComponent, passDownProps);
  }
  I18nextWithTranslation.displayName = `withI18nextTranslation(${getDisplayName(WrappedComponent)})`;
  I18nextWithTranslation.WrappedComponent = WrappedComponent;
  // Render-prop shim used when React.forwardRef wrapping is requested.
  const forwardRef = (props, ref) => (0, import_react5.createElement)(I18nextWithTranslation, Object.assign({}, props, {
    forwardedRef: ref
  }));
  return options.withRef ? (0, import_react5.forwardRef)(forwardRef) : I18nextWithTranslation;
};
// node_modules/react-i18next/dist/es/Translation.js
// Render-prop component: calls children(t, { i18n, lng }, ready) so consumers
// can translate inline without the hook.
function Translation({ ns, children, ...options }) {
  const [t2, i18n, ready] = useTranslation(ns, options);
  const helpers = {
    i18n,
    lng: i18n.language
  };
  return children(t2, helpers, ready);
}
// node_modules/react-i18next/dist/es/I18nextProvider.js
var import_react6 = __toESM(require_react(), 1);
// Provider publishing { i18n, defaultNS } through I18nContext; the context
// value is memoized so consumers only re-render when either input changes.
function I18nextProvider({ i18n, defaultNS, children }) {
  const value = (0, import_react6.useMemo)(
    () => ({ i18n, defaultNS }),
    [i18n, defaultNS]
  );
  return (0, import_react6.createElement)(I18nContext.Provider, { value }, children);
}
// node_modules/react-i18next/dist/es/withSSR.js
var import_react8 = __toESM(require_react(), 1);
// node_modules/react-i18next/dist/es/useSSR.js
var import_react7 = __toESM(require_react(), 1);
// SSR hydration hook: seeds the i18next resource store and active language
// from server-provided initial props. Each seeding step runs at most once
// (initializedStoreOnce / initializedLanguageOnce); cloned instances no-op.
var useSSR = function(initialI18nStore, initialLanguage, props = {}) {
  const { i18n: i18nFromProps } = props;
  const { i18n: i18nFromContext } = (0, import_react7.useContext)(I18nContext) || {};
  const i18n = i18nFromProps || i18nFromContext || getI18n();
  if (i18n.options && i18n.options.isClone) return;
  if (initialI18nStore && !i18n.initializedStoreOnce) {
    i18n.services.resourceStore.data = initialI18nStore;
    // Merge every namespace present in the store into i18n.options.ns (deduped).
    const nsList = i18n.options.ns;
    for (const lngResources of Object.values(initialI18nStore)) {
      for (const ns of Object.keys(lngResources)) {
        if (nsList.indexOf(ns) < 0) nsList.push(ns);
      }
    }
    i18n.options.ns = nsList;
    i18n.initializedStoreOnce = true;
    i18n.isInitialized = true;
  }
  if (initialLanguage && !i18n.initializedLanguageOnce) {
    i18n.changeLanguage(initialLanguage);
    i18n.initializedLanguageOnce = true;
  }
};
// node_modules/react-i18next/dist/es/withSSR.js
// HOC factory: hydrates SSR i18n state (via useSSR) before rendering the
// wrapped component, and exposes getInitialProps for Next.js-style data flow.
var withSSR = () => function Extend(WrappedComponent) {
  function I18nextWithSSR({ initialI18nStore, initialLanguage, ...rest }) {
    useSSR(initialI18nStore, initialLanguage);
    return (0, import_react8.createElement)(WrappedComponent, { ...rest });
  }
  I18nextWithSSR.getInitialProps = composeInitialProps(WrappedComponent);
  I18nextWithSSR.displayName = `withI18nextSSR(${getDisplayName(WrappedComponent)})`;
  I18nextWithSSR.WrappedComponent = WrappedComponent;
  return I18nextWithSSR;
};
// node_modules/react-i18next/dist/es/index.js
// ICU-macro placeholder exports: real implementations live in the babel macro
// plugin; at runtime they only exist so imports resolve, always yielding "".
var date = function() { return ""; };
var time = function() { return ""; };
var number = function() { return ""; };
var select = function() { return ""; };
var plural = function() { return ""; };
var selectOrdinal = function() { return ""; };
export {
I18nContext,
I18nextProvider,
Trans2 as Trans,
Trans as TransWithoutContext,
Translation,
composeInitialProps,
date,
getDefaults,
getI18n,
getInitialProps,
initReactI18next,
number,
plural,
select,
selectOrdinal,
setDefaults,
setI18n,
time,
useSSR,
useTranslation,
withSSR,
withTranslation
};
//# sourceMappingURL=react-i18next.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,6 @@
import {
require_react
} from "./chunk-FXM4O2JN.js";
import "./chunk-EQCVQC35.js";
export default require_react();
//# sourceMappingURL=react.js.map

View File

@@ -0,0 +1,7 @@
{
"version": 3,
"sources": [],
"sourcesContent": [],
"mappings": "",
"names": []
}

View File

@@ -0,0 +1,39 @@
import {
__commonJS
} from "./chunk-EQCVQC35.js";
// node_modules/react/cjs/react-jsx-dev-runtime.production.min.js
var require_react_jsx_dev_runtime_production_min = __commonJS({
"node_modules/react/cjs/react-jsx-dev-runtime.production.min.js"(exports) {
"use strict";
var a = Symbol.for("react.fragment");
exports.Fragment = a;
exports.jsxDEV = void 0;
}
});
// node_modules/react/jsx-dev-runtime.js
var require_jsx_dev_runtime = __commonJS({
"node_modules/react/jsx-dev-runtime.js"(exports, module) {
if (true) {
module.exports = require_react_jsx_dev_runtime_production_min();
} else {
module.exports = null;
}
}
});
export default require_jsx_dev_runtime();
/*! Bundled license information:
react/cjs/react-jsx-dev-runtime.production.min.js:
(**
* @license React
* react-jsx-dev-runtime.production.min.js
*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
*/
//# sourceMappingURL=react_jsx-dev-runtime.js.map

View File

@@ -0,0 +1,7 @@
{
"version": 3,
"sources": ["../../react/cjs/react-jsx-dev-runtime.production.min.js", "../../react/jsx-dev-runtime.js"],
"sourcesContent": ["/**\n * @license React\n * react-jsx-dev-runtime.production.min.js\n *\n * Copyright (c) Facebook, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n'use strict';var a=Symbol.for(\"react.fragment\");exports.Fragment=a;exports.jsxDEV=void 0;\n", "'use strict';\n\nif (process.env.NODE_ENV === 'production') {\n module.exports = require('./cjs/react-jsx-dev-runtime.production.min.js');\n} else {\n module.exports = require('./cjs/react-jsx-dev-runtime.development.js');\n}\n"],
"mappings": ";;;;;AAAA;AAAA;AAAA;AASa,QAAI,IAAE,OAAO,IAAI,gBAAgB;AAAE,YAAQ,WAAS;AAAE,YAAQ,SAAO;AAAA;AAAA;;;ACTlF;AAAA;AAEA,QAAI,MAAuC;AACzC,aAAO,UAAU;AAAA,IACnB,OAAO;AACL,aAAO,UAAU;AAAA,IACnB;AAAA;AAAA;",
"names": []
}

View File

@@ -0,0 +1,57 @@
import {
require_react
} from "./chunk-FXM4O2JN.js";
import {
__commonJS
} from "./chunk-EQCVQC35.js";
// node_modules/react/cjs/react-jsx-runtime.production.min.js
var require_react_jsx_runtime_production_min = __commonJS({
"node_modules/react/cjs/react-jsx-runtime.production.min.js"(exports) {
"use strict";
var f = require_react();
var k = Symbol.for("react.element");
var l = Symbol.for("react.fragment");
var m = Object.prototype.hasOwnProperty;
var n = f.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED.ReactCurrentOwner;
var p = { key: true, ref: true, __self: true, __source: true };
function q(c, a, g) {
var b, d = {}, e = null, h = null;
void 0 !== g && (e = "" + g);
void 0 !== a.key && (e = "" + a.key);
void 0 !== a.ref && (h = a.ref);
for (b in a) m.call(a, b) && !p.hasOwnProperty(b) && (d[b] = a[b]);
if (c && c.defaultProps) for (b in a = c.defaultProps, a) void 0 === d[b] && (d[b] = a[b]);
return { $$typeof: k, type: c, key: e, ref: h, props: d, _owner: n.current };
}
exports.Fragment = l;
exports.jsx = q;
exports.jsxs = q;
}
});
// node_modules/react/jsx-runtime.js
var require_jsx_runtime = __commonJS({
"node_modules/react/jsx-runtime.js"(exports, module) {
if (true) {
module.exports = require_react_jsx_runtime_production_min();
} else {
module.exports = null;
}
}
});
export default require_jsx_runtime();
/*! Bundled license information:
react/cjs/react-jsx-runtime.production.min.js:
(**
* @license React
* react-jsx-runtime.production.min.js
*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
*/
//# sourceMappingURL=react_jsx-runtime.js.map

Some files were not shown because too many files have changed in this diff Show More