diff --git a/.github/workflows/check-redis.yml b/.github/workflows/check-redis.yml deleted file mode 100644 index e5e9ff0d..00000000 --- a/.github/workflows/check-redis.yml +++ /dev/null @@ -1,20 +0,0 @@ -name: Check Redis -on: - schedule: - - cron: '*/5 * * * *' - -env: - BULL_AUTH_KEY: ${{ secrets.BULL_AUTH_KEY }} - -jobs: - clean-jobs: - runs-on: ubuntu-latest - steps: - - name: Send GET request to check queues - run: | - response=$(curl --write-out '%{http_code}' --silent --output /dev/null --max-time 180 https://api.firecrawl.dev/admin/${{ secrets.BULL_AUTH_KEY }}/redis-health) - if [ "$response" -ne 200 ]; then - echo "Failed to check queues. Response: $response" - exit 1 - fi - echo "Successfully checked queues. Response: $response" diff --git a/.github/workflows/fly-direct.yml b/.github/workflows/fly-direct.yml index f846098d..aea0a48e 100644 --- a/.github/workflows/fly-direct.yml +++ b/.github/workflows/fly-direct.yml @@ -1,7 +1,7 @@ name: Fly Deploy Direct on: schedule: - - cron: '0 */6 * * *' + - cron: '0 */2 * * *' env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} diff --git a/.github/workflows/fly.yml b/.github/workflows/fly.yml index 3f34f860..9e4b85a8 100644 --- a/.github/workflows/fly.yml +++ b/.github/workflows/fly.yml @@ -169,6 +169,41 @@ jobs: run: npm run test working-directory: ./apps/js-sdk/firecrawl + go-sdk-tests: + name: Go SDK Tests + needs: pre-deploy-e2e-tests + runs-on: ubuntu-latest + services: + redis: + image: redis + ports: + - 6379:6379 + steps: + - uses: actions/checkout@v3 + - name: Set up Go + uses: actions/setup-go@v5 + with: + go-version-file: "go.mod" + - name: Install pnpm + run: npm install -g pnpm + - name: Install dependencies + run: pnpm install + working-directory: ./apps/api + - name: Start the application + run: npm start & + working-directory: ./apps/api + id: start_app + - name: Start workers + run: npm run workers & + working-directory: ./apps/api + id: start_workers + - name: Install dependencies for Go SDK + run: go mod tidy + working-directory: ./apps/go-sdk + - name: Run tests for Go SDK + run: go test -v ./... 
-timeout 180s + working-directory: ./apps/go-sdk/firecrawl + deploy: name: Deploy app runs-on: ubuntu-latest diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..d56adf88 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,6 @@ +[submodule "apps/go-sdk/firecrawl"] + path = apps/go-sdk/firecrawl + url = https://github.com/mendableai/firecrawl-go +[submodule "apps/go-sdk/examples"] + path = apps/go-sdk/examples + url = https://github.com/mendableai/firecrawl-go-examples diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index cece879b..d0145a6b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -44,7 +44,6 @@ BULL_AUTH_KEY= @ LOGTAIL_KEY= # Use if you're configuring basic logging with logtail PLAYWRIGHT_MICROSERVICE_URL= # set if you'd like to run a playwright fallback LLAMAPARSE_API_KEY= #Set if you have a llamaparse key you'd like to use to parse pdfs -SERPER_API_KEY= #Set if you have a serper key you'd like to use as a search api SLACK_WEBHOOK_URL= # set if you'd like to send slack server health status messages POSTHOG_API_KEY= # set if you'd like to send posthog events like job logs POSTHOG_HOST= # set if you'd like to send posthog events like job logs diff --git a/SELF_HOST.md b/SELF_HOST.md index f3a4a4b1..f631cf18 100644 --- a/SELF_HOST.md +++ b/SELF_HOST.md @@ -1,36 +1,76 @@ -## Self-hosting Firecrawl +# Self-hosting Firecrawl -_We're currently working on a more in-depth guide on how to self-host, but in the meantime, here is a simplified version._ +#### Contributor? -Refer to [CONTRIBUTING.md](https://github.com/mendableai/firecrawl/blob/main/CONTRIBUTING.md) for instructions on how to run it locally. +Welcome to [Firecrawl](https://firecrawl.dev) 🔥! Here are some instructions on how to set up the project locally so you can run it on your own and contribute. -## Getting Started +If you're contributing, note that the process is similar to other open-source repos, i.e., fork Firecrawl, make changes, run tests, PR. -First, clone this repository and copy the example env file from the API folder `.env.example` to `.env`. +If you have any questions or would like help getting on board, join our Discord community [here](https://discord.gg/gSmWdAkdwd) for more information or submit an issue on GitHub [here](https://github.com/mendableai/firecrawl/issues/new/choose)! -### Steps +## Why? -1. Clone the repository: - - ```bash - git clone https://github.com/mendableai/firecrawl.git - cd firecrawl - cp ./apps/api/.env.example ./.env - ``` - -2. For running the simplest version of FireCrawl, edit the `USE_DB_AUTHENTICATION` in `.env` to not use the database authentication: - - ```plaintext - USE_DB_AUTHENTICATION=false - ``` - -3. Update the Redis URL in the .env file to align with the Docker configuration: - - ```plaintext - REDIS_URL=redis://redis:6379 - ``` - -4. #### Option: Running with TypeScript Playwright Service +Self-hosting Firecrawl is particularly beneficial for organizations with stringent security policies that require data to remain within controlled environments. Here are some key reasons to consider self-hosting: + +- **Enhanced Security and Compliance:** By self-hosting, you ensure that all data handling and processing complies with internal and external regulations, keeping sensitive information within your secure infrastructure. Note that Firecrawl is a Mendable product and holds SOC 2 Type 2 certification, which means that the platform adheres to high industry standards for managing data security.
+- **Customizable Services:** Self-hosting allows you to tailor the services, such as the Playwright service, to meet specific needs or handle particular use cases that may not be supported by the standard cloud offering. +- **Learning and Community Contribution:** By setting up and maintaining your own instance, you gain a deeper understanding of how Firecrawl works, which can also lead to more meaningful contributions to the project. + +### Considerations + +However, there are some limitations and additional responsibilities to be aware of: + +1. **Limited Access to Fire-engine:** Currently, self-hosted instances of Firecrawl do not have access to Fire-engine, which includes advanced features for handling IP blocks, robot detection mechanisms, and more. This means that while you can manage basic scraping tasks, more complex scenarios might require additional configuration or might not be supported. +2. **Manual Configuration Required:** If you need to use scraping methods beyond the basic fetch and Playwright options, you will need to manually configure these in the `.env` file. This requires a deeper understanding of the technologies and might involve more setup time. + +Self-hosting Firecrawl is ideal for those who need full control over their scraping and data processing environments but comes with the trade-off of additional maintenance and configuration efforts. + +## Steps + +1. First, install the dependencies: + +- Docker [instructions](https://docs.docker.com/get-docker/) + + +2. Set environment variables + +Create a `.env` file in the root directory; you can copy over the template in `apps/api/.env.example`. + +To start, we won't set up authentication or any optional sub-services (PDF parsing, JS blocking support, AI features). + +`.env:` +``` +# ===== Required ENVS ====== +NUM_WORKERS_PER_QUEUE=8 +PORT=3002 +HOST=0.0.0.0 +REDIS_URL=redis://redis:6379 +REDIS_RATE_LIMIT_URL=redis://redis:6379 + +## To turn on DB authentication, you need to set up supabase. +USE_DB_AUTHENTICATION=false + +# ===== Optional ENVS ====== + +# Supabase Setup (used to support DB authentication, advanced logging, etc.) +SUPABASE_ANON_TOKEN= +SUPABASE_URL= +SUPABASE_SERVICE_TOKEN= + +# Other Optionals +TEST_API_KEY= # use if you've set up authentication and want to test with a real API key +SCRAPING_BEE_API_KEY= #Set if you'd like to use ScrapingBee to handle JS blocking +OPENAI_API_KEY= # add for LLM dependent features (image alt generation, etc.) +BULL_AUTH_KEY=@ +LOGTAIL_KEY= # Use if you're configuring basic logging with logtail +PLAYWRIGHT_MICROSERVICE_URL= # set if you'd like to run a playwright fallback +LLAMAPARSE_API_KEY= #Set if you have a llamaparse key you'd like to use to parse pdfs +SLACK_WEBHOOK_URL= # set if you'd like to send slack server health status messages +POSTHOG_API_KEY= # set if you'd like to send posthog events like job logs +POSTHOG_HOST= # set if you'd like to send posthog events like job logs +``` + +3. *(Optional)* Running with TypeScript Playwright Service * Update the `docker-compose.yml` file to change the Playwright service: @@ -49,16 +89,91 @@ First, clone this repository and copy the example env file from the API folder ` ``` * Don't forget to set the proxy server in your `.env` file as needed. -5. Build and run the Docker containers: + +4. Build and run the Docker containers: ```bash docker compose build docker compose up ``` - This will run a local instance of Firecrawl which can be accessed at `http://localhost:3002`.
+You should be able to see the Bull Queue Manager UI on `http://localhost:3002/admin/@/queues`. + +5. *(Optional)* Test the API + +If you’d like to test the crawl endpoint, you can run this: + + ```bash + curl -X POST http://localhost:3002/v0/crawl \ + -H 'Content-Type: application/json' \ + -d '{ + "url": "https://mendable.ai" + }' + ``` + +## Troubleshooting + +This section provides solutions to common issues you might encounter while setting up or running your self-hosted instance of Firecrawl. + +### Supabase client is not configured + +**Symptom:** +```bash +[YYYY-MM-DDTHH:MM:SS.SSSz]ERROR - Attempted to access Supabase client when it's not configured. +[YYYY-MM-DDTHH:MM:SS.SSSz]ERROR - Error inserting scrape event: Error: Supabase client is not configured. +``` + +**Explanation:** +This error occurs because the Supabase client setup is not completed. You should still be able to scrape and crawl with no problems. Right now it's not possible to configure Supabase in self-hosted instances. + +### You're bypassing authentication + +**Symptom:** +```bash +[YYYY-MM-DDTHH:MM:SS.SSSz]WARN - You're bypassing authentication +``` + +**Explanation:** +This warning appears because the Supabase client setup is not completed. You should still be able to scrape and crawl with no problems. Right now it's not possible to configure Supabase in self-hosted instances. + +### Docker containers fail to start + +**Symptom:** +Docker containers exit unexpectedly or fail to start. + +**Solution:** +Check the Docker logs for any error messages using the command: +```bash +docker logs [container_name] +``` + +- Ensure all required environment variables are set correctly in the `.env` file. +- Verify that all Docker services defined in `docker-compose.yml` are correctly configured and the necessary images are available. + +### Connection issues with Redis + +**Symptom:** +Errors related to connecting to Redis, such as timeouts or "Connection refused". + +**Solution:** +- Ensure that the Redis service is up and running in your Docker environment. +- Verify that the REDIS_URL and REDIS_RATE_LIMIT_URL in your `.env` file point to the correct Redis instance and match the URL used in the `docker-compose.yaml` file (`redis://redis:6379`). +- Check network settings and firewall rules that may block the connection to the Redis port. + +### API endpoint does not respond + +**Symptom:** +API requests to the Firecrawl instance time out or return no response. + +**Solution:** +- Ensure that the Firecrawl service is running by checking the Docker container status. +- Verify that the PORT and HOST settings in your `.env` file are correct and that no other service is using the same port. +- Check the network configuration to ensure that the host is accessible from the client making the API request. + +By addressing these common issues, you can ensure a smoother setup and operation of your self-hosted Firecrawl instance. + ## Install Firecrawl on a Kubernetes Cluster (Simple Version) -Read the [examples/kubernetes-cluster-install/README.md](https://github.com/mendableai/firecrawl/blob/main/examples/kubernetes-cluster-install/README.md) for instructions on how to install Firecrawl on a Kubernetes Cluster. +Read the [examples/kubernetes-cluster-install/README.md](https://github.com/mendableai/firecrawl/blob/main/examples/kubernetes-cluster-install/README.md) for instructions on how to install Firecrawl on a Kubernetes Cluster.
\ No newline at end of file diff --git a/apps/api/.env.example b/apps/api/.env.example index 08ff7d7f..f3c1dc1b 100644 --- a/apps/api/.env.example +++ b/apps/api/.env.example @@ -2,8 +2,8 @@ NUM_WORKERS_PER_QUEUE=8 PORT=3002 HOST=0.0.0.0 -REDIS_URL=redis://localhost:6379 -REDIS_RATE_LIMIT_URL=redis://localhost:6379 +REDIS_URL=redis://redis:6379 #for self-hosting using docker, use redis://redis:6379. For running locally, use redis://localhost:6379 +REDIS_RATE_LIMIT_URL=redis://redis:6379 #for self-hosting using docker, use redis://redis:6379. For running locally, use redis://localhost:6379 PLAYWRIGHT_MICROSERVICE_URL=http://playwright-service:3000/html ## To turn on DB authentication, you need to set up supabase. @@ -17,18 +17,27 @@ SUPABASE_URL= SUPABASE_SERVICE_TOKEN= # Other Optionals -TEST_API_KEY= # use if you've set up authentication and want to test with a real API key -RATE_LIMIT_TEST_API_KEY_SCRAPE= # set if you'd like to test the scraping rate limit -RATE_LIMIT_TEST_API_KEY_CRAWL= # set if you'd like to test the crawling rate limit -SCRAPING_BEE_API_KEY= #Set if you'd like to use scraping Be to handle JS blocking -OPENAI_API_KEY= # add for LLM dependednt features (image alt generation, etc.) -BULL_AUTH_KEY= @ -LOGTAIL_KEY= # Use if you're configuring basic logging with logtail -LLAMAPARSE_API_KEY= #Set if you have a llamaparse key you'd like to use to parse pdfs -SERPER_API_KEY= #Set if you have a serper key you'd like to use as a search api -SLACK_WEBHOOK_URL= # set if you'd like to send slack server health status messages -POSTHOG_API_KEY= # set if you'd like to send posthog events like job logs -POSTHOG_HOST= # set if you'd like to send posthog events like job logs +# use if you've set up authentication and want to test with a real API key +TEST_API_KEY= +# set if you'd like to test the scraping rate limit +RATE_LIMIT_TEST_API_KEY_SCRAPE= +# set if you'd like to test the crawling rate limit +RATE_LIMIT_TEST_API_KEY_CRAWL= +# set if you'd like to use ScrapingBee to handle JS blocking +SCRAPING_BEE_API_KEY= +# add for LLM dependent features (image alt generation, etc.)
+OPENAI_API_KEY= +BULL_AUTH_KEY=@ +# use if you're configuring basic logging with logtail +LOGTAIL_KEY= +# set if you have a llamaparse key you'd like to use to parse pdfs +LLAMAPARSE_API_KEY= +# set if you'd like to send slack server health status messages +SLACK_WEBHOOK_URL= +# set if you'd like to send posthog events like job logs +POSTHOG_API_KEY= +# set if you'd like to send posthog events like job logs +POSTHOG_HOST= STRIPE_PRICE_ID_STANDARD= STRIPE_PRICE_ID_SCALE= @@ -43,7 +52,8 @@ STRIPE_PRICE_ID_GROWTH_YEARLY= HYPERDX_API_KEY= HDX_NODE_BETA_MODE=1 -FIRE_ENGINE_BETA_URL= # set if you'd like to use the fire engine closed beta +# set if you'd like to use the fire engine closed beta +FIRE_ENGINE_BETA_URL= # Proxy Settings for Playwright (Alternative you can can use a proxy service like oxylabs, which rotates IPs for you on every request) PROXY_SERVER= diff --git a/apps/api/.gitignore b/apps/api/.gitignore index edc2faf4..7dcbabe9 100644 --- a/apps/api/.gitignore +++ b/apps/api/.gitignore @@ -6,3 +6,5 @@ dump.rdb /mongo-data /.next/ + +.rdb \ No newline at end of file diff --git a/apps/api/fly.staging.toml b/apps/api/fly.staging.toml index 7a5e0848..db1ed183 100644 --- a/apps/api/fly.staging.toml +++ b/apps/api/fly.staging.toml @@ -24,8 +24,8 @@ kill_timeout = '30s' [http_service.concurrency] type = "requests" - hard_limit = 100 - soft_limit = 50 + # hard_limit = 100 + soft_limit = 100 [[http_service.checks]] grace_period = "10s" @@ -51,12 +51,13 @@ kill_timeout = '30s' [services.concurrency] type = 'connections' - hard_limit = 25 - soft_limit = 20 + # hard_limit = 25 + soft_limit = 100 [[vm]] - size = 'performance-1x' + size = 'performance-2x' processes = ['app','worker'] + memory = 8192 diff --git a/apps/api/fly.toml b/apps/api/fly.toml index 94108e5f..c0c87401 100644 --- a/apps/api/fly.toml +++ b/apps/api/fly.toml @@ -24,8 +24,8 @@ kill_timeout = '30s' [http_service.concurrency] type = "requests" - hard_limit = 200 - soft_limit = 75 + # hard_limit = 200 + soft_limit = 200 [[http_service.checks]] grace_period = "20s" @@ -50,8 +50,8 @@ kill_timeout = '30s' [services.concurrency] type = 'connections' - hard_limit = 30 - soft_limit = 12 + # hard_limit = 30 + soft_limit = 200 [[vm]] size = 'performance-4x' diff --git a/apps/api/openapi-v0.json b/apps/api/openapi-v0.json new file mode 100644 index 00000000..40272385 --- /dev/null +++ b/apps/api/openapi-v0.json @@ -0,0 +1,924 @@ +{ + "openapi": "3.0.0", + "info": { + "title": "Firecrawl API", + "version": "0.0.0", + "description": "API for interacting with Firecrawl services to perform web scraping and crawling tasks.", + "contact": { + "name": "Firecrawl Support", + "url": "https://firecrawl.dev/support", + "email": "support@firecrawl.dev" + } + }, + "servers": [ + { + "url": "https://api.firecrawl.dev/v0" + } + ], + "paths": { + "/scrape": { + "post": { + "summary": "Scrape a single URL and optionally extract information using an LLM", + "operationId": "scrapeAndExtractFromUrl", + "tags": ["Scraping"], + "security": [ + { + "bearerAuth": [] + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "url": { + "type": "string", + "format": "uri", + "description": "The URL to scrape" + }, + "pageOptions": { + "type": "object", + "properties": { + "headers": { + "type": "object", + "description": "Headers to send with the request. Can be used to send cookies, user-agent, etc." 
+ }, + "includeHtml": { + "type": "boolean", + "description": "Include the HTML version of the content on page. Will output a html key in the response.", + "default": false + }, + "includeRawHtml": { + "type": "boolean", + "description": "Include the raw HTML content of the page. Will output a rawHtml key in the response.", + "default": false + }, + "onlyIncludeTags": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Only include tags, classes and ids from the page in the final output. Use comma separated values. Example: 'script, .ad, #footer'" + }, + "onlyMainContent": { + "type": "boolean", + "description": "Only return the main content of the page excluding headers, navs, footers, etc.", + "default": false + }, + "removeTags": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Tags, classes and ids to remove from the page. Use comma separated values. Example: 'script, .ad, #footer'" + }, + "replaceAllPathsWithAbsolutePaths": { + "type": "boolean", + "description": "Replace all relative paths with absolute paths for images and links", + "default": false + }, + "screenshot": { + "type": "boolean", + "description": "Include a screenshot of the top of the page that you are scraping.", + "default": false + }, + "fullPageScreenshot": { + "type": "boolean", + "description": "Include a full page screenshot of the page that you are scraping.", + "default": false + }, + "waitFor": { + "type": "integer", + "description": "Wait x amount of milliseconds for the page to load to fetch content", + "default": 0 + } + } + }, + "extractorOptions": { + "type": "object", + "description": "Options for extraction of structured information from the page content. Note: LLM-based extraction is not performed by default and only occurs when explicitly configured. The 'markdown' mode simply returns the scraped markdown and is the default mode for scraping.", + "default": {}, + "properties": { + "mode": { + "type": "string", + "enum": ["markdown", "llm-extraction", "llm-extraction-from-raw-html", "llm-extraction-from-markdown"], + "description": "The extraction mode to use. 'markdown': Returns the scraped markdown content, does not perform LLM extraction. 'llm-extraction': Extracts information from the cleaned and parsed content using LLM. 'llm-extraction-from-raw-html': Extracts information directly from the raw HTML using LLM. 'llm-extraction-from-markdown': Extracts information from the markdown content using LLM." + }, + "extractionPrompt": { + "type": "string", + "description": "A prompt describing what information to extract from the page, applicable for LLM extraction modes." + }, + "extractionSchema": { + "type": "object", + "additionalProperties": true, + "description": "The schema for the data to be extracted, required only for LLM extraction modes.", + "required": [ + "company_mission", + "supports_sso", + "is_open_source" + ] + } + } + }, + "timeout": { + "type": "integer", + "description": "Timeout in milliseconds for the request", + "default": 30000 + } + }, + "required": ["url"] + } + } + } + }, + "responses": { + "200": { + "description": "Successful response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ScrapeResponse" + } + } + } + }, + "402": { + "description": "Payment required", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string", + "example": "Payment required to access this resource." 
+ } + } + } + } + } + }, + "429": { + "description": "Too many requests", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string", + "example": "Request rate limit exceeded. Please wait and try again later." + } + } + } + } + } + }, + "500": { + "description": "Server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string", + "example": "An unexpected error occurred on the server." + } + } + } + } + } + } + } + } + }, + "/crawl": { + "post": { + "summary": "Crawl multiple URLs based on options", + "operationId": "crawlUrls", + "tags": ["Crawling"], + "security": [ + { + "bearerAuth": [] + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "url": { + "type": "string", + "format": "uri", + "description": "The base URL to start crawling from" + }, + "crawlerOptions": { + "type": "object", + "properties": { + "includes": { + "type": "array", + "items": { + "type": "string" + }, + "description": "URL patterns to include" + }, + "excludes": { + "type": "array", + "items": { + "type": "string" + }, + "description": "URL patterns to exclude" + }, + "generateImgAltText": { + "type": "boolean", + "description": "Generate alt text for images using LLMs (must have a paid plan)", + "default": false + }, + "returnOnlyUrls": { + "type": "boolean", + "description": "If true, returns only the URLs as a list on the crawl status. Attention: the return response will be a list of URLs inside the data, not a list of documents.", + "default": false + }, + "maxDepth": { + "type": "integer", + "description": "Maximum depth to crawl relative to the entered URL. A maxDepth of 0 scrapes only the entered URL. A maxDepth of 1 scrapes the entered URL and all pages one level deep. A maxDepth of 2 scrapes the entered URL and all pages up to two levels deep. Higher values follow the same pattern." + }, + "mode": { + "type": "string", + "enum": ["default", "fast"], + "description": "The crawling mode to use. Fast mode crawls 4x faster websites without sitemap, but may not be as accurate and shouldn't be used in heavy js-rendered websites.", + "default": "default" + }, + "ignoreSitemap": { + "type": "boolean", + "description": "Ignore the website sitemap when crawling", + "default": false + }, + "limit": { + "type": "integer", + "description": "Maximum number of pages to crawl", + "default": 10000 + }, + "allowBackwardCrawling": { + "type": "boolean", + "description": "Enables the crawler to navigate from a specific URL to previously linked pages. For instance, from 'example.com/product/123' back to 'example.com/product'", + "default": false + }, + "allowExternalContentLinks": { + "type": "boolean", + "description": "Allows the crawler to follow links to external websites.", + "default": false + } + } + }, + "pageOptions": { + "type": "object", + "properties": { + "headers": { + "type": "object", + "description": "Headers to send with the request. Can be used to send cookies, user-agent, etc." + }, + "includeHtml": { + "type": "boolean", + "description": "Include the HTML version of the content on page. Will output a html key in the response.", + "default": false + }, + "includeRawHtml": { + "type": "boolean", + "description": "Include the raw HTML content of the page. 
Will output a rawHtml key in the response.", + "default": false + }, + "onlyIncludeTags": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Only include tags, classes and ids from the page in the final output. Use comma separated values. Example: 'script, .ad, #footer'" + }, + "onlyMainContent": { + "type": "boolean", + "description": "Only return the main content of the page excluding headers, navs, footers, etc.", + "default": false + }, + "removeTags": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Tags, classes and ids to remove from the page. Use comma separated values. Example: 'script, .ad, #footer'" + }, + "replaceAllPathsWithAbsolutePaths": { + "type": "boolean", + "description": "Replace all relative paths with absolute paths for images and links", + "default": false + }, + "screenshot": { + "type": "boolean", + "description": "Include a screenshot of the top of the page that you are scraping.", + "default": false + }, + "fullPageScreenshot": { + "type": "boolean", + "description": "Include a full page screenshot of the page that you are scraping.", + "default": false + }, + "waitFor": { + "type": "integer", + "description": "Wait x amount of milliseconds for the page to load to fetch content", + "default": 0 + } + } + } + }, + "required": ["url"] + } + } + } + }, + "responses": { + "200": { + "description": "Successful response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CrawlResponse" + } + } + } + }, + "402": { + "description": "Payment required", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string", + "example": "Payment required to access this resource." + } + } + } + } + } + }, + "429": { + "description": "Too many requests", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string", + "example": "Request rate limit exceeded. Please wait and try again later." + } + } + } + } + } + }, + "500": { + "description": "Server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string", + "example": "An unexpected error occurred on the server." + } + } + } + } + } + } + } + } + }, + "/search": { + "post": { + "summary": "Search for a keyword in Google, returns top page results with markdown content for each page", + "operationId": "searchGoogle", + "tags": ["Search"], + "security": [ + { + "bearerAuth": [] + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "query": { + "type": "string", + "format": "uri", + "description": "The query to search for" + }, + "pageOptions": { + "type": "object", + "properties": { + "onlyMainContent": { + "type": "boolean", + "description": "Only return the main content of the page excluding headers, navs, footers, etc.", + "default": false + }, + "fetchPageContent": { + "type": "boolean", + "description": "Fetch the content of each page. If false, defaults to a basic fast serp API.", + "default": true + }, + "includeHtml": { + "type": "boolean", + "description": "Include the HTML version of the content on page. Will output a html key in the response.", + "default": false + }, + "includeRawHtml": { + "type": "boolean", + "description": "Include the raw HTML content of the page. 
Will output a rawHtml key in the response.", + "default": false + } + } + }, + "searchOptions": { + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Maximum number of results. Max is 20 during beta." + } + } + } + }, + "required": ["query"] + } + } + } + }, + "responses": { + "200": { + "description": "Successful response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SearchResponse" + } + } + } + }, + "402": { + "description": "Payment required", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string", + "example": "Payment required to access this resource." + } + } + } + } + } + }, + "429": { + "description": "Too many requests", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string", + "example": "Request rate limit exceeded. Please wait and try again later." + } + } + } + } + } + }, + "500": { + "description": "Server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string", + "example": "An unexpected error occurred on the server." + } + } + } + } + } + } + } + } + }, + "/crawl/status/{jobId}": { + "get": { + "tags": ["Crawl"], + "summary": "Get the status of a crawl job", + "operationId": "getCrawlStatus", + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "name": "jobId", + "in": "path", + "description": "ID of the crawl job", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Successful response", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "status": { + "type": "string", + "description": "Status of the job (completed, active, failed, paused)" + }, + "current": { + "type": "integer", + "description": "Current page number" + }, + "total": { + "type": "integer", + "description": "Total number of pages" + }, + "data": { + "type": "array", + "items": { + "$ref": "#/components/schemas/CrawlStatusResponseObj" + }, + "description": "Data returned from the job (null when it is in progress)" + }, + "partial_data": { + "type": "array", + "items": { + "$ref": "#/components/schemas/CrawlStatusResponseObj" + }, + "description": "Partial documents returned as it is being crawled (streaming). **This feature is currently in alpha - expect breaking changes** When a page is ready, it will append to the partial_data array, so there is no need to wait for the entire website to be crawled. When the crawl is done, partial_data will become empty and the result will be available in `data`. There is a max of 50 items in the array response. The oldest item (top of the array) will be removed when the new item is added to the array." + } + } + } + } + } + }, + "402": { + "description": "Payment required", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string", + "example": "Payment required to access this resource." + } + } + } + } + } + }, + "429": { + "description": "Too many requests", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string", + "example": "Request rate limit exceeded. Please wait and try again later." 
+ } + } + } + } + } + }, + "500": { + "description": "Server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string", + "example": "An unexpected error occurred on the server." + } + } + } + } + } + } + } + } + }, + "/crawl/cancel/{jobId}": { + "delete": { + "tags": ["Crawl"], + "summary": "Cancel a crawl job", + "operationId": "cancelCrawlJob", + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "name": "jobId", + "in": "path", + "description": "ID of the crawl job", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Successful response", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "status": { + "type": "string", + "description": "Returns cancelled." + } + } + } + } + } + }, + "402": { + "description": "Payment required", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string", + "example": "Payment required to access this resource." + } + } + } + } + } + }, + "429": { + "description": "Too many requests", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string", + "example": "Request rate limit exceeded. Please wait and try again later." + } + } + } + } + } + }, + "500": { + "description": "Server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string", + "example": "An unexpected error occurred on the server." + } + } + } + } + } + } + } + } + } + }, + "components": { + "securitySchemes": { + "bearerAuth": { + "type": "http", + "scheme": "bearer" + } + }, + "schemas": { + "ScrapeResponse": { + "type": "object", + "properties": { + "success": { + "type": "boolean" + }, + "data": { + "type": "object", + "properties": { + "markdown": { + "type": "string" + }, + "content": { + "type": "string" + }, + "html": { + "type": "string", + "nullable": true, + "description": "HTML version of the content on page if `includeHtml` is true" + }, + "rawHtml": { + "type": "string", + "nullable": true, + "description": "Raw HTML content of the page if `includeRawHtml` is true" + }, + "metadata": { + "type": "object", + "properties": { + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "language": { + "type": "string", + "nullable": true + }, + "sourceURL": { + "type": "string", + "format": "uri" + }, + " ": { + "type": "string" + }, + "pageStatusCode": { + "type": "integer", + "description": "The status code of the page" + }, + "pageError": { + "type": "string", + "nullable": true, + "description": "The error message of the page" + } + + } + }, + "llm_extraction": { + "type": "object", + "description": "Displayed when using LLM Extraction. Extracted data from the page following the schema defined.", + "nullable": true + }, + "warning": { + "type": "string", + "nullable": true, + "description": "Can be displayed when using LLM Extraction. Warning message will let you know any issues with the extraction." 
+ } + } + } + } + }, + "CrawlStatusResponseObj": { + "type": "object", + "properties": { + "markdown": { + "type": "string" + }, + "content": { + "type": "string" + }, + "html": { + "type": "string", + "nullable": true, + "description": "HTML version of the content on page if `includeHtml` is true" + }, + "rawHtml": { + "type": "string", + "nullable": true, + "description": "Raw HTML content of the page if `includeRawHtml` is true" + }, + "index": { + "type": "integer", + "description": "The number of the page that was crawled. This is useful for `partial_data` so you know which page the data is from." + }, + "metadata": { + "type": "object", + "properties": { + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "language": { + "type": "string", + "nullable": true + }, + "sourceURL": { + "type": "string", + "format": "uri" + }, + " ": { + "type": "string" + }, + "pageStatusCode": { + "type": "integer", + "description": "The status code of the page" + }, + "pageError": { + "type": "string", + "nullable": true, + "description": "The error message of the page" + } + } + } + } + }, + "SearchResponse": { + "type": "object", + "properties": { + "success": { + "type": "boolean" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "properties": { + "url": { + "type": "string" + }, + "markdown": { + "type": "string" + }, + "content": { + "type": "string" + }, + "metadata": { + "type": "object", + "properties": { + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "language": { + "type": "string", + "nullable": true + }, + "sourceURL": { + "type": "string", + "format": "uri" + } + } + } + } + } + } + } + }, + "CrawlResponse": { + "type": "object", + "properties": { + "jobId": { + "type": "string" + } + } + } + } + }, + "security": [ + { + "bearerAuth": [] + } + ] +} \ No newline at end of file diff --git a/apps/api/openapi.json b/apps/api/openapi.json index e0b583f0..5bd3e3d8 100644 --- a/apps/api/openapi.json +++ b/apps/api/openapi.json @@ -18,8 +18,8 @@ "paths": { "/scrape": { "post": { - "summary": "Scrape a single URL and optionally extract information using an LLM", - "operationId": "scrapeAndExtractFromUrl", + "summary": "Scrape a single URL", + "operationId": "scrape", "tags": ["Scraping"], "security": [ { @@ -38,89 +38,47 @@ "format": "uri", "description": "The URL to scrape" }, - "pageOptions": { - "type": "object", - "properties": { - "headers": { - "type": "object", - "description": "Headers to send with the request. Can be used to send cookies, user-agent, etc." - }, - "includeHtml": { - "type": "boolean", - "description": "Include the HTML version of the content on page. Will output a html key in the response.", - "default": false - }, - "includeRawHtml": { - "type": "boolean", - "description": "Include the raw HTML content of the page. Will output a rawHtml key in the response.", - "default": false - }, - "onlyIncludeTags": { - "type": "array", - "items": { - "type": "string" - }, - "description": "Only include tags, classes and ids from the page in the final output. Use comma separated values. Example: 'script, .ad, #footer'" - }, - "onlyMainContent": { - "type": "boolean", - "description": "Only return the main content of the page excluding headers, navs, footers, etc.", - "default": false - }, - "removeTags": { - "type": "array", - "items": { - "type": "string" - }, - "description": "Tags, classes and ids to remove from the page. Use comma separated values. 
Example: 'script, .ad, #footer'" - }, - "replaceAllPathsWithAbsolutePaths": { - "type": "boolean", - "description": "Replace all relative paths with absolute paths for images and links", - "default": false - }, - "screenshot": { - "type": "boolean", - "description": "Include a screenshot of the top of the page that you are scraping.", - "default": false - }, - "waitFor": { - "type": "integer", - "description": "Wait x amount of milliseconds for the page to load to fetch content", - "default": 0 - } - } + "formats": { + "type": "array", + "items": { + "type": "string", + "enum": ["markdown", "html", "rawHtml", "links", "screenshot", "screenshot@fullPage"] + }, + "description": "Specific formats to return.\n\n - markdown: The page in Markdown format.\n - html: The page's HTML, trimmed to include only meaningful content.\n - rawHtml: The page's original HTML.\n - links: The links on the page.\n - screenshot: A screenshot of the top of the page.\n - screenshot@fullPage: A screenshot of the full page. (overridden by screenshot if present)", + "default": ["markdown"] }, - "extractorOptions": { + "headers": { "type": "object", - "description": "Options for extraction of structured information from the page content. Note: LLM-based extraction is not performed by default and only occurs when explicitly configured. The 'markdown' mode simply returns the scraped markdown and is the default mode for scraping.", - "default": {}, - "properties": { - "mode": { - "type": "string", - "enum": ["markdown", "llm-extraction", "llm-extraction-from-raw-html", "llm-extraction-from-markdown"], - "description": "The extraction mode to use. 'markdown': Returns the scraped markdown content, does not perform LLM extraction. 'llm-extraction': Extracts information from the cleaned and parsed content using LLM. 'llm-extraction-from-raw-html': Extracts information directly from the raw HTML using LLM. 'llm-extraction-from-markdown': Extracts information from the markdown content using LLM." - }, - "extractionPrompt": { - "type": "string", - "description": "A prompt describing what information to extract from the page, applicable for LLM extraction modes." - }, - "extractionSchema": { - "type": "object", - "additionalProperties": true, - "description": "The schema for the data to be extracted, required only for LLM extraction modes.", - "required": [ - "company_mission", - "supports_sso", - "is_open_source" - ] - } - } + "description": "Headers to send with the request. Can be used to send cookies, user-agent, etc." + }, + "includeTags": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Only include tags, classes and ids from the page in the final output. Use comma separated values. Example: 'script, .ad, #footer'" + }, + "excludeTags": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Tags, classes and ids to remove from the page. Use comma separated values. 
Example: 'script, .ad, #footer'" + }, + "onlyMainContent": { + "type": "boolean", + "description": "Only return the main content of the page excluding headers, navs, footers, etc.", + "default": true }, "timeout": { "type": "integer", "description": "Timeout in milliseconds for the request", "default": 30000 + }, + "waitFor": { + "type": "integer", + "description": "Wait x amount of milliseconds for the page to load to fetch content", + "default": 0 } }, "required": ["url"] @@ -317,6 +275,11 @@ "description": "Include a screenshot of the top of the page that you are scraping.", "default": false }, + "fullPageScreenshot": { + "type": "boolean", + "description": "Include a full page screenshot of the page that you are scraping.", + "default": false + }, "waitFor": { "type": "integer", "description": "Wait x amount of milliseconds for the page to load to fetch content", @@ -731,24 +694,42 @@ "success": { "type": "boolean" }, + "warning": { + "type": "string", + "nullable": true, + "description": "Warning message to let you know of any issues." + }, "data": { "type": "object", "properties": { "markdown": { - "type": "string" - }, - "content": { - "type": "string" + "type": "string", + "nullable": true, + "description": "Markdown content of the page if the `markdown` format was specified (default)" }, "html": { "type": "string", "nullable": true, - "description": "HTML version of the content on page if `includeHtml` is true" + "description": "HTML version of the content on page if the `html` format was specified" }, "rawHtml": { "type": "string", "nullable": true, - "description": "Raw HTML content of the page if `includeRawHtml` is true" + "description": "Raw HTML content of the page if the `rawHtml` format was specified" + }, + "links": { + "type": "array", + "items": { + "type": "string", + "format": "uri" + }, + "nullable": true, + "description": "Links on the page if the `links` format was specified" + }, + "screenshot": { + "type": "string", + "nullable": true, + "description": "URL of the screenshot of the page if the `screenshot` or `screenshot@fullPage` format was specified" }, "metadata": { "type": "object", @@ -770,27 +751,16 @@ " ": { "type": "string" }, - "pageStatusCode": { + "statusCode": { "type": "integer", "description": "The status code of the page" }, - "pageError": { + "error": { "type": "string", "nullable": true, "description": "The error message of the page" } - } - }, - "llm_extraction": { - "type": "object", - "description": "Displayed when using LLM Extraction. Extracted data from the page following the schema defined.", - "nullable": true - }, - "warning": { - "type": "string", - "nullable": true, - "description": "Can be displayed when using LLM Extraction. Warning message will let you know any issues with the extraction."
} } } @@ -800,24 +770,33 @@ "type": "object", "properties": { "markdown": { - "type": "string" - }, - "content": { - "type": "string" + "type": "string", + "nullable": true, + "description": "Markdown content of the page if the `markdown` format was specified (default)" }, "html": { "type": "string", "nullable": true, - "description": "HTML version of the content on page if `includeHtml` is true" + "description": "HTML version of the content on page if the `html` format was specified" }, "rawHtml": { "type": "string", "nullable": true, - "description": "Raw HTML content of the page if `includeRawHtml` is true" + "description": "Raw HTML content of the page if the `rawHtml` format was specified" }, - "index": { - "type": "integer", - "description": "The number of the page that was crawled. This is useful for `partial_data` so you know which page the data is from." + "links": { + "type": "array", + "items": { + "type": "string", + "format": "uri" + }, + "nullable": true, + "description": "Links on the page if the `links` format was specified" + }, + "screenshot": { + "type": "string", + "nullable": true, + "description": "URL of the screenshot of the page if the `screenshot` or `screenshot@fullPage` format was specified" }, "metadata": { "type": "object", @@ -839,11 +818,11 @@ " ": { "type": "string" }, - "pageStatusCode": { + "statusCode": { "type": "integer", "description": "The status code of the page" }, - "pageError": { + "error": { "type": "string", "nullable": true, "description": "The error message of the page" @@ -861,34 +840,63 @@ "data": { "type": "array", "items": { - "type": "object", - "properties": { - "url": { - "type": "string" + "markdown": { + "type": "string", + "nullable": true, + "description": "Markdown content of the page if the `markdown` format was specified (default)" + }, + "html": { + "type": "string", + "nullable": true, + "description": "HTML version of the content on page if the `html` format was specified" + }, + "rawHtml": { + "type": "string", + "nullable": true, + "description": "Raw HTML content of the page if the `rawHtml` format was specified" + }, + "links": { + "type": "array", + "items": { + "type": "string", + "format": "uri" }, - "markdown": { - "type": "string" - }, - "content": { - "type": "string" - }, - "metadata": { - "type": "object", - "properties": { - "title": { - "type": "string" - }, - "description": { - "type": "string" - }, - "language": { - "type": "string", - "nullable": true - }, - "sourceURL": { - "type": "string", - "format": "uri" - } + "nullable": true, + "description": "Links on the page if the `links` format was specified" + }, + "screenshot": { + "type": "string", + "nullable": true, + "description": "URL of the screenshot of the page if the `screenshot` or `screenshot@fullPage` format was specified" + }, + "metadata": { + "type": "object", + "properties": { + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "language": { + "type": "string", + "nullable": true + }, + "sourceURL": { + "type": "string", + "format": "uri" + }, + " ": { + "type": "string" + }, + "statusCode": { + "type": "integer", + "description": "The status code of the page" + }, + "error": { + "type": "string", + "nullable": true, + "description": "The error message of the page" } } } @@ -899,8 +907,15 @@ "CrawlResponse": { "type": "object", "properties": { - "jobId": { + "success": { + "type": "boolean" + }, + "id": { "type": "string" + }, + "url": { + "type": "string", + "format": "uri" } } } diff --git a/apps/api/package.json
b/apps/api/package.json index 4815b612..12024b68 100644 --- a/apps/api/package.json +++ b/apps/api/package.json @@ -57,6 +57,8 @@ "@nangohq/node": "^0.40.8", "@sentry/node": "^8.13.0", "@supabase/supabase-js": "^2.44.2", + "@types/express-ws": "^3.0.4", + "@types/ws": "^8.5.12", "ajv": "^8.16.0", "async": "^3.2.5", "async-mutex": "^0.5.0", @@ -71,6 +73,7 @@ "date-fns": "^3.6.0", "dotenv": "^16.3.1", "express-rate-limit": "^7.3.1", + "express-ws": "^5.0.2", "form-data": "^4.0.0", "glob": "^10.4.2", "gpt3-tokenizer": "^1.1.5", @@ -93,6 +96,7 @@ "promptable": "^0.0.10", "puppeteer": "^22.12.1", "rate-limiter-flexible": "2.4.2", + "redlock": "5.0.0-beta.2", "resend": "^3.4.0", "robots-parser": "^3.0.1", "scrapingbee": "^1.7.4", @@ -104,8 +108,9 @@ "unstructured-client": "^0.11.3", "uuid": "^10.0.0", "wordpos": "^2.1.0", + "ws": "^8.18.0", "xml2js": "^0.6.2", - "zod": "^3.23.4", + "zod": "^3.23.8", "zod-to-json-schema": "^3.23.1" }, "nodemonConfig": { diff --git a/apps/api/pnpm-lock.yaml b/apps/api/pnpm-lock.yaml index 62760a98..93d0ed13 100644 --- a/apps/api/pnpm-lock.yaml +++ b/apps/api/pnpm-lock.yaml @@ -41,6 +41,12 @@ importers: '@supabase/supabase-js': specifier: ^2.44.2 version: 2.44.2 + '@types/express-ws': + specifier: ^3.0.4 + version: 3.0.4 + '@types/ws': + specifier: ^8.5.12 + version: 8.5.12 ajv: specifier: ^8.16.0 version: 8.16.0 @@ -83,6 +89,9 @@ importers: express-rate-limit: specifier: ^7.3.1 version: 7.3.1(express@4.19.2) + express-ws: + specifier: ^5.0.2 + version: 5.0.2(express@4.19.2) form-data: specifier: ^4.0.0 version: 4.0.0 @@ -106,7 +115,7 @@ importers: version: 0.0.28 langchain: specifier: ^0.2.8 - version: 0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1) + version: 0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0) languagedetect: specifier: ^2.0.0 version: 2.0.0 @@ -149,6 +158,9 @@ importers: rate-limiter-flexible: specifier: 2.4.2 version: 2.4.2 + redlock: + specifier: 5.0.0-beta.2 + version: 5.0.0-beta.2 resend: specifier: ^3.4.0 version: 3.4.0 @@ -182,11 +194,14 @@ importers: wordpos: specifier: ^2.1.0 version: 2.1.0 + ws: + specifier: ^8.18.0 + version: 8.18.0 xml2js: specifier: ^0.6.2 version: 0.6.2 zod: - specifier: ^3.23.4 + specifier: ^3.23.8 version: 3.23.8 zod-to-json-schema: specifier: ^3.23.1 @@ -1556,6 +1571,9 @@ packages: '@types/express-serve-static-core@4.19.3': resolution: {integrity: sha512-KOzM7MhcBFlmnlr/fzISFF5vGWVSvN6fTd4T+ExOt08bA/dA5kpSzY52nMsI1KDFmUREpJelPYyuslLRSjjgCg==} + '@types/express-ws@3.0.4': + resolution: {integrity: sha512-Yjj18CaivG5KndgcvzttWe8mPFinPCHJC2wvyQqVzA7hqeufM8EtWMj6mpp5omg3s8XALUexhOu8aXAyi/DyJQ==} + '@types/express@4.17.21': resolution: {integrity: sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==} @@ -1658,8 +1676,8 @@ packages: '@types/whatwg-url@11.0.5': resolution: {integrity: sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ==} - '@types/ws@8.5.10': - resolution: {integrity: sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==} + '@types/ws@8.5.12': + resolution: 
{integrity: sha512-3tPRkv1EtkDpzlgyKyI8pGsGZAGPEaXeu0DOj5DI25Ja91bdAYddYHbADRYVrZMRbfW+1l5YwXVDKohDJNQxkQ==} '@types/yargs-parser@21.0.3': resolution: {integrity: sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==} @@ -2413,6 +2431,12 @@ packages: peerDependencies: express: 4 || 5 || ^5.0.0-beta.1 + express-ws@5.0.2: + resolution: {integrity: sha512-0uvmuk61O9HXgLhGl3QhNSEtRsQevtmbL94/eILaliEADZBHZOQUAiHFrGPrgsjikohyrmSG5g+sCfASTt0lkQ==} + engines: {node: '>=4.5.0'} + peerDependencies: + express: ^4.0.0 || ^5.0.0-alpha.1 + express@4.19.2: resolution: {integrity: sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==} engines: {node: '>= 0.10.0'} @@ -3950,6 +3974,10 @@ packages: redis@4.6.14: resolution: {integrity: sha512-GrNg/e33HtsQwNXL7kJT+iNFPSwE1IPmd7wzV3j4f2z0EYxZfZE7FVTmUysgAtqQQtg5NXF5SNLR9OdO/UHOfw==} + redlock@5.0.0-beta.2: + resolution: {integrity: sha512-2RDWXg5jgRptDrB1w9O/JgSZC0j7y4SlaXnor93H/UJm/QyDiFgBKNtrh0TI6oCXqYSaSoXxFh6Sd3VtYfhRXw==} + engines: {node: '>=12'} + regenerator-runtime@0.14.1: resolution: {integrity: sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==} @@ -4540,8 +4568,20 @@ packages: resolution: {integrity: sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - ws@8.17.1: - resolution: {integrity: sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==} + ws@7.5.10: + resolution: {integrity: sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==} + engines: {node: '>=8.3.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ^5.0.2 + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + + ws@8.18.0: + resolution: {integrity: sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==} engines: {node: '>=10.0.0'} peerDependencies: bufferutil: ^4.0.1 @@ -5178,13 +5218,13 @@ snapshots: '@js-sdsl/ordered-map@4.4.2': {} - '@langchain/core@0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2)': + '@langchain/core@0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.52.2)': dependencies: ansi-styles: 5.2.0 camelcase: 6.3.0 decamelize: 1.2.0 js-tiktoken: 1.0.12 - langsmith: 0.1.34(@langchain/core@0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2))(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2) + langsmith: 
0.1.34(@langchain/core@0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.52.2))(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.52.2) ml-distance: 4.0.1 mustache: 4.2.0 p-queue: 6.6.2 @@ -5196,9 +5236,9 @@ snapshots: - langchain - openai - '@langchain/openai@0.2.1(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))': + '@langchain/openai@0.2.1(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))': dependencies: - '@langchain/core': 0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2) + '@langchain/core': 0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.52.2) js-tiktoken: 1.0.12 openai: 4.52.2 zod: 3.23.8 @@ -5207,9 +5247,9 @@ snapshots: - encoding - langchain - '@langchain/textsplitters@0.0.3(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2)': + '@langchain/textsplitters@0.0.3(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.52.2)': dependencies: - '@langchain/core': 0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2) + '@langchain/core': 0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.52.2) js-tiktoken: 1.0.12 transitivePeerDependencies: - langchain @@ -6367,8 +6407,8 @@ snapshots: dependencies: '@supabase/node-fetch': 2.6.15 '@types/phoenix': 1.6.5 - '@types/ws': 8.5.10 - ws: 8.17.1 + '@types/ws': 
8.5.12 + ws: 8.18.0 transitivePeerDependencies: - bufferutil - utf-8-validate @@ -6465,6 +6505,12 @@ snapshots: '@types/range-parser': 1.2.7 '@types/send': 0.17.4 + '@types/express-ws@3.0.4': + dependencies: + '@types/express': 4.17.21 + '@types/express-serve-static-core': 4.19.3 + '@types/ws': 8.5.12 + '@types/express@4.17.21': dependencies: '@types/body-parser': 1.19.5 @@ -6588,7 +6634,7 @@ snapshots: dependencies: '@types/webidl-conversions': 7.0.3 - '@types/ws@8.5.10': + '@types/ws@8.5.12': dependencies: '@types/node': 20.14.1 @@ -7329,6 +7375,14 @@ snapshots: dependencies: express: 4.19.2 + express-ws@5.0.2(express@4.19.2): + dependencies: + express: 4.19.2 + ws: 7.5.10 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + express@4.19.2: dependencies: accepts: 1.3.8 @@ -8241,17 +8295,17 @@ snapshots: kleur@3.0.3: {} - langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1): + langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0): dependencies: - '@langchain/core': 0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2) - '@langchain/openai': 0.2.1(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1)) - '@langchain/textsplitters': 0.0.3(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2) + '@langchain/core': 0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.52.2) + '@langchain/openai': 0.2.1(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0)) + '@langchain/textsplitters': 0.0.3(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.52.2) binary-extensions: 2.3.0 js-tiktoken: 1.0.12 js-yaml: 4.1.0 jsonpointer: 5.0.1 langchainhub: 0.0.11 - langsmith: 
0.1.34(@langchain/core@0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2))(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2) + langsmith: 0.1.34(@langchain/core@0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.52.2))(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.52.2) ml-distance: 4.0.1 openapi-types: 12.1.3 p-retry: 4.6.2 @@ -8271,14 +8325,14 @@ snapshots: pdf-parse: 1.1.1 puppeteer: 22.12.1(typescript@5.4.5) redis: 4.6.14 - ws: 8.17.1 + ws: 8.18.0 transitivePeerDependencies: - encoding - openai langchainhub@0.0.11: {} - langsmith@0.1.34(@langchain/core@0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2))(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2): + langsmith@0.1.34(@langchain/core@0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.52.2))(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.52.2): dependencies: '@types/uuid': 9.0.8 commander: 10.0.1 @@ -8287,8 +8341,8 @@ snapshots: p-retry: 4.6.2 uuid: 9.0.1 optionalDependencies: - '@langchain/core': 0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2) - langchain: 0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1) + '@langchain/core': 
0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.52.2) + langchain: 0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0) openai: 4.52.2 languagedetect@2.0.0: {} @@ -8992,7 +9046,7 @@ snapshots: chromium-bidi: 0.5.24(devtools-protocol@0.0.1299070) debug: 4.3.5 devtools-protocol: 0.0.1299070 - ws: 8.17.1 + ws: 8.18.0 transitivePeerDependencies: - bufferutil - supports-color @@ -9098,6 +9152,10 @@ snapshots: '@redis/search': 1.1.6(@redis/client@1.5.16) '@redis/time-series': 1.0.5(@redis/client@1.5.16) + redlock@5.0.0-beta.2: + dependencies: + node-abort-controller: 3.1.1 + regenerator-runtime@0.14.1: {} require-directory@2.1.1: {} @@ -9670,7 +9728,9 @@ snapshots: imurmurhash: 0.1.4 signal-exit: 4.1.0 - ws@8.17.1: {} + ws@7.5.10: {} + + ws@8.18.0: {} xml2js@0.6.2: dependencies: diff --git a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts new file mode 100644 index 00000000..3bb26e98 --- /dev/null +++ b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts @@ -0,0 +1,609 @@ +import request from "supertest"; +import dotenv from "dotenv"; +import { + ScrapeOptions, + ScrapeRequest, + ScrapeResponseRequestTest, +} from "../../controllers/v1/types"; + +dotenv.config(); +const TEST_URL = "http://127.0.0.1:3002"; + +describe("E2E Tests for v1 API Routes", () => { + beforeAll(() => { + process.env.USE_DB_AUTHENTICATION = "true"; + }); + + afterAll(() => { + delete process.env.USE_DB_AUTHENTICATION; + }); + + describe("GET /is-production", () => { + it.concurrent("should return the production status", async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL).get( + "/is-production" + ); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("isProduction"); + }); + }); + + describe("POST /v1/scrape", () => { + it.concurrent("should require authorization", async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL).post( + "/v1/scrape" + ); + expect(response.statusCode).toBe(401); + }); + + it.concurrent("should throw error for blocklisted URL", async () => { + const scrapeRequest: ScrapeRequest = { + url: "https://facebook.com/fake-test", + }; + + const response = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequest); + + expect(response.statusCode).toBe(403); + expect(response.body.error).toBe("URL is blocked. 
Firecrawl currently does not support social media scraping due to policy restrictions."); + }); + + it.concurrent( + "should return an error response with an invalid API key", + async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer invalid-api-key`) + .set("Content-Type", "application/json") + .send({ url: "https://firecrawl.dev" }); + expect(response.statusCode).toBe(401); + } + ); + + it.concurrent( + "should return a successful response with a valid API key", + async () => { + const scrapeRequest: ScrapeRequest = { + url: "https://roastmywebsite.ai", + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequest); + + expect(response.statusCode).toBe(200); + + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).not.toHaveProperty("content"); + expect(response.body.data).toHaveProperty("markdown"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data).not.toHaveProperty("html"); + expect(response.body.data.markdown).toContain("_Roast_"); + expect(response.body.data.metadata.error).toBeUndefined(); + expect(response.body.data.metadata.title).toBe("Roast My Website"); + expect(response.body.data.metadata.description).toBe( + "Welcome to Roast My Website, the ultimate tool for putting your website through the wringer! This repository harnesses the power of Firecrawl to scrape and capture screenshots of websites, and then unleashes the latest LLM vision models to mercilessly roast them. 🌶️" + ); + expect(response.body.data.metadata.keywords).toBe( + "Roast My Website,Roast,Website,GitHub,Firecrawl" + ); + expect(response.body.data.metadata.robots).toBe("follow, index"); + expect(response.body.data.metadata.ogTitle).toBe("Roast My Website"); + expect(response.body.data.metadata.ogDescription).toBe( + "Welcome to Roast My Website, the ultimate tool for putting your website through the wringer! This repository harnesses the power of Firecrawl to scrape and capture screenshots of websites, and then unleashes the latest LLM vision models to mercilessly roast them. 
🌶️" + ); + expect(response.body.data.metadata.ogUrl).toBe( + "https://www.roastmywebsite.ai" + ); + expect(response.body.data.metadata.ogImage).toBe( + "https://www.roastmywebsite.ai/og.png" + ); + expect(response.body.data.metadata.ogLocaleAlternate).toStrictEqual([]); + expect(response.body.data.metadata.ogSiteName).toBe("Roast My Website"); + expect(response.body.data.metadata.sourceURL).toBe( + "https://roastmywebsite.ai" + ); + expect(response.body.data.metadata.statusCode).toBe(200); + }, + 30000 + ); // 30 seconds timeout + it.concurrent( + "should return a successful response with a valid API key and includeHtml set to true", + async () => { + const scrapeRequest: ScrapeRequest = { + url: "https://roastmywebsite.ai", + formats: ["markdown", "html"], + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequest); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty("markdown"); + expect(response.body.data).toHaveProperty("html"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.markdown).toContain("_Roast_"); + expect(response.body.data.html).toContain(" { + const scrapeRequest: ScrapeRequest = { + url: "https://arxiv.org/pdf/astro-ph/9301001.pdf" + // formats: ["markdown", "html"], + }; + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post('/v1/scrape') + .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) + .set('Content-Type', 'application/json') + .send(scrapeRequest); + await new Promise((r) => setTimeout(r, 6000)); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty('data'); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty('metadata'); + expect(response.body.data.markdown).toContain('Broad Line Radio Galaxy'); + expect(response.body.data.metadata.statusCode).toBe(200); + expect(response.body.data.metadata.error).toBeUndefined(); + }, 60000); + + it.concurrent('should return a successful response for a valid scrape with PDF file without explicit .pdf extension', async () => { + const scrapeRequest: ScrapeRequest = { + url: "https://arxiv.org/pdf/astro-ph/9301001" + }; + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post('/v1/scrape') + .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) + .set('Content-Type', 'application/json') + .send(scrapeRequest); + await new Promise((r) => setTimeout(r, 6000)); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty('data'); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty('markdown'); + expect(response.body.data).toHaveProperty('metadata'); + expect(response.body.data.markdown).toContain('Broad Line Radio Galaxy'); + expect(response.body.data.metadata.statusCode).toBe(200); + expect(response.body.data.metadata.error).toBeUndefined(); + }, 60000); + + it.concurrent("should return a successful response with a valid API key with removeTags option", async () => { + const scrapeRequest: ScrapeRequest = { + url: 
"https://www.scrapethissite.com/", + onlyMainContent: false // default is true + }; + const responseWithoutRemoveTags: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequest); + expect(responseWithoutRemoveTags.statusCode).toBe(200); + expect(responseWithoutRemoveTags.body).toHaveProperty("data"); + + if (!("data" in responseWithoutRemoveTags.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(responseWithoutRemoveTags.body.data).toHaveProperty("markdown"); + expect(responseWithoutRemoveTags.body.data).toHaveProperty("metadata"); + expect(responseWithoutRemoveTags.body.data).not.toHaveProperty("html"); + expect(responseWithoutRemoveTags.body.data.markdown).toContain("[FAQ](/faq/)"); // .nav + expect(responseWithoutRemoveTags.body.data.markdown).toContain("Hartley Brody 2023"); // #footer + + const scrapeRequestWithRemoveTags: ScrapeRequest = { + url: "https://www.scrapethissite.com/", + excludeTags: ['.nav', '#footer', 'strong'], + onlyMainContent: false // default is true + }; + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequestWithRemoveTags); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty("markdown"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data).not.toHaveProperty("html"); + expect(response.body.data.markdown).not.toContain("Hartley Brody 2023"); + expect(response.body.data.markdown).not.toContain("[FAQ](/faq/)"); // + }, 30000); + + it.concurrent('should return a successful response for a scrape with 400 page', async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post('/v1/scrape') + .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) + .set('Content-Type', 'application/json') + .send({ url: 'https://httpstat.us/400' }); + await new Promise((r) => setTimeout(r, 5000)); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty('data'); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty('markdown'); + expect(response.body.data).toHaveProperty('metadata'); + expect(response.body.data.metadata.statusCode).toBe(400); + }, 60000); + + + it.concurrent('should return a successful response for a scrape with 401 page', async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post('/v1/scrape') + .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) + .set('Content-Type', 'application/json') + .send({ url: 'https://httpstat.us/401' }); + await new Promise((r) => setTimeout(r, 5000)); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty('data'); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty('markdown'); + expect(response.body.data).toHaveProperty('metadata'); + expect(response.body.data.metadata.statusCode).toBe(401); + }, 60000); + + it.concurrent('should return a successful response 
for a scrape with 403 page', async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post('/v1/scrape') + .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) + .set('Content-Type', 'application/json') + .send({ url: 'https://httpstat.us/403' }); + await new Promise((r) => setTimeout(r, 5000)); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty('data'); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty('markdown'); + expect(response.body.data).toHaveProperty('metadata'); + expect(response.body.data.metadata.statusCode).toBe(403); + }, 60000); + + it.concurrent('should return a successful response for a scrape with 404 page', async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post('/v1/scrape') + .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) + .set('Content-Type', 'application/json') + .send({ url: 'https://httpstat.us/404' }); + await new Promise((r) => setTimeout(r, 5000)); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty('data'); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty('markdown'); + expect(response.body.data).toHaveProperty('metadata'); + expect(response.body.data.metadata.statusCode).toBe(404); + }, 60000); + + it.concurrent('should return a successful response for a scrape with 405 page', async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post('/v1/scrape') + .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) + .set('Content-Type', 'application/json') + .send({ url: 'https://httpstat.us/405' }); + await new Promise((r) => setTimeout(r, 5000)); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty('data'); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty('markdown'); + expect(response.body.data).toHaveProperty('metadata'); + expect(response.body.data.metadata.statusCode).toBe(405); + }, 60000); + + it.concurrent('should return a successful response for a scrape with 500 page', async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post('/v1/scrape') + .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) + .set('Content-Type', 'application/json') + .send({ url: 'https://httpstat.us/500' }); + await new Promise((r) => setTimeout(r, 5000)); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty('data'); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty('markdown'); + expect(response.body.data).toHaveProperty('metadata'); + expect(response.body.data.metadata.statusCode).toBe(500); + }, 60000); + + it.concurrent("should return a timeout error when scraping takes longer than the specified timeout", async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://firecrawl.dev", timeout: 1000 }); + + expect(response.statusCode).toBe(408); + }, 3000); + + it.concurrent( + "should return a successful response with a valid API 
key and includeHtml set to true", + async () => { + const scrapeRequest: ScrapeRequest = { + url: "https://roastmywebsite.ai", + formats: ["html","rawHtml"], + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequest); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).not.toHaveProperty("markdown"); + expect(response.body.data).toHaveProperty("html"); + expect(response.body.data).toHaveProperty("rawHtml"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.html).toContain(" { + const scrapeRequest: ScrapeRequest = { + url: "https://ycombinator.com/companies", + formats: ["markdown"], + waitFor: 5000 + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequest); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty("markdown"); + expect(response.body.data).not.toHaveProperty("html"); + expect(response.body.data).not.toHaveProperty("links"); + expect(response.body.data).not.toHaveProperty("rawHtml"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.markdown).toContain("PagerDuty"); + expect(response.body.data.metadata.statusCode).toBe(200); + expect(response.body.data.metadata.error).toBeUndefined(); + + }, + 30000 + ); + + it.concurrent( + "should return a successful response with a valid links on page", + async () => { + const scrapeRequest: ScrapeRequest = { + url: "https://roastmywebsite.ai", + formats: ["links"], + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequest); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).not.toHaveProperty("html"); + expect(response.body.data).not.toHaveProperty("rawHtml"); + expect(response.body.data).toHaveProperty("links"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.links).toContain("https://firecrawl.dev"); + expect(response.body.data.metadata.statusCode).toBe(200); + expect(response.body.data.metadata.error).toBeUndefined(); + }, + 30000 + ); + + + }); + +describe("POST /v1/map", () => { + it.concurrent("should require authorization", async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL).post( + "/v1/map" + ); + expect(response.statusCode).toBe(401); + }); + + it.concurrent("should return an error response with an invalid API key", async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/map") + .set("Authorization", `Bearer invalid-api-key`) + .set("Content-Type", "application/json") + .send({ url: "https://firecrawl.dev" }); + 
expect(response.statusCode).toBe(401); + }); + + it.concurrent("should return a successful response with a valid API key", async () => { + const mapRequest = { + url: "https://roastmywebsite.ai" + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/map") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(mapRequest); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("success", true); + expect(response.body).toHaveProperty("links"); + if (!("links" in response.body)) { + throw new Error("Expected response body to have 'links' property"); + } + const links = response.body.links as unknown[]; + expect(Array.isArray(links)).toBe(true); + expect(links.length).toBeGreaterThan(0); + }); + + it.concurrent("should return a successful response with a valid API key and search", async () => { + const mapRequest = { + url: "https://usemotion.com", + search: "pricing" + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/map") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(mapRequest); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("success", true); + expect(response.body).toHaveProperty("links"); + if (!("links" in response.body)) { + throw new Error("Expected response body to have 'links' property"); + } + const links = response.body.links as unknown[]; + expect(Array.isArray(links)).toBe(true); + expect(links.length).toBeGreaterThan(0); + expect(links[0]).toContain("usemotion.com/pricing"); + }); + + it.concurrent("should return a successful response with a valid API key and search and allowSubdomains", async () => { + const mapRequest = { + url: "https://firecrawl.dev", + search: "docs", + includeSubdomains: true + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/map") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(mapRequest); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("success", true); + expect(response.body).toHaveProperty("links"); + if (!("links" in response.body)) { + throw new Error("Expected response body to have 'links' property"); + } + const links = response.body.links as unknown[]; + expect(Array.isArray(links)).toBe(true); + expect(links.length).toBeGreaterThan(0); + expect(links[0]).toContain("docs.firecrawl.dev"); + }); + + it.concurrent("should return a successful response with a valid API key and search and allowSubdomains and www", async () => { + const mapRequest = { + url: "https://www.firecrawl.dev", + search: "docs", + includeSubdomains: true + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/map") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(mapRequest); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("success", true); + expect(response.body).toHaveProperty("links"); + if (!("links" in response.body)) { + throw new Error("Expected response body to have 'links' property"); + } + const links = response.body.links as unknown[]; + expect(Array.isArray(links)).toBe(true); + expect(links.length).toBeGreaterThan(0); + expect(links[0]).toContain("docs.firecrawl.dev"); + }, 10000) + + it.concurrent("should return a successful response with a 
valid API key and search and not allowSubdomains and www", async () => { + const mapRequest = { + url: "https://www.firecrawl.dev", + search: "docs", + includeSubdomains: false + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/map") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(mapRequest); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("success", true); + expect(response.body).toHaveProperty("links"); + if (!("links" in response.body)) { + throw new Error("Expected response body to have 'links' property"); + } + const links = response.body.links as unknown[]; + expect(Array.isArray(links)).toBe(true); + expect(links.length).toBeGreaterThan(0); + expect(links[0]).not.toContain("docs.firecrawl.dev"); + }) + + it.concurrent("should return an error for invalid URL", async () => { + const mapRequest = { + url: "invalid-url", + includeSubdomains: true, + search: "test", + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/map") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(mapRequest); + + expect(response.statusCode).toBe(400); + expect(response.body).toHaveProperty("success", false); + expect(response.body).toHaveProperty("error"); + }); +}); +}); diff --git a/apps/api/src/__tests__/e2e_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_withAuth/index.test.ts index b5bc54a5..82ed5bfe 100644 --- a/apps/api/src/__tests__/e2e_withAuth/index.test.ts +++ b/apps/api/src/__tests__/e2e_withAuth/index.test.ts @@ -1,11 +1,15 @@ import request from "supertest"; import dotenv from "dotenv"; -import { FirecrawlCrawlResponse, FirecrawlCrawlStatusResponse, FirecrawlScrapeResponse } from "../../types"; +import { + FirecrawlCrawlResponse, + FirecrawlCrawlStatusResponse, + FirecrawlScrapeResponse, +} from "../../types"; dotenv.config(); const TEST_URL = "http://127.0.0.1:3002"; -describe("E2E Tests for API Routes", () => { +describe("E2E Tests for v0 API Routes", () => { beforeAll(() => { process.env.USE_DB_AUTHENTICATION = "true"; }); @@ -24,276 +28,365 @@ describe("E2E Tests for API Routes", () => { describe("POST /v0/scrape", () => { it.concurrent("should require authorization", async () => { - const response: FirecrawlScrapeResponse = await request(TEST_URL).post("/v0/scrape"); + const response: FirecrawlScrapeResponse = await request(TEST_URL).post( + "/v0/scrape" + ); expect(response.statusCode).toBe(401); }); - it.concurrent("should return an error response with an invalid API key", async () => { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post("/v0/scrape") - .set("Authorization", `Bearer invalid-api-key`) - .set("Content-Type", "application/json") - .send({ url: "https://firecrawl.dev" }); - expect(response.statusCode).toBe(401); - }); + it.concurrent( + "should return an error response with an invalid API key", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer invalid-api-key`) + .set("Content-Type", "application/json") + .send({ url: "https://firecrawl.dev" }); + expect(response.statusCode).toBe(401); + } + ); - it.concurrent("should return a successful response with a valid API key", async () => { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post("/v0/scrape") - .set("Authorization", `Bearer 
${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ url: "https://roastmywebsite.ai" }); - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty("data"); - expect(response.body.data).toHaveProperty("content"); - expect(response.body.data).toHaveProperty("markdown"); - expect(response.body.data).toHaveProperty("metadata"); - expect(response.body.data).not.toHaveProperty("html"); - expect(response.body.data.content).toContain("_Roast_"); - expect(response.body.data.metadata.pageError).toBeUndefined(); - expect(response.body.data.metadata.title).toBe("Roast My Website"); - expect(response.body.data.metadata.description).toBe("Welcome to Roast My Website, the ultimate tool for putting your website through the wringer! This repository harnesses the power of Firecrawl to scrape and capture screenshots of websites, and then unleashes the latest LLM vision models to mercilessly roast them. 🌶️"); - expect(response.body.data.metadata.keywords).toBe("Roast My Website,Roast,Website,GitHub,Firecrawl"); - expect(response.body.data.metadata.robots).toBe("follow, index"); - expect(response.body.data.metadata.ogTitle).toBe("Roast My Website"); - expect(response.body.data.metadata.ogDescription).toBe("Welcome to Roast My Website, the ultimate tool for putting your website through the wringer! This repository harnesses the power of Firecrawl to scrape and capture screenshots of websites, and then unleashes the latest LLM vision models to mercilessly roast them. 🌶️"); - expect(response.body.data.metadata.ogUrl).toBe("https://www.roastmywebsite.ai"); - expect(response.body.data.metadata.ogImage).toBe("https://www.roastmywebsite.ai/og.png"); - expect(response.body.data.metadata.ogLocaleAlternate).toStrictEqual([]); - expect(response.body.data.metadata.ogSiteName).toBe("Roast My Website"); - expect(response.body.data.metadata.sourceURL).toBe("https://roastmywebsite.ai"); - expect(response.body.data.metadata.pageStatusCode).toBe(200); - }, 30000); // 30 seconds timeout + it.concurrent( + "should return a successful response with a valid API key", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://roastmywebsite.ai" }); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("markdown"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data).not.toHaveProperty("html"); + expect(response.body.data.content).toContain("_Roast_"); + expect(response.body.data.metadata.pageError).toBeUndefined(); + expect(response.body.data.metadata.title).toBe("Roast My Website"); + expect(response.body.data.metadata.description).toBe( + "Welcome to Roast My Website, the ultimate tool for putting your website through the wringer! This repository harnesses the power of Firecrawl to scrape and capture screenshots of websites, and then unleashes the latest LLM vision models to mercilessly roast them. 
🌶️" + ); + expect(response.body.data.metadata.keywords).toBe( + "Roast My Website,Roast,Website,GitHub,Firecrawl" + ); + expect(response.body.data.metadata.robots).toBe("follow, index"); + expect(response.body.data.metadata.ogTitle).toBe("Roast My Website"); + expect(response.body.data.metadata.ogDescription).toBe( + "Welcome to Roast My Website, the ultimate tool for putting your website through the wringer! This repository harnesses the power of Firecrawl to scrape and capture screenshots of websites, and then unleashes the latest LLM vision models to mercilessly roast them. 🌶️" + ); + expect(response.body.data.metadata.ogUrl).toBe( + "https://www.roastmywebsite.ai" + ); + expect(response.body.data.metadata.ogImage).toBe( + "https://www.roastmywebsite.ai/og.png" + ); + expect(response.body.data.metadata.ogLocaleAlternate).toStrictEqual([]); + expect(response.body.data.metadata.ogSiteName).toBe("Roast My Website"); + expect(response.body.data.metadata.sourceURL).toBe( + "https://roastmywebsite.ai" + ); + expect(response.body.data.metadata.pageStatusCode).toBe(200); + }, + 30000 + ); // 30 seconds timeout + it.concurrent( + "should return a successful response with a valid API key and includeHtml set to true", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ + url: "https://roastmywebsite.ai", + pageOptions: { includeHtml: true }, + }); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("markdown"); + expect(response.body.data).toHaveProperty("html"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.content).toContain("_Roast_"); + expect(response.body.data.markdown).toContain("_Roast_"); + expect(response.body.data.html).toContain(" { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post("/v0/scrape") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ - url: "https://roastmywebsite.ai", - pageOptions: { includeHtml: true }, - }); - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty("data"); - expect(response.body.data).toHaveProperty("content"); - expect(response.body.data).toHaveProperty("markdown"); - expect(response.body.data).toHaveProperty("html"); - expect(response.body.data).toHaveProperty("metadata"); - expect(response.body.data.content).toContain("_Roast_"); - expect(response.body.data.markdown).toContain("_Roast_"); - expect(response.body.data.html).toContain(" { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post('/v0/scrape') - .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) - .set('Content-Type', 'application/json') - .send({ url: 'https://arxiv.org/pdf/astro-ph/9301001.pdf' }); - await new Promise((r) => setTimeout(r, 6000)); + it.concurrent( + "should return a successful response for a valid scrape with PDF file", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://arxiv.org/pdf/astro-ph/9301001.pdf" }); + await new Promise((r) => setTimeout(r, 6000)); - expect(response.statusCode).toBe(200); - 
expect(response.body).toHaveProperty('data'); - expect(response.body.data).toHaveProperty('content'); - expect(response.body.data).toHaveProperty('metadata'); - expect(response.body.data.content).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy'); - expect(response.body.data.metadata.pageStatusCode).toBe(200); - expect(response.body.data.metadata.pageError).toBeUndefined(); - }, 60000); // 60 seconds - - it.concurrent('should return a successful response for a valid scrape with PDF file without explicit .pdf extension', async () => { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post('/v0/scrape') - .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) - .set('Content-Type', 'application/json') - .send({ url: 'https://arxiv.org/pdf/astro-ph/9301001' }); - await new Promise((r) => setTimeout(r, 6000)); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.content).toContain( + "We present spectrophotometric observations of the Broad Line Radio Galaxy" + ); + expect(response.body.data.metadata.pageStatusCode).toBe(200); + expect(response.body.data.metadata.pageError).toBeUndefined(); + }, + 60000 + ); // 60 seconds - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty('data'); - expect(response.body.data).toHaveProperty('content'); - expect(response.body.data).toHaveProperty('metadata'); - expect(response.body.data.content).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy'); - expect(response.body.data.metadata.pageStatusCode).toBe(200); - expect(response.body.data.metadata.pageError).toBeUndefined(); - }, 60000); // 60 seconds + it.concurrent( + "should return a successful response for a valid scrape with PDF file without explicit .pdf extension", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://arxiv.org/pdf/astro-ph/9301001" }); + await new Promise((r) => setTimeout(r, 6000)); - it.concurrent("should return a successful response with a valid API key with removeTags option", async () => { - const responseWithoutRemoveTags: FirecrawlScrapeResponse = await request(TEST_URL) - .post("/v0/scrape") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ url: "https://www.scrapethissite.com/" }); - expect(responseWithoutRemoveTags.statusCode).toBe(200); - expect(responseWithoutRemoveTags.body).toHaveProperty("data"); - expect(responseWithoutRemoveTags.body.data).toHaveProperty("content"); - expect(responseWithoutRemoveTags.body.data).toHaveProperty("markdown"); - expect(responseWithoutRemoveTags.body.data).toHaveProperty("metadata"); - expect(responseWithoutRemoveTags.body.data).not.toHaveProperty("html"); - expect(responseWithoutRemoveTags.body.data.content).toContain("Scrape This Site"); - expect(responseWithoutRemoveTags.body.data.content).toContain("Lessons and Videos"); // #footer - expect(responseWithoutRemoveTags.body.data.content).toContain("[Sandbox]("); // .nav - expect(responseWithoutRemoveTags.body.data.content).toContain("web scraping"); // strong + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + 
expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.content).toContain( + "We present spectrophotometric observations of the Broad Line Radio Galaxy" + ); + expect(response.body.data.metadata.pageStatusCode).toBe(200); + expect(response.body.data.metadata.pageError).toBeUndefined(); + }, + 60000 + ); // 60 seconds - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post("/v0/scrape") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ url: "https://www.scrapethissite.com/", pageOptions: { removeTags: ['.nav', '#footer', 'strong'] } }); - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty("data"); - expect(response.body.data).toHaveProperty("content"); - expect(response.body.data).toHaveProperty("markdown"); - expect(response.body.data).toHaveProperty("metadata"); - expect(response.body.data).not.toHaveProperty("html"); - expect(response.body.data.content).toContain("Scrape This Site"); - expect(response.body.data.content).not.toContain("Lessons and Videos"); // #footer - expect(response.body.data.content).not.toContain("[Sandbox]("); // .nav - expect(response.body.data.content).not.toContain("web scraping"); // strong - }, 30000); // 30 seconds timeout + it.concurrent( + "should return a successful response with a valid API key with removeTags option", + async () => { + const responseWithoutRemoveTags: FirecrawlScrapeResponse = + await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://www.scrapethissite.com/" }); + expect(responseWithoutRemoveTags.statusCode).toBe(200); + expect(responseWithoutRemoveTags.body).toHaveProperty("data"); + expect(responseWithoutRemoveTags.body.data).toHaveProperty("content"); + expect(responseWithoutRemoveTags.body.data).toHaveProperty("markdown"); + expect(responseWithoutRemoveTags.body.data).toHaveProperty("metadata"); + expect(responseWithoutRemoveTags.body.data).not.toHaveProperty("html"); + expect(responseWithoutRemoveTags.body.data.content).toContain( + "Scrape This Site" + ); + expect(responseWithoutRemoveTags.body.data.content).toContain( + "Lessons and Videos" + ); // #footer + expect(responseWithoutRemoveTags.body.data.content).toContain( + "[Sandbox](" + ); // .nav + expect(responseWithoutRemoveTags.body.data.content).toContain( + "web scraping" + ); // strong - it.concurrent('should return a successful response for a scrape with 400 page', async () => { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post('/v0/scrape') - .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) - .set('Content-Type', 'application/json') - .send({ url: 'https://httpstat.us/400' }); - await new Promise((r) => setTimeout(r, 5000)); + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ + url: "https://www.scrapethissite.com/", + pageOptions: { removeTags: [".nav", "#footer", "strong"] }, + }); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("markdown"); + expect(response.body.data).toHaveProperty("metadata"); + 
expect(response.body.data).not.toHaveProperty("html"); + expect(response.body.data.content).toContain("Scrape This Site"); + expect(response.body.data.content).not.toContain("Lessons and Videos"); // #footer + expect(response.body.data.content).not.toContain("[Sandbox]("); // .nav + expect(response.body.data.content).not.toContain("web scraping"); // strong + }, + 30000 + ); // 30 seconds timeout - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty('data'); - expect(response.body.data).toHaveProperty('content'); - expect(response.body.data).toHaveProperty('metadata'); - expect(response.body.data.metadata.pageStatusCode).toBe(400); - expect(response.body.data.metadata.pageError.toLowerCase()).toContain("bad request"); - }, 60000); // 60 seconds + it.concurrent( + "should return a successful response for a scrape with 400 page", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://httpstat.us/400" }); + await new Promise((r) => setTimeout(r, 5000)); - it.concurrent('should return a successful response for a scrape with 401 page', async () => { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post('/v0/scrape') - .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) - .set('Content-Type', 'application/json') - .send({ url: 'https://httpstat.us/401' }); - await new Promise((r) => setTimeout(r, 5000)); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.metadata.pageStatusCode).toBe(400); + expect(response.body.data.metadata.pageError.toLowerCase()).toContain( + "bad request" + ); + }, + 60000 + ); // 60 seconds - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty('data'); - expect(response.body.data).toHaveProperty('content'); - expect(response.body.data).toHaveProperty('metadata'); - expect(response.body.data.metadata.pageStatusCode).toBe(401); - expect(response.body.data.metadata.pageError.toLowerCase()).toContain("unauthorized"); - }, 60000); // 60 seconds + it.concurrent( + "should return a successful response for a scrape with 401 page", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://httpstat.us/401" }); + await new Promise((r) => setTimeout(r, 5000)); - it.concurrent("should return a successful response for a scrape with 403 page", async () => { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post('/v0/scrape') - .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) - .set('Content-Type', 'application/json') - .send({ url: 'https://httpstat.us/403' }); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.metadata.pageStatusCode).toBe(401); + expect(response.body.data.metadata.pageError.toLowerCase()).toContain( + "unauthorized" + ); + }, + 60000 + ); // 60 seconds - await new Promise((r) => setTimeout(r, 5000)); - expect(response.statusCode).toBe(200); - 
expect(response.body).toHaveProperty('data'); - expect(response.body.data).toHaveProperty('content'); - expect(response.body.data).toHaveProperty('metadata'); - expect(response.body.data.metadata.pageStatusCode).toBe(403); - expect(response.body.data.metadata.pageError.toLowerCase()).toContain("forbidden"); - }, 60000); // 60 seconds + it.concurrent( + "should return a successful response for a scrape with 403 page", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://httpstat.us/403" }); - it.concurrent('should return a successful response for a scrape with 404 page', async () => { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post('/v0/scrape') - .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) - .set('Content-Type', 'application/json') - .send({ url: 'https://httpstat.us/404' }); - await new Promise((r) => setTimeout(r, 5000)); + await new Promise((r) => setTimeout(r, 5000)); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.metadata.pageStatusCode).toBe(403); + expect(response.body.data.metadata.pageError.toLowerCase()).toContain( + "forbidden" + ); + }, + 60000 + ); // 60 seconds - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty('data'); - expect(response.body.data).toHaveProperty('content'); - expect(response.body.data).toHaveProperty('metadata'); - expect(response.body.data.metadata.pageStatusCode).toBe(404); - expect(response.body.data.metadata.pageError.toLowerCase()).toContain("not found"); - }, 60000); // 60 seconds + it.concurrent( + "should return a successful response for a scrape with 404 page", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://httpstat.us/404" }); + await new Promise((r) => setTimeout(r, 5000)); - it.concurrent('should return a successful response for a scrape with 405 page', async () => { - const response = await request(TEST_URL) - .post('/v0/scrape') - .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) - .set('Content-Type', 'application/json') - .send({ url: 'https://httpstat.us/405' }); - await new Promise((r) => setTimeout(r, 5000)); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.metadata.pageStatusCode).toBe(404); + }, + 60000 + ); // 60 seconds - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty('data'); - expect(response.body.data).toHaveProperty('content'); - expect(response.body.data).toHaveProperty('metadata'); - expect(response.body.data.metadata.pageStatusCode).toBe(405); - expect(response.body.data.metadata.pageError.toLowerCase()).toContain("method not allowed"); - }, 60000); // 60 seconds + it.concurrent( + "should return a successful response for a scrape with 405 page", + async () => { + const response = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + 
.set("Content-Type", "application/json") + .send({ url: "https://httpstat.us/405" }); + await new Promise((r) => setTimeout(r, 5000)); - it.concurrent('should return a successful response for a scrape with 500 page', async () => { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post('/v0/scrape') - .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) - .set('Content-Type', 'application/json') - .send({ url: 'https://httpstat.us/500' }); - await new Promise((r) => setTimeout(r, 5000)); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.metadata.pageStatusCode).toBe(405); + }, + 60000 + ); // 60 seconds - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty('data'); - expect(response.body.data).toHaveProperty('content'); - expect(response.body.data).toHaveProperty('metadata'); - expect(response.body.data.metadata.pageStatusCode).toBe(500); - expect(response.body.data.metadata.pageError.toLowerCase()).toContain("internal server error"); - }, 60000); // 60 seconds + it.concurrent( + "should return a successful response for a scrape with 500 page", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://httpstat.us/500" }); + await new Promise((r) => setTimeout(r, 5000)); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.metadata.pageStatusCode).toBe(500); + }, + 60000 + ); // 60 seconds }); describe("POST /v0/crawl", () => { it.concurrent("should require authorization", async () => { - const response: FirecrawlCrawlResponse = await request(TEST_URL).post("/v0/crawl"); - expect(response.statusCode).toBe(401); - }); - - it.concurrent("should return an error response with an invalid API key", async () => { - const response: FirecrawlCrawlResponse = await request(TEST_URL) - .post("/v0/crawl") - .set("Authorization", `Bearer invalid-api-key`) - .set("Content-Type", "application/json") - .send({ url: "https://firecrawl.dev" }); - expect(response.statusCode).toBe(401); - }); - - it.concurrent("should return a successful response with a valid API key for crawl", async () => { - const response: FirecrawlCrawlResponse = await request(TEST_URL) - .post("/v0/crawl") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ url: "https://firecrawl.dev" }); - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty("jobId"); - expect(response.body.jobId).toMatch( - /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$/ + const response: FirecrawlCrawlResponse = await request(TEST_URL).post( + "/v0/crawl" ); + expect(response.statusCode).toBe(401); }); - - it.concurrent("should return a successful response with a valid API key and valid includes option", async () => { - const crawlResponse: FirecrawlCrawlResponse = await request(TEST_URL) - .post("/v0/crawl") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ - url: "https://mendable.ai", - limit: 
10, - crawlerOptions: { - includes: ["blog/*"], - }, - }); - + + it.concurrent( + "should return an error response with an invalid API key", + async () => { + const response: FirecrawlCrawlResponse = await request(TEST_URL) + .post("/v0/crawl") + .set("Authorization", `Bearer invalid-api-key`) + .set("Content-Type", "application/json") + .send({ url: "https://firecrawl.dev" }); + expect(response.statusCode).toBe(401); + } + ); + + it.concurrent( + "should return a successful response with a valid API key for crawl", + async () => { + const response: FirecrawlCrawlResponse = await request(TEST_URL) + .post("/v0/crawl") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://firecrawl.dev" }); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("jobId"); + expect(response.body.jobId).toMatch( + /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$/ + ); + } + ); + + it.concurrent( + "should return a successful response with a valid API key and valid includes option", + async () => { + const crawlResponse: FirecrawlCrawlResponse = await request(TEST_URL) + .post("/v0/crawl") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ + url: "https://mendable.ai", + limit: 10, + crawlerOptions: { + includes: ["blog/*"], + }, + }); + let response: FirecrawlCrawlStatusResponse; let isFinished = false; @@ -311,149 +404,191 @@ describe("E2E Tests for API Routes", () => { } } - const completedResponse = response; + await new Promise((resolve) => setTimeout(resolve, 1000)); // wait for data to be saved on the database + const completedResponse = await request(TEST_URL) + .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); const urls = completedResponse.body.data.map( - (item: any) => item.metadata?.sourceURL - ); - expect(urls.length).toBeGreaterThan(5); - urls.forEach((url: string) => { - expect(url.startsWith("https://www.mendable.ai/blog/")).toBeTruthy(); - }); - - expect(completedResponse.statusCode).toBe(200); - expect(completedResponse.body).toHaveProperty("status"); - expect(completedResponse.body.status).toBe("completed"); - expect(completedResponse.body).toHaveProperty("data"); - expect(completedResponse.body.data[0]).toHaveProperty("content"); - expect(completedResponse.body.data[0]).toHaveProperty("markdown"); - expect(completedResponse.body.data[0]).toHaveProperty("metadata"); - expect(completedResponse.body.data[0].content).toContain("Mendable"); - expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe(200); - expect(completedResponse.body.data[0].metadata.pageError).toBeUndefined(); - }, 180000); // 180 seconds - - it.concurrent("should return a successful response with a valid API key and valid excludes option", async () => { - const crawlResponse: FirecrawlCrawlResponse = await request(TEST_URL) - .post("/v0/crawl") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ - url: "https://mendable.ai", - limit: 10, - crawlerOptions: { - excludes: ["blog/*"], - }, + (item: any) => item.metadata?.sourceURL + ); + expect(urls.length).toBeGreaterThan(5); + urls.forEach((url: string) => { + expect(url.startsWith("https://www.mendable.ai/blog/")).toBeTruthy(); }); - - let isFinished = false; - let response: FirecrawlCrawlStatusResponse; - while (!isFinished) { - 
response = await request(TEST_URL)
+        expect(completedResponse.statusCode).toBe(200);
+        expect(completedResponse.body).toHaveProperty("status");
+        expect(completedResponse.body.status).toBe("completed");
+        expect(completedResponse.body).toHaveProperty("data");
+        expect(completedResponse.body.data[0]).toHaveProperty("content");
+        expect(completedResponse.body.data[0]).toHaveProperty("markdown");
+        expect(completedResponse.body.data[0]).toHaveProperty("metadata");
+        expect(completedResponse.body.data[0].content).toContain("Mendable");
+        expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe(
+          200
+        );
+        expect(
+          completedResponse.body.data[0].metadata.pageError
+        ).toBeUndefined();
+      },
+      180000
+    ); // 180 seconds
+
+    it.concurrent(
+      "should return a successful response with a valid API key and valid excludes option",
+      async () => {
+        const crawlResponse: FirecrawlCrawlResponse = await request(TEST_URL)
+          .post("/v0/crawl")
+          .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+          .set("Content-Type", "application/json")
+          .send({
+            url: "https://mendable.ai",
+            limit: 10,
+            crawlerOptions: {
+              excludes: ["blog/*"],
+            },
+          });
+
+        let isFinished = false;
+        let response: FirecrawlCrawlStatusResponse;
+
+        while (!isFinished) {
+          response = await request(TEST_URL)
+            .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+            .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+
+          expect(response.statusCode).toBe(200);
+          expect(response.body).toHaveProperty("status");
+          isFinished = response.body.status === "completed";
+
+          if (!isFinished) {
+            await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
+          }
+        }
+
+        await new Promise((resolve) => setTimeout(resolve, 1000)); // wait for data to be saved on the database
+        const completedResponse: FirecrawlCrawlStatusResponse = await request(
+          TEST_URL
+        )
           .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
           .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+        const urls = completedResponse.body.data.map(
+          (item: any) => item.metadata?.sourceURL
+        );
+        expect(urls.length).toBeGreaterThan(5);
+        urls.forEach((url: string) => {
+          expect(url.startsWith("https://www.mendable.ai/blog/")).toBeFalsy();
+        });
+      },
+      90000
+    ); // 90 seconds
+
+    it.concurrent(
+      "should return a successful response with max depth option for a valid crawl job",
+      async () => {
+        const crawlResponse: FirecrawlCrawlResponse = await request(TEST_URL)
+          .post("/v0/crawl")
+          .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+          .set("Content-Type", "application/json")
+          .send({
+            url: "https://www.scrapethissite.com",
+            crawlerOptions: { maxDepth: 1 },
+          });
+        expect(crawlResponse.statusCode).toBe(200);
+
+        const response: FirecrawlCrawlStatusResponse = await request(TEST_URL)
+          .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+          .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
       expect(response.statusCode).toBe(200);
       expect(response.body).toHaveProperty("status");
-      isFinished = response.body.status === "completed";
-
-      if (!isFinished) {
-        await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
+        expect(["active", "waiting"]).toContain(response.body.status);
+        // wait for 60 seconds
+        let isCompleted = false;
+        while (!isCompleted) {
+          const statusCheckResponse = await request(TEST_URL)
+            .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+            .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+
expect(statusCheckResponse.statusCode).toBe(200); + isCompleted = statusCheckResponse.body.status === "completed"; + if (!isCompleted) { + await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again + } } - } - - const completedResponse: FirecrawlCrawlStatusResponse = response; - - const urls = completedResponse.body.data.map( - (item: any) => item.metadata?.sourceURL - ); - expect(urls.length).toBeGreaterThan(5); - urls.forEach((url: string) => { - expect(url.startsWith("https://wwww.mendable.ai/blog/")).toBeFalsy(); - }); - }, 90000); // 90 seconds - - it.concurrent("should return a successful response with max depth option for a valid crawl job", async () => { - const crawlResponse: FirecrawlCrawlResponse = await request(TEST_URL) - .post("/v0/crawl") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ - url: "https://www.scrapethissite.com", - crawlerOptions: { maxDepth: 1 }, - }); - expect(crawlResponse.statusCode).toBe(200); - - const response: FirecrawlCrawlStatusResponse = await request(TEST_URL) - .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty("status"); - expect(["active", "waiting"]).toContain(response.body.status); - // wait for 60 seconds - let isCompleted = false; - while (!isCompleted) { - const statusCheckResponse = await request(TEST_URL) + const completedResponse: FirecrawlCrawlStatusResponse = await request( + TEST_URL + ) .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); - expect(statusCheckResponse.statusCode).toBe(200); - isCompleted = statusCheckResponse.body.status === "completed"; - if (!isCompleted) { - await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again - } - } - const completedResponse: FirecrawlCrawlStatusResponse = await request(TEST_URL) - .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); - expect(completedResponse.statusCode).toBe(200); - expect(completedResponse.body).toHaveProperty("status"); - expect(completedResponse.body.status).toBe("completed"); - expect(completedResponse.body).toHaveProperty("data"); - expect(completedResponse.body.data[0]).toHaveProperty("content"); - expect(completedResponse.body.data[0]).toHaveProperty("markdown"); - expect(completedResponse.body.data[0]).toHaveProperty("metadata"); - expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe(200); - expect(completedResponse.body.data[0].metadata.pageError).toBeUndefined(); - const urls = completedResponse.body.data.map( - (item: any) => item.metadata?.sourceURL - ); - expect(urls.length).toBeGreaterThan(1); + expect(completedResponse.statusCode).toBe(200); + expect(completedResponse.body).toHaveProperty("status"); + expect(completedResponse.body.status).toBe("completed"); + expect(completedResponse.body).toHaveProperty("data"); + expect(completedResponse.body.data[0]).toHaveProperty("content"); + expect(completedResponse.body.data[0]).toHaveProperty("markdown"); + expect(completedResponse.body.data[0]).toHaveProperty("metadata"); + expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe( + 200 + ); + expect( + completedResponse.body.data[0].metadata.pageError + ).toBeUndefined(); + const urls = completedResponse.body.data.map( + 
(item: any) => item.metadata?.sourceURL + ); + expect(urls.length).toBeGreaterThan(1); - // Check if all URLs have a maximum depth of 1 - urls.forEach((url: string) => { - const pathSplits = new URL(url).pathname.split('/'); - const depth = pathSplits.length - (pathSplits[0].length === 0 && pathSplits[pathSplits.length - 1].length === 0 ? 1 : 0); - expect(depth).toBeLessThanOrEqual(2); - }); - }, 180000); + // Check if all URLs have a maximum depth of 1 + urls.forEach((url: string) => { + const pathSplits = new URL(url).pathname.split("/"); + const depth = + pathSplits.length - + (pathSplits[0].length === 0 && + pathSplits[pathSplits.length - 1].length === 0 + ? 1 + : 0); + expect(depth).toBeLessThanOrEqual(2); + }); + }, + 180000 + ); }); describe("POST /v0/crawlWebsitePreview", () => { it.concurrent("should require authorization", async () => { - const response: FirecrawlCrawlResponse = await request(TEST_URL).post("/v0/crawlWebsitePreview"); + const response: FirecrawlCrawlResponse = await request(TEST_URL).post( + "/v0/crawlWebsitePreview" + ); expect(response.statusCode).toBe(401); }); - it.concurrent("should return an error response with an invalid API key", async () => { - const response: FirecrawlCrawlResponse = await request(TEST_URL) - .post("/v0/crawlWebsitePreview") - .set("Authorization", `Bearer invalid-api-key`) - .set("Content-Type", "application/json") - .send({ url: "https://firecrawl.dev" }); - expect(response.statusCode).toBe(401); - }); + it.concurrent( + "should return an error response with an invalid API key", + async () => { + const response: FirecrawlCrawlResponse = await request(TEST_URL) + .post("/v0/crawlWebsitePreview") + .set("Authorization", `Bearer invalid-api-key`) + .set("Content-Type", "application/json") + .send({ url: "https://firecrawl.dev" }); + expect(response.statusCode).toBe(401); + } + ); - it.concurrent("should return a timeout error when scraping takes longer than the specified timeout", async () => { - const response: FirecrawlCrawlResponse = await request(TEST_URL) - .post("/v0/scrape") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ url: "https://firecrawl.dev", timeout: 1000 }); + it.concurrent( + "should return a timeout error when scraping takes longer than the specified timeout", + async () => { + const response: FirecrawlCrawlResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://firecrawl.dev", timeout: 1000 }); - expect(response.statusCode).toBe(408); - }, 3000); + expect(response.statusCode).toBe(408); + }, + 3000 + ); }); describe("POST /v0/search", () => { @@ -462,26 +597,33 @@ describe("E2E Tests for API Routes", () => { expect(response.statusCode).toBe(401); }); - it.concurrent("should return an error response with an invalid API key", async () => { - const response = await request(TEST_URL) - .post("/v0/search") - .set("Authorization", `Bearer invalid-api-key`) - .set("Content-Type", "application/json") - .send({ query: "test" }); - expect(response.statusCode).toBe(401); - }); + it.concurrent( + "should return an error response with an invalid API key", + async () => { + const response = await request(TEST_URL) + .post("/v0/search") + .set("Authorization", `Bearer invalid-api-key`) + .set("Content-Type", "application/json") + .send({ query: "test" }); + expect(response.statusCode).toBe(401); + } + ); - it.concurrent("should return a 
successful response with a valid API key for search", async () => { - const response = await request(TEST_URL) - .post("/v0/search") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ query: "test" }); - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty("success"); - expect(response.body.success).toBe(true); - expect(response.body).toHaveProperty("data"); - }, 30000); // 30 seconds timeout + it.concurrent( + "should return a successful response with a valid API key for search", + async () => { + const response = await request(TEST_URL) + .post("/v0/search") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ query: "test" }); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("success"); + expect(response.body.success).toBe(true); + expect(response.body).toHaveProperty("data"); + }, + 60000 + ); // 60 seconds timeout }); describe("GET /v0/crawl/status/:jobId", () => { @@ -490,62 +632,83 @@ describe("E2E Tests for API Routes", () => { expect(response.statusCode).toBe(401); }); - it.concurrent("should return an error response with an invalid API key", async () => { - const response = await request(TEST_URL) - .get("/v0/crawl/status/123") - .set("Authorization", `Bearer invalid-api-key`); - expect(response.statusCode).toBe(401); - }); - - it.concurrent("should return Job not found for invalid job ID", async () => { - const response = await request(TEST_URL) - .get("/v0/crawl/status/invalidJobId") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); - expect(response.statusCode).toBe(404); - }); - - it.concurrent("should return a successful crawl status response for a valid crawl job", async () => { - const crawlResponse = await request(TEST_URL) - .post("/v0/crawl") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ url: "https://mendable.ai/blog" }); - expect(crawlResponse.statusCode).toBe(200); - - let isCompleted = false; - let completedResponse; - - while (!isCompleted) { + it.concurrent( + "should return an error response with an invalid API key", + async () => { const response = await request(TEST_URL) + .get("/v0/crawl/status/123") + .set("Authorization", `Bearer invalid-api-key`); + expect(response.statusCode).toBe(401); + } + ); + + it.concurrent( + "should return Job not found for invalid job ID", + async () => { + const response = await request(TEST_URL) + .get("/v0/crawl/status/invalidJobId") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + expect(response.statusCode).toBe(404); + } + ); + + it.concurrent( + "should return a successful crawl status response for a valid crawl job", + async () => { + const crawlResponse = await request(TEST_URL) + .post("/v0/crawl") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://mendable.ai/blog" }); + expect(crawlResponse.statusCode).toBe(200); + + let isCompleted = false; + + while (!isCompleted) { + const response = await request(TEST_URL) + .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("status"); + + if (response.body.status === "completed") { + isCompleted = true; + } else { + await new Promise((r) => setTimeout(r, 1000)); // Wait for 1 second 
before checking again
+          }
+        }
+
+        await new Promise((resolve) => setTimeout(resolve, 1000)); // wait for data to be saved on the database
+        const completedResponse = await request(TEST_URL)
           .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
           .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-      expect(response.statusCode).toBe(200);
-      expect(response.body).toHaveProperty("status");
-      if (response.body.status === "completed") {
-        isCompleted = true;
-        completedResponse = response;
-      } else {
-        await new Promise((r) => setTimeout(r, 1000)); // Wait for 1 second before checking again
-      }
-    }
-    expect(completedResponse.body).toHaveProperty("status");
-    expect(completedResponse.body.status).toBe("completed");
-    expect(completedResponse.body).toHaveProperty("data");
-    expect(completedResponse.body.data[0]).toHaveProperty("content");
-    expect(completedResponse.body.data[0]).toHaveProperty("markdown");
-    expect(completedResponse.body.data[0]).toHaveProperty("metadata");
-    expect(completedResponse.body.data[0].content).toContain("Mendable");
-    expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe(200);
-    expect(completedResponse.body.data[0].metadata.pageError).toBeUndefined();
+        expect(completedResponse.body).toHaveProperty("status");
+        expect(completedResponse.body.status).toBe("completed");
+        expect(completedResponse.body).toHaveProperty("data");
+        expect(completedResponse.body.data[0]).toHaveProperty("content");
+        expect(completedResponse.body.data[0]).toHaveProperty("markdown");
+        expect(completedResponse.body.data[0]).toHaveProperty("metadata");
+        expect(completedResponse.body.data[0].content).toContain("Mendable");
+        expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe(
+          200
+        );
+        expect(
+          completedResponse.body.data[0].metadata.pageError
+        ).toBeUndefined();
-      const childrenLinks = completedResponse.body.data.filter(doc =>
-        doc.metadata && doc.metadata.sourceURL && doc.metadata.sourceURL.includes("mendable.ai/blog")
-      );
+        const childrenLinks = completedResponse.body.data.filter(
+          (doc) =>
+            doc.metadata &&
+            doc.metadata.sourceURL &&
+            doc.metadata.sourceURL.includes("mendable.ai/blog")
+        );
+
+        expect(childrenLinks.length).toBe(completedResponse.body.data.length);
+      },
+      180000
+    ); // 180 seconds
-      expect(childrenLinks.length).toBe(completedResponse.body.data.length);
-    }, 180000); // 120 seconds
-
    // TODO: review the test below
    // it.concurrent('should return a successful response for a valid crawl job with PDF files without explicit .pdf extension ', async () => {
    //   const crawlResponse = await request(TEST_URL)
@@ -592,148 +755,210 @@ describe("E2E Tests for API Routes", () => {
    //   expect(completedResponse.body.data[0].metadata.pageError).toBeUndefined();
    // }, 180000); // 120 seconds
-    it.concurrent("If someone cancels a crawl job, it should turn into failed status", async () => {
-      const crawlResponse = await request(TEST_URL)
-        .post("/v0/crawl")
-        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-        .set("Content-Type", "application/json")
-        .send({ url: "https://jestjs.io" });
+    it.concurrent(
+      "If someone cancels a crawl job, it should turn into failed status",
+      async () => {
+        const crawlResponse = await request(TEST_URL)
+          .post("/v0/crawl")
+          .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+          .set("Content-Type", "application/json")
+          .send({ url: "https://jestjs.io" });
-      expect(crawlResponse.statusCode).toBe(200);
+        expect(crawlResponse.statusCode).toBe(200);
-      await new Promise((r) => setTimeout(r, 20000));
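The behavior this test pins down: `DELETE /v0/crawl/cancel/:jobId` acknowledges with `status: "cancelled"`, and a later status poll reports the job as `failed`, with any already-scraped documents surfaced under `partial_data` rather than `data`. A minimal client-side sketch of that flow, assuming the same supertest setup as these tests (the `TEST_URL` value and helper name here are illustrative, not from the diff):

```ts
import request from "supertest";

const TEST_URL = "http://127.0.0.1:3002"; // assumed local test server; adjust as needed

// Hypothetical helper mirroring the flow asserted in this test:
// cancel a running crawl, then read back its settled status.
async function cancelAndVerify(jobId: string) {
  const cancel = await request(TEST_URL)
    .delete(`/v0/crawl/cancel/${jobId}`)
    .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
  // Cancellation is acknowledged immediately...
  console.assert(cancel.body.status === "cancelled");

  // ...and once the job settles, status reads "failed" and any scraped
  // documents are exposed under partial_data instead of data.
  const status = await request(TEST_URL)
    .get(`/v0/crawl/status/${jobId}`)
    .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
  return { final: status.body.status, partial: status.body.partial_data };
}
```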
+ await new Promise((r) => setTimeout(r, 20000)); - const responseCancel = await request(TEST_URL) - .delete(`/v0/crawl/cancel/${crawlResponse.body.jobId}`) - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); - expect(responseCancel.statusCode).toBe(200); - expect(responseCancel.body).toHaveProperty("status"); - expect(responseCancel.body.status).toBe("cancelled"); + const responseCancel = await request(TEST_URL) + .delete(`/v0/crawl/cancel/${crawlResponse.body.jobId}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + expect(responseCancel.statusCode).toBe(200); + expect(responseCancel.body).toHaveProperty("status"); + expect(responseCancel.body.status).toBe("cancelled"); - await new Promise((r) => setTimeout(r, 10000)); - const completedResponse = await request(TEST_URL) - .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + await new Promise((r) => setTimeout(r, 10000)); + const completedResponse = await request(TEST_URL) + .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); - expect(completedResponse.statusCode).toBe(200); - expect(completedResponse.body).toHaveProperty("status"); - expect(completedResponse.body.status).toBe("failed"); - expect(completedResponse.body).toHaveProperty("data"); - expect(completedResponse.body.data).toBeNull(); - expect(completedResponse.body).toHaveProperty("partial_data"); - expect(completedResponse.body.partial_data[0]).toHaveProperty("content"); - expect(completedResponse.body.partial_data[0]).toHaveProperty("markdown"); - expect(completedResponse.body.partial_data[0]).toHaveProperty("metadata"); - expect(completedResponse.body.partial_data[0].metadata.pageStatusCode).toBe(200); - expect(completedResponse.body.partial_data[0].metadata.pageError).toBeUndefined(); - }, 60000); // 60 seconds + expect(completedResponse.statusCode).toBe(200); + expect(completedResponse.body).toHaveProperty("status"); + expect(completedResponse.body.status).toBe("failed"); + expect(completedResponse.body).toHaveProperty("data"); + + let isNullOrEmptyArray = false; + if ( + completedResponse.body.data === null || + completedResponse.body.data.length === 0 + ) { + isNullOrEmptyArray = true; + } + expect(isNullOrEmptyArray).toBe(true); + expect(completedResponse.body.data).toEqual(expect.arrayContaining([])); + expect(completedResponse.body).toHaveProperty("partial_data"); + expect(completedResponse.body.partial_data[0]).toHaveProperty( + "content" + ); + expect(completedResponse.body.partial_data[0]).toHaveProperty( + "markdown" + ); + expect(completedResponse.body.partial_data[0]).toHaveProperty( + "metadata" + ); + expect( + completedResponse.body.partial_data[0].metadata.pageStatusCode + ).toBe(200); + expect( + completedResponse.body.partial_data[0].metadata.pageError + ).toBeUndefined(); + }, + 60000 + ); // 60 seconds }); describe("POST /v0/scrape with LLM Extraction", () => { - it.concurrent("should extract data using LLM extraction mode", async () => { - const response = await request(TEST_URL) - .post("/v0/scrape") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ - url: "https://mendable.ai", - pageOptions: { - onlyMainContent: true, - }, - extractorOptions: { - mode: "llm-extraction", - extractionPrompt: - "Based on the information on the page, find what the company's mission is and whether it supports SSO, and whether it is open source", - 
extractionSchema: { - type: "object", - properties: { - company_mission: { - type: "string", - }, - supports_sso: { - type: "boolean", - }, - is_open_source: { - type: "boolean", - }, - }, - required: ["company_mission", "supports_sso", "is_open_source"], + it.concurrent( + "should extract data using LLM extraction mode", + async () => { + const response = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ + url: "https://mendable.ai", + pageOptions: { + onlyMainContent: true, }, - }, - }); + extractorOptions: { + mode: "llm-extraction", + extractionPrompt: + "Based on the information on the page, find what the company's mission is and whether it supports SSO, and whether it is open source", + extractionSchema: { + type: "object", + properties: { + company_mission: { + type: "string", + }, + supports_sso: { + type: "boolean", + }, + is_open_source: { + type: "boolean", + }, + }, + required: ["company_mission", "supports_sso", "is_open_source"], + }, + }, + }); - // Ensure that the job was successfully created before proceeding with LLM extraction - expect(response.statusCode).toBe(200); + // Ensure that the job was successfully created before proceeding with LLM extraction + expect(response.statusCode).toBe(200); - // Assuming the LLM extraction object is available in the response body under `data.llm_extraction` - let llmExtraction = response.body.data.llm_extraction; + // Assuming the LLM extraction object is available in the response body under `data.llm_extraction` + let llmExtraction = response.body.data.llm_extraction; - // Check if the llm_extraction object has the required properties with correct types and values - expect(llmExtraction).toHaveProperty("company_mission"); - expect(typeof llmExtraction.company_mission).toBe("string"); - expect(llmExtraction).toHaveProperty("supports_sso"); - expect(llmExtraction.supports_sso).toBe(true); - expect(typeof llmExtraction.supports_sso).toBe("boolean"); - expect(llmExtraction).toHaveProperty("is_open_source"); - expect(llmExtraction.is_open_source).toBe(false); - expect(typeof llmExtraction.is_open_source).toBe("boolean"); - }, 60000); // 60 secs + // Check if the llm_extraction object has the required properties with correct types and values + expect(llmExtraction).toHaveProperty("company_mission"); + expect(typeof llmExtraction.company_mission).toBe("string"); + expect(llmExtraction).toHaveProperty("supports_sso"); + expect(llmExtraction.supports_sso).toBe(true); + expect(typeof llmExtraction.supports_sso).toBe("boolean"); + expect(llmExtraction).toHaveProperty("is_open_source"); + expect(llmExtraction.is_open_source).toBe(false); + expect(typeof llmExtraction.is_open_source).toBe("boolean"); + }, + 60000 + ); // 60 secs }); - describe("POST /v0/crawl with fast mode", () => { - it.concurrent("should complete the crawl under 20 seconds", async () => { - const startTime = Date.now(); + describe("POST /v0/map", () => { + it.concurrent( + "should return a list of links for mendable.ai without subdomains included", + async () => { + const response = await request(TEST_URL) + .post("/v1/map") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ + url: "https://mendable.ai", + }); - const crawlResponse = await request(TEST_URL) - .post("/v0/crawl") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ - url: 
"https://flutterbricks.com", - crawlerOptions: { - mode: "fast" - } + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("success", true); + expect(response.body).toHaveProperty("links"); + expect(response.body.links).not.toContain("https://docs.mendable.ai"); + expect(Array.isArray(response.body.links)).toBe(true); + expect(response.body.links.length).toBeGreaterThan(0); + }, + 60000 + ); // 60 secs + + it.concurrent( + "should return a list of links for a given URL with subdomains included", + async () => { + const response = await request(TEST_URL) + .post("/v1/map") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ + url: "https://python.langchain.com", + includeSubdomains: true, + }); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("success", true); + expect(response.body).toHaveProperty("links"); + expect(Array.isArray(response.body.links)).toBe(true); + expect(response.body.links.length).toBeGreaterThan(0); + }, + 60000 + ); // 60 secs + + it.concurrent( + "should return a list of links for a given URL with subdomains and search", + async () => { + const response = await request(TEST_URL) + .post("/v1/map") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ + url: "https://python.langchain.com", + includeSubdomains: true, + search: "agents", + }); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("success", true); + expect(response.body).toHaveProperty("links"); + expect(response.body.links).toContain( + "https://api.python.langchain.com/en/latest/_modules/langchain/agents/openai_functions_agent/base.html" + ); + expect(Array.isArray(response.body.links)).toBe(true); + expect(response.body.links.length).toBeGreaterThan(0); + response.body.links.forEach((link) => { + expect(link).toContain("python.langchain.com"); }); + }, + 60000 + ); // 60 secs - expect(crawlResponse.statusCode).toBe(200); + it.concurrent( + "should handle invalid URL input gracefully", + async () => { + const response = await request(TEST_URL) + .post("/v1/map") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ + url: "invalid-url", + includeSubdomains: true, + search: "agents", + }); - const jobId = crawlResponse.body.jobId; - let statusResponse; - let isFinished = false; - - while (!isFinished) { - statusResponse = await request(TEST_URL) - .get(`/v0/crawl/status/${jobId}`) - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); - - expect(statusResponse.statusCode).toBe(200); - isFinished = statusResponse.body.status === "completed"; - - if (!isFinished) { - await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again - } - } - - // const endTime = Date.now(); - // const timeElapsed = (endTime - startTime) / 1000; // Convert to seconds - - // console.log(`Time elapsed: ${timeElapsed} seconds`); - - expect(statusResponse.body.status).toBe("completed"); - expect(statusResponse.body).toHaveProperty("data"); - expect(statusResponse.body.data[0]).toHaveProperty("content"); - expect(statusResponse.body.data[0]).toHaveProperty("markdown"); - expect(statusResponse.body.data[0]).toHaveProperty("metadata"); - expect(statusResponse.body.data[0].metadata.pageStatusCode).toBe(200); - expect(statusResponse.body.data[0].metadata.pageError).toBeUndefined(); - - const results = 
statusResponse.body.data; - // results.forEach((result, i) => { - // console.log(result.metadata.sourceURL); - // }); - expect(results.length).toBeGreaterThanOrEqual(10); - expect(results.length).toBeLessThanOrEqual(15); - - }, 20000); + expect(response.statusCode).toBe(400); + expect(response.body).toHaveProperty("success", false); + expect(response.body).toHaveProperty("details"); + }, + 60000 + ); // 60 secs }); }); diff --git a/apps/api/src/controllers/__tests__/crawl.test.ts b/apps/api/src/controllers/__tests__/crawl.test.ts index 621c7436..4cbe720d 100644 --- a/apps/api/src/controllers/__tests__/crawl.test.ts +++ b/apps/api/src/controllers/__tests__/crawl.test.ts @@ -1,6 +1,6 @@ -import { crawlController } from '../crawl' +import { crawlController } from '../v0/crawl' import { Request, Response } from 'express'; -import { authenticateUser } from '../auth'; // Ensure this import is correct +import { authenticateUser } from '../v0/auth'; // Ensure this import is correct import { createIdempotencyKey } from '../../services/idempotency/create'; import { validateIdempotencyKey } from '../../services/idempotency/validate'; import { v4 as uuidv4 } from 'uuid'; diff --git a/apps/api/src/controllers/crawl-status.ts b/apps/api/src/controllers/crawl-status.ts deleted file mode 100644 index bc6de593..00000000 --- a/apps/api/src/controllers/crawl-status.ts +++ /dev/null @@ -1,69 +0,0 @@ -import { Request, Response } from "express"; -import { authenticateUser } from "./auth"; -import { RateLimiterMode } from "../../src/types"; -import { addWebScraperJob } from "../../src/services/queue-jobs"; -import { getWebScraperQueue } from "../../src/services/queue-service"; -import { supabaseGetJobById } from "../../src/lib/supabase-jobs"; -import { Logger } from "../../src/lib/logger"; - -export async function crawlStatusController(req: Request, res: Response) { - try { - const { success, team_id, error, status } = await authenticateUser( - req, - res, - RateLimiterMode.CrawlStatus - ); - if (!success) { - return res.status(status).json({ error }); - } - const job = await getWebScraperQueue().getJob(req.params.jobId); - if (!job) { - return res.status(404).json({ error: "Job not found" }); - } - - const isCancelled = await (await getWebScraperQueue().client).exists("cancelled:" + req.params.jobId); - - let progress = job.progress; - if(typeof progress !== 'object') { - progress = { - current: 0, - current_url: '', - total: 0, - current_step: '', - partialDocs: [] - } - } - const { - current = 0, - current_url = '', - total = 0, - current_step = '', - partialDocs = [] - } = progress as { current: number, current_url: string, total: number, current_step: string, partialDocs: any[] }; - - let data = job.returnvalue; - if (process.env.USE_DB_AUTHENTICATION === "true") { - const supabaseData = await supabaseGetJobById(req.params.jobId); - - if (supabaseData) { - data = supabaseData.docs; - } - } - - const jobStatus = await job.getState(); - - res.json({ - status: isCancelled ? "failed" : jobStatus, - // progress: job.progress(), - current, - current_url, - current_step, - total, - data: data && !isCancelled ? data : null, - partial_data: jobStatus == 'completed' && !isCancelled ? 
[] : partialDocs, - }); - } catch (error) { - Logger.error(error); - return res.status(500).json({ error: error.message }); - } -} diff --git a/apps/api/src/controllers/crawl.ts b/apps/api/src/controllers/crawl.ts deleted file mode 100644 index 42593d83..00000000 --- a/apps/api/src/controllers/crawl.ts +++ /dev/null @@ -1,110 +0,0 @@ -import { Request, Response } from "express"; -import { WebScraperDataProvider } from "../../src/scraper/WebScraper"; -import { billTeam } from "../../src/services/billing/credit_billing"; -import { checkTeamCredits } from "../../src/services/billing/credit_billing"; -import { authenticateUser } from "./auth"; -import { RateLimiterMode } from "../../src/types"; -import { addWebScraperJob } from "../../src/services/queue-jobs"; -import { isUrlBlocked } from "../../src/scraper/WebScraper/utils/blocklist"; -import { logCrawl } from "../../src/services/logging/crawl_log"; -import { validateIdempotencyKey } from "../../src/services/idempotency/validate"; -import { createIdempotencyKey } from "../../src/services/idempotency/create"; -import { defaultCrawlPageOptions, defaultCrawlerOptions, defaultOrigin } from "../../src/lib/default-values"; -import { v4 as uuidv4 } from "uuid"; -import { Logger } from "../../src/lib/logger"; - -export async function crawlController(req: Request, res: Response) { - try { - const { success, team_id, error, status } = await authenticateUser( - req, - res, - RateLimiterMode.Crawl - ); - if (!success) { - return res.status(status).json({ error }); - } - - if (req.headers["x-idempotency-key"]) { - const isIdempotencyValid = await validateIdempotencyKey(req); - if (!isIdempotencyValid) { - return res.status(409).json({ error: "Idempotency key already used" }); - } - try { - createIdempotencyKey(req); - } catch (error) { - Logger.error(error); - return res.status(500).json({ error: error.message }); - } - } - - const { success: creditsCheckSuccess, message: creditsCheckMessage } = - await checkTeamCredits(team_id, 1); - if (!creditsCheckSuccess) { - return res.status(402).json({ error: "Insufficient credits" }); - } - - const url = req.body.url; - if (!url) { - return res.status(400).json({ error: "Url is required" }); - } - - if (isUrlBlocked(url)) { - return res - .status(403) - .json({ - error: - "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.", - }); - } - - const mode = req.body.mode ?? "crawl"; - - const crawlerOptions = { ...defaultCrawlerOptions, ...req.body.crawlerOptions }; - const pageOptions = { ...defaultCrawlPageOptions, ...req.body.pageOptions }; - - if (mode === "single_urls" && !url.includes(",")) { // NOTE: do we need this? - try { - const a = new WebScraperDataProvider(); - await a.setOptions({ - jobId: uuidv4(), - mode: "single_urls", - urls: [url], - crawlerOptions: { ...crawlerOptions, returnOnlyUrls: true }, - pageOptions: pageOptions, - }); - - const docs = await a.getDocuments(false, (progress) => { - job.updateProgress({ - current: progress.current, - total: progress.total, - current_step: "SCRAPING", - current_url: progress.currentDocumentUrl, - }); - }); - return res.json({ - success: true, - documents: docs, - }); - } catch (error) { - Logger.error(error); - return res.status(500).json({ error: error.message }); - } - } - - const job = await addWebScraperJob({ - url: url, - mode: mode ?? 
"crawl", // fix for single urls not working - crawlerOptions: crawlerOptions, - team_id: team_id, - pageOptions: pageOptions, - origin: req.body.origin ?? defaultOrigin, - }); - - await logCrawl(job.id.toString(), team_id); - - res.json({ jobId: job.id }); - } catch (error) { - Logger.error(error); - return res.status(500).json({ error: error.message }); - } -} diff --git a/apps/api/src/controllers/crawlPreview.ts b/apps/api/src/controllers/crawlPreview.ts deleted file mode 100644 index 7c5c804d..00000000 --- a/apps/api/src/controllers/crawlPreview.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { Request, Response } from "express"; -import { authenticateUser } from "./auth"; -import { RateLimiterMode } from "../../src/types"; -import { addWebScraperJob } from "../../src/services/queue-jobs"; -import { isUrlBlocked } from "../../src/scraper/WebScraper/utils/blocklist"; -import { Logger } from "../../src/lib/logger"; - -export async function crawlPreviewController(req: Request, res: Response) { - try { - const { success, team_id, error, status } = await authenticateUser( - req, - res, - RateLimiterMode.Preview - ); - if (!success) { - return res.status(status).json({ error }); - } - // authenticate on supabase - const url = req.body.url; - if (!url) { - return res.status(400).json({ error: "Url is required" }); - } - - if (isUrlBlocked(url)) { - return res.status(403).json({ error: "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it." }); - } - - const mode = req.body.mode ?? "crawl"; - const crawlerOptions = req.body.crawlerOptions ?? {}; - const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, includeHtml: false, removeTags: [] }; - - const job = await addWebScraperJob({ - url: url, - mode: mode ?? 
"crawl", // fix for single urls not working - crawlerOptions: { ...crawlerOptions, limit: 5, maxCrawledLinks: 5 }, - team_id: "preview", - pageOptions: pageOptions, - origin: "website-preview", - }); - - res.json({ jobId: job.id }); - } catch (error) { - Logger.error(error); - return res.status(500).json({ error: error.message }); - } -} diff --git a/apps/api/src/controllers/status.ts b/apps/api/src/controllers/status.ts deleted file mode 100644 index 6437bea0..00000000 --- a/apps/api/src/controllers/status.ts +++ /dev/null @@ -1,59 +0,0 @@ -import { Request, Response } from "express"; -import { getWebScraperQueue } from "../../src/services/queue-service"; -import { supabaseGetJobById } from "../../src/lib/supabase-jobs"; -import { Logger } from "../../src/lib/logger"; - -export async function crawlJobStatusPreviewController(req: Request, res: Response) { - try { - const job = await getWebScraperQueue().getJob(req.params.jobId); - if (!job) { - return res.status(404).json({ error: "Job not found" }); - } - - let progress = job.progress; - if(typeof progress !== 'object') { - progress = { - current: 0, - current_url: '', - total: 0, - current_step: '', - partialDocs: [] - } - } - const { - current = 0, - current_url = '', - total = 0, - current_step = '', - partialDocs = [] - } = progress as { current: number, current_url: string, total: number, current_step: string, partialDocs: any[] }; - - let data = job.returnvalue; - if (process.env.USE_DB_AUTHENTICATION === "true") { - const supabaseData = await supabaseGetJobById(req.params.jobId); - - if (supabaseData) { - data = supabaseData.docs; - } - } - - let jobStatus = await job.getState(); - if (jobStatus === 'waiting' || jobStatus === 'delayed' || jobStatus === 'waiting-children' || jobStatus === 'unknown' || jobStatus === 'prioritized') { - jobStatus = 'active'; - } - - res.json({ - status: jobStatus, - // progress: job.progress(), - current, - current_url, - current_step, - total, - data: data ? data : null, - partial_data: jobStatus == 'completed' ? 
[] : partialDocs,
-    });
-  } catch (error) {
-    Logger.error(error);
-    return res.status(500).json({ error: error.message });
-  }
-}
diff --git a/apps/api/src/controllers/admin/queue.ts b/apps/api/src/controllers/v0/admin/queue.ts
similarity index 86%
rename from apps/api/src/controllers/admin/queue.ts
rename to apps/api/src/controllers/v0/admin/queue.ts
index 3f1e9323..a5b23e33 100644
--- a/apps/api/src/controllers/admin/queue.ts
+++ b/apps/api/src/controllers/v0/admin/queue.ts
@@ -1,9 +1,9 @@
 import { Request, Response } from "express";
 import { Job } from "bullmq";
-import { Logger } from "../../lib/logger";
-import { getWebScraperQueue } from "../../services/queue-service";
-import { checkAlerts } from "../../services/alerts";
+import { Logger } from "../../../lib/logger";
+import { getScrapeQueue } from "../../../services/queue-service";
+import { checkAlerts } from "../../../services/alerts";
 export async function cleanBefore24hCompleteJobsController(
   req: Request,
@@ -11,13 +11,13 @@
 ) {
   Logger.info("🐂 Cleaning jobs older than 24h");
   try {
-    const webScraperQueue = getWebScraperQueue();
+    const scrapeQueue = getScrapeQueue();
     const batchSize = 10;
     const numberOfBatches = 9; // Adjust based on your needs
     const completedJobsPromises: Promise<Job[]>[] = [];
     for (let i = 0; i < numberOfBatches; i++) {
       completedJobsPromises.push(
-        webScraperQueue.getJobs(
+        scrapeQueue.getJobs(
           ["completed"],
           i * batchSize,
           i * batchSize + batchSize,
@@ -68,10 +68,10 @@ export async function checkQueuesController(req: Request, res: Response) {
 // Use this as a "health check" that way we dont destroy the server
 export async function queuesController(req: Request, res: Response) {
   try {
-    const webScraperQueue = getWebScraperQueue();
+    const scrapeQueue = getScrapeQueue();
     const [webScraperActive] = await Promise.all([
-      webScraperQueue.getActiveCount(),
+      scrapeQueue.getActiveCount(),
     ]);
     const noActiveJobs = webScraperActive === 0;
diff --git a/apps/api/src/controllers/admin/redis-health.ts b/apps/api/src/controllers/v0/admin/redis-health.ts
similarity index 84%
rename from apps/api/src/controllers/admin/redis-health.ts
rename to apps/api/src/controllers/v0/admin/redis-health.ts
index e35d6db9..dc58d745 100644
--- a/apps/api/src/controllers/admin/redis-health.ts
+++ b/apps/api/src/controllers/v0/admin/redis-health.ts
@@ -1,8 +1,7 @@
 import { Request, Response } from "express";
 import Redis from "ioredis";
-import { Logger } from "../../lib/logger";
-import { sendSlackWebhook } from "../../services/alerts/slack";
-import { redisRateLimitClient } from "../../services/rate-limiter";
+import { Logger } from "../../../lib/logger";
+import { redisRateLimitClient } from "../../../services/rate-limiter";
 export async function redisHealthController(req: Request, res: Response) {
   const retryOperation = async (operation, retries = 3) => {
@@ -63,22 +62,22 @@ export async function redisHealthController(req: Request, res: Response) {
       Logger.info(
         `Redis instances health check: ${JSON.stringify(healthStatus)}`
       );
-      await sendSlackWebhook(
-        `[REDIS DOWN] Redis instances health check: ${JSON.stringify(
-          healthStatus
-        )}`,
-        true
-      );
+      // await sendSlackWebhook(
+      //   `[REDIS DOWN] Redis instances health check: ${JSON.stringify(
+      //     healthStatus
+      //   )}`,
+      //   true
+      // );
       return res
         .status(500)
         .json({ status: "unhealthy", details: healthStatus });
     }
   } catch (error) {
     Logger.error(`Redis health check failed: ${error}`);
-    await sendSlackWebhook(
-      `[REDIS DOWN] Redis instances health check: ${error.message}`,
-      true
-    );
+    // await sendSlackWebhook(
+    //   `[REDIS DOWN] Redis instances health check: ${error.message}`,
+    //   true
+    // );
     return res
       .status(500)
       .json({ status: "unhealthy", message: error.message });
diff --git a/apps/api/src/controllers/auth.ts b/apps/api/src/controllers/v0/auth.ts
similarity index 51%
rename from apps/api/src/controllers/auth.ts
rename to apps/api/src/controllers/v0/auth.ts
index 5dff80b8..039510c9 100644
--- a/apps/api/src/controllers/auth.ts
+++ b/apps/api/src/controllers/v0/auth.ts
@@ -1,26 +1,77 @@
-import { parseApi } from "../../src/lib/parseApi";
-import { getRateLimiter, } from "../../src/services/rate-limiter";
-import { AuthResponse, NotificationType, RateLimiterMode } from "../../src/types";
-import { supabase_service } from "../../src/services/supabase";
-import { withAuth } from "../../src/lib/withAuth";
+import { parseApi } from "../../../src/lib/parseApi";
+import { getRateLimiter } from "../../../src/services/rate-limiter";
+import {
+  AuthResponse,
+  NotificationType,
+  RateLimiterMode,
+} from "../../../src/types";
+import { supabase_service } from "../../../src/services/supabase";
+import { withAuth } from "../../../src/lib/withAuth";
 import { RateLimiterRedis } from "rate-limiter-flexible";
-import { setTraceAttributes } from '@hyperdx/node-opentelemetry';
-import { sendNotification } from "../services/notification/email_notification";
-import { Logger } from "../lib/logger";
+import { setTraceAttributes } from "@hyperdx/node-opentelemetry";
+import { sendNotification } from "../../services/notification/email_notification";
+import { Logger } from "../../lib/logger";
+import { redlock } from "../../../src/services/redlock";
+import { getValue } from "../../../src/services/redis";
+import { setValue } from "../../../src/services/redis";
+import { validate } from "uuid";
-export async function authenticateUser(req, res, mode?: RateLimiterMode): Promise<AuthResponse> {
+function normalizedApiIsUuid(potentialUuid: string): boolean {
+  // Check if the string is a valid UUID
+  return validate(potentialUuid);
+}
+export async function authenticateUser(
+  req,
+  res,
+  mode?: RateLimiterMode
+): Promise<AuthResponse> {
   return withAuth(supaAuthenticateUser)(req, res, mode);
 }
 function setTrace(team_id: string, api_key: string) {
   try {
     setTraceAttributes({
       team_id,
-      api_key
+      api_key,
     });
   } catch (error) {
     Logger.error(`Error setting trace attributes: ${error.message}`);
   }
+}
+async function getKeyAndPriceId(normalizedApi: string): Promise<{
+  success: boolean;
+  teamId?: string;
+  priceId?: string;
+  error?: string;
+  status?: number;
+}> {
+  const { data, error } = await supabase_service.rpc("get_key_and_price_id_2", {
+    api_key: normalizedApi,
+  });
+  if (error) {
+    Logger.error(`RPC ERROR (get_key_and_price_id_2): ${error.message}`);
+    return {
+      success: false,
+      error:
+        "The server seems overloaded. Please contact hello@firecrawl.com if you aren't sending too many requests at once.",
+      status: 500,
+    };
+  }
+  if (!data || data.length === 0) {
+    Logger.warn(`Error fetching api key: data is empty`);
+    // TODO: change this error code ?
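The refactored auth path above is a cache-aside lookup: `supaAuthenticateUser` first consults Redis under `api_key:<normalized key>`, and only on a miss calls the `get_key_and_price_id_2` RPC via `getKeyAndPriceId`, writing the result back with a short TTL. A minimal sketch of that pattern, assuming the string-based `getValue`/`setValue` Redis helpers imported above (the `resolveTeamAndPrice` helper and its `lookup` parameter are illustrative, not part of the diff):

```ts
import { getValue, setValue } from "../../../src/services/redis";

// Sketch of the cache-aside flow used by the auth controller: read through
// the api_key:<uuid> cache key, fall back to the RPC on a miss, and
// repopulate the cache with a short TTL so revoked keys expire quickly.
async function resolveTeamAndPrice(
  normalizedApi: string,
  lookup: (key: string) => Promise<{ teamId: string; priceId: string }>
): Promise<{ teamId: string; priceId: string }> {
  const cacheKey = `api_key:${normalizedApi}`;

  const cached = await getValue(cacheKey);
  if (cached) {
    const { team_id, price_id } = JSON.parse(cached);
    return { teamId: team_id, priceId: price_id };
  }

  const { teamId, priceId } = await lookup(normalizedApi);
  await setValue(
    cacheKey,
    JSON.stringify({ team_id: teamId, price_id: priceId }),
    10 // seconds; kept short so key revocation propagates fast
  );
  return { teamId, priceId };
}
```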
+    return {
+      success: false,
+      error: "Unauthorized: Invalid token",
+      status: 401,
+    };
+  } else {
+    return {
+      success: true,
+      teamId: data[0].team_id,
+      priceId: data[0].price_id,
+    };
+  }
 }
 export async function supaAuthenticateUser(
   req,
@@ -51,20 +102,83 @@
   const iptoken = incomingIP + token;
   let rateLimiter: RateLimiterRedis;
-  let subscriptionData: { team_id: string, plan: string } | null = null;
+  let subscriptionData: { team_id: string; plan: string } | null = null;
   let normalizedApi: string;
-  let team_id: string;
+  let cacheKey = "";
+  let redLockKey = "";
+  const lockTTL = 15000; // 15 seconds
+  let teamId: string | null = null;
+  let priceId: string | null = null;
   if (token == "this_is_just_a_preview_token") {
     rateLimiter = getRateLimiter(RateLimiterMode.Preview, token);
-    team_id = "preview";
+    teamId = "preview";
   } else {
     normalizedApi = parseApi(token);
+    if (!normalizedApiIsUuid(normalizedApi)) {
+      return {
+        success: false,
+        error: "Unauthorized: Invalid token",
+        status: 401,
+      };
+    }
+
+    cacheKey = `api_key:${normalizedApi}`;
+
+    try {
+      const teamIdPriceId = await getValue(cacheKey);
+      if (teamIdPriceId) {
+        const { team_id, price_id } = JSON.parse(teamIdPriceId);
+        teamId = team_id;
+        priceId = price_id;
+      } else {
+        const {
+          success,
+          teamId: tId,
+          priceId: pId,
+          error,
+          status,
+        } = await getKeyAndPriceId(normalizedApi);
+        if (!success) {
+          return { success, error, status };
+        }
+        teamId = tId;
+        priceId = pId;
+        await setValue(
+          cacheKey,
+          JSON.stringify({ team_id: teamId, price_id: priceId }),
+          10
+        );
+      }
+    } catch (error) {
+      Logger.error(`Error with auth function: ${error.message}`);
+      // const {
+      //   success,
+      //   teamId: tId,
+      //   priceId: pId,
+      //   error: e,
+      //   status,
+      // } = await getKeyAndPriceId(normalizedApi);
+      // if (!success) {
+      //   return { success, error: e, status };
+      // }
+      // teamId = tId;
+      // priceId = pId;
+      // const {
+      //   success,
+      //   teamId: tId,
+      //   priceId: pId,
+      //   error: e,
+      //   status,
+      // } = await getKeyAndPriceId(normalizedApi);
+      // if (!success) {
+      //   return { success, error: e, status };
+      // }
+      // teamId = tId;
+      // priceId = pId;
+    }
-    const { data, error } = await supabase_service.rpc(
-      'get_key_and_price_id_2', { api_key: normalizedApi }
-    );
     // get_key_and_price_id_2 rpc definition:
     // create or replace function get_key_and_price_id_2(api_key uuid)
     // returns table(key uuid, team_id uuid, price_id text) as $$
     // begin
     // return query
     // select api_keys.key, api_keys.team_id, subscriptions.price_id
     // from api_keys
     // left join subscriptions on api_keys.team_id = subscriptions.team_id
     // where api_keys.key = api_key;
     // end;
     // $$ language plpgsql;
-    if (error) {
-      Logger.warn(`Error fetching key and price_id: ${error.message}`);
-    } else {
-      // console.log('Key and Price ID:', data);
-    }
-
-
-
-    if (error || !data || data.length === 0) {
-      Logger.warn(`Error fetching api key: ${error.message} or data is empty`);
-      return {
-        success: false,
-        error: "Unauthorized: Invalid token",
-        status: 401,
-      };
-    }
-    const internal_team_id = data[0].team_id;
-    team_id = internal_team_id;
-
-    const plan = getPlanByPriceId(data[0].price_id);
+    const plan = getPlanByPriceId(priceId);
     // HyperDX Logging
-    setTrace(team_id, normalizedApi);
+    setTrace(teamId, normalizedApi);
     subscriptionData = {
-      team_id: team_id,
-      plan: plan
-    }
+      team_id: teamId,
+      plan: plan,
+    };
     switch (mode) {
       case RateLimiterMode.Crawl:
-        rateLimiter = getRateLimiter(RateLimiterMode.Crawl, token, subscriptionData.plan);
+        rateLimiter = getRateLimiter(
+          RateLimiterMode.Crawl,
+          token,
+          subscriptionData.plan
+        );
         break;
       case RateLimiterMode.Scrape:
-        rateLimiter = getRateLimiter(RateLimiterMode.Scrape, token, subscriptionData.plan);
+        rateLimiter = getRateLimiter(
+          RateLimiterMode.Scrape,
+          token,
+          subscriptionData.plan
+        );
         break;
       case RateLimiterMode.Search:
-        rateLimiter = getRateLimiter(RateLimiterMode.Search, token, subscriptionData.plan);
+        rateLimiter = getRateLimiter(
+          RateLimiterMode.Search,
+          token,
+          subscriptionData.plan
+        );
         break;
       case RateLimiterMode.CrawlStatus:
         rateLimiter = getRateLimiter(RateLimiterMode.CrawlStatus, token);
         break;
-
+
      case RateLimiterMode.Preview:
        rateLimiter = getRateLimiter(RateLimiterMode.Preview, token);
        break;
@@ -134,7 +241,8 @@
     }
   }
-  const team_endpoint_token = token === "this_is_just_a_preview_token" ? iptoken : team_id;
+  const team_endpoint_token =
+    token === "this_is_just_a_preview_token" ? iptoken : teamId;
   try {
     await rateLimiter.consume(team_endpoint_token);
@@ -147,7 +255,17 @@
     const startDate = new Date();
     const endDate = new Date();
     endDate.setDate(endDate.getDate() + 7);
+    // await sendNotification(team_id, NotificationType.RATE_LIMIT_REACHED, startDate.toISOString(), endDate.toISOString());
+
+    // Cache longer for 429s
+    if (teamId && priceId && mode !== RateLimiterMode.Preview) {
+      await setValue(
+        cacheKey,
+        JSON.stringify({ team_id: teamId, price_id: priceId }),
+        60 // 60 seconds, cache for everything
+      );
+    }
+
     return {
       success: false,
       error: `Rate limit exceeded. Consumed points: ${rateLimiterRes.consumedPoints}, Remaining points: ${rateLimiterRes.remainingPoints}. Upgrade your plan at https://firecrawl.dev/pricing for increased rate limits or please retry after ${secs}s, resets at ${retryDate}`,
@@ -157,7 +275,9 @@
   if (
     token === "this_is_just_a_preview_token" &&
-    (mode === RateLimiterMode.Scrape || mode === RateLimiterMode.Preview || mode === RateLimiterMode.Search)
+    (mode === RateLimiterMode.Scrape ||
+      mode === RateLimiterMode.Preview ||
+      mode === RateLimiterMode.Search)
   ) {
     return { success: true, team_id: "preview" };
     // check the origin of the request and make sure its from firecrawl.dev
@@ -181,8 +301,6 @@
       .select("*")
       .eq("key", normalizedApi);
-
-
     if (error || !data || data.length === 0) {
       Logger.warn(`Error fetching api key: ${error.message} or data is empty`);
       return {
@@ -195,26 +313,32 @@
     subscriptionData = data[0];
   }
-  return { success: true, team_id: subscriptionData.team_id, plan: subscriptionData.plan ?? ""};
+  return {
+    success: true,
+    team_id: subscriptionData.team_id,
+    plan: subscriptionData.plan ??
"", + }; } function getPlanByPriceId(price_id: string) { switch (price_id) { case process.env.STRIPE_PRICE_ID_STARTER: - return 'starter'; + return "starter"; case process.env.STRIPE_PRICE_ID_STANDARD: - return 'standard'; + return "standard"; case process.env.STRIPE_PRICE_ID_SCALE: - return 'scale'; + return "scale"; case process.env.STRIPE_PRICE_ID_HOBBY: case process.env.STRIPE_PRICE_ID_HOBBY_YEARLY: - return 'hobby'; + return "hobby"; case process.env.STRIPE_PRICE_ID_STANDARD_NEW: case process.env.STRIPE_PRICE_ID_STANDARD_NEW_YEARLY: - return 'standardnew'; + return "standardnew"; case process.env.STRIPE_PRICE_ID_GROWTH: case process.env.STRIPE_PRICE_ID_GROWTH_YEARLY: - return 'growth'; + return "growth"; + case process.env.STRIPE_PRICE_ID_GROWTH_DOUBLE_MONTHLY: + return "growthdouble"; default: - return 'free'; + return "free"; } -} \ No newline at end of file +} diff --git a/apps/api/src/controllers/crawl-cancel.ts b/apps/api/src/controllers/v0/crawl-cancel.ts similarity index 53% rename from apps/api/src/controllers/crawl-cancel.ts rename to apps/api/src/controllers/v0/crawl-cancel.ts index 86a4c5b4..4658d90c 100644 --- a/apps/api/src/controllers/crawl-cancel.ts +++ b/apps/api/src/controllers/v0/crawl-cancel.ts @@ -1,10 +1,9 @@ import { Request, Response } from "express"; import { authenticateUser } from "./auth"; -import { RateLimiterMode } from "../../src/types"; -import { getWebScraperQueue } from "../../src/services/queue-service"; -import { supabase_service } from "../../src/services/supabase"; -import { billTeam } from "../../src/services/billing/credit_billing"; -import { Logger } from "../../src/lib/logger"; +import { RateLimiterMode } from "../../../src/types"; +import { supabase_service } from "../../../src/services/supabase"; +import { Logger } from "../../../src/lib/logger"; +import { getCrawl, saveCrawl } from "../../../src/lib/crawl-redis"; export async function crawlCancelController(req: Request, res: Response) { try { @@ -18,8 +17,9 @@ export async function crawlCancelController(req: Request, res: Response) { if (!success) { return res.status(status).json({ error }); } - const job = await getWebScraperQueue().getJob(req.params.jobId); - if (!job) { + + const sc = await getCrawl(req.params.jobId); + if (!sc) { return res.status(404).json({ error: "Job not found" }); } @@ -39,27 +39,9 @@ export async function crawlCancelController(req: Request, res: Response) { } } - const jobState = await job.getState(); - let progress = job.progress; - if(typeof progress !== 'object') { - progress = { - partialDocs: [] - } - } - const { - partialDocs = [] - } = progress as { partialDocs: any[] }; - - if (partialDocs && partialDocs.length > 0 && jobState === "active") { - Logger.info("Billing team for partial docs..."); - // Note: the credits that we will bill them here might be lower than the actual - // due to promises that are not yet resolved - await billTeam(team_id, partialDocs.length); - } - try { - await (await getWebScraperQueue().client).set("cancelled:" + job.id, "true", "EX", 60 * 60); - await job.discard(); + sc.cancelled = true; + await saveCrawl(req.params.jobId, sc); } catch (error) { Logger.error(error); } diff --git a/apps/api/src/controllers/v0/crawl-status.ts b/apps/api/src/controllers/v0/crawl-status.ts new file mode 100644 index 00000000..0c982737 --- /dev/null +++ b/apps/api/src/controllers/v0/crawl-status.ts @@ -0,0 +1,60 @@ +import { Request, Response } from "express"; +import { authenticateUser } from "./auth"; +import { RateLimiterMode } from 
"../../../src/types"; +import { getScrapeQueue } from "../../../src/services/queue-service"; +import { Logger } from "../../../src/lib/logger"; +import { getCrawl, getCrawlJobs } from "../../../src/lib/crawl-redis"; +import { supabaseGetJobById } from "../../../src/lib/supabase-jobs"; + +export async function crawlStatusController(req: Request, res: Response) { + try { + const { success, team_id, error, status } = await authenticateUser( + req, + res, + RateLimiterMode.CrawlStatus + ); + if (!success) { + return res.status(status).json({ error }); + } + + const sc = await getCrawl(req.params.jobId); + if (!sc) { + return res.status(404).json({ error: "Job not found" }); + } + + if (sc.team_id !== team_id) { + return res.status(403).json({ error: "Forbidden" }); + } + + const jobIDs = await getCrawlJobs(req.params.jobId); + + const jobs = (await Promise.all(jobIDs.map(async x => { + const job = await getScrapeQueue().getJob(x); + + if (process.env.USE_DB_AUTHENTICATION === "true") { + const supabaseData = await supabaseGetJobById(job.id); + + if (supabaseData) { + job.returnvalue = supabaseData.docs; + } + } + + return job; + }))).sort((a, b) => a.timestamp - b.timestamp); + const jobStatuses = await Promise.all(jobs.map(x => x.getState())); + const jobStatus = sc.cancelled ? "failed" : jobStatuses.every(x => x === "completed") ? "completed" : jobStatuses.some(x => x === "failed") ? "failed" : "active"; + + const data = jobs.map(x => Array.isArray(x.returnvalue) ? x.returnvalue[0] : x.returnvalue); + + res.json({ + status: jobStatus, + current: jobStatuses.filter(x => x === "completed" || x === "failed").length, + total: jobs.length, + data: jobStatus === "completed" ? data : null, + partial_data: jobStatus === "completed" ? [] : data.filter(x => x !== null), + }); + } catch (error) { + Logger.error(error); + return res.status(500).json({ error: error.message }); + } +} diff --git a/apps/api/src/controllers/v0/crawl.ts b/apps/api/src/controllers/v0/crawl.ts new file mode 100644 index 00000000..a61e7b6d --- /dev/null +++ b/apps/api/src/controllers/v0/crawl.ts @@ -0,0 +1,171 @@ +import { Request, Response } from "express"; +import { checkTeamCredits } from "../../../src/services/billing/credit_billing"; +import { authenticateUser } from "./auth"; +import { RateLimiterMode } from "../../../src/types"; +import { addScrapeJob } from "../../../src/services/queue-jobs"; +import { isUrlBlocked } from "../../../src/scraper/WebScraper/utils/blocklist"; +import { logCrawl } from "../../../src/services/logging/crawl_log"; +import { validateIdempotencyKey } from "../../../src/services/idempotency/validate"; +import { createIdempotencyKey } from "../../../src/services/idempotency/create"; +import { defaultCrawlPageOptions, defaultCrawlerOptions, defaultOrigin } from "../../../src/lib/default-values"; +import { v4 as uuidv4 } from "uuid"; +import { Logger } from "../../../src/lib/logger"; +import { addCrawlJob, addCrawlJobs, crawlToCrawler, lockURL, lockURLs, saveCrawl, StoredCrawl } from "../../../src/lib/crawl-redis"; +import { getScrapeQueue } from "../../../src/services/queue-service"; +import { checkAndUpdateURL } from "../../../src/lib/validateUrl"; + +export async function crawlController(req: Request, res: Response) { + try { + const { success, team_id, error, status } = await authenticateUser( + req, + res, + RateLimiterMode.Crawl + ); + if (!success) { + return res.status(status).json({ error }); + } + + if (req.headers["x-idempotency-key"]) { + const isIdempotencyValid = await 
validateIdempotencyKey(req); + if (!isIdempotencyValid) { + return res.status(409).json({ error: "Idempotency key already used" }); + } + try { + createIdempotencyKey(req); + } catch (error) { + Logger.error(error); + return res.status(500).json({ error: error.message }); + } + } + + const { success: creditsCheckSuccess, message: creditsCheckMessage } = + await checkTeamCredits(team_id, 1); + if (!creditsCheckSuccess) { + return res.status(402).json({ error: "Insufficient credits" }); + } + + let url = req.body.url; + if (!url) { + return res.status(400).json({ error: "Url is required" }); + } + try { + url = checkAndUpdateURL(url).url; + } catch (e) { + return res + .status(e instanceof Error && e.message === "Invalid URL" ? 400 : 500) + .json({ error: e.message ?? e }); + } + + if (isUrlBlocked(url)) { + return res + .status(403) + .json({ + error: + "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.", + }); + } + + const mode = req.body.mode ?? "crawl"; + + const crawlerOptions = { ...defaultCrawlerOptions, ...req.body.crawlerOptions }; + const pageOptions = { ...defaultCrawlPageOptions, ...req.body.pageOptions }; + + // if (mode === "single_urls" && !url.includes(",")) { // NOTE: do we need this? + // try { + // const a = new WebScraperDataProvider(); + // await a.setOptions({ + // jobId: uuidv4(), + // mode: "single_urls", + // urls: [url], + // crawlerOptions: { ...crawlerOptions, returnOnlyUrls: true }, + // pageOptions: pageOptions, + // }); + + // const docs = await a.getDocuments(false, (progress) => { + // job.updateProgress({ + // current: progress.current, + // total: progress.total, + // current_step: "SCRAPING", + // current_url: progress.currentDocumentUrl, + // }); + // }); + // return res.json({ + // success: true, + // documents: docs, + // }); + // } catch (error) { + // Logger.error(error); + // return res.status(500).json({ error: error.message }); + // } + // } + + const id = uuidv4(); + + await logCrawl(id, team_id); + + const sc: StoredCrawl = { + originUrl: url, + crawlerOptions, + pageOptions, + team_id, + createdAt: Date.now(), + }; + + const crawler = crawlToCrawler(id, sc); + + try { + sc.robots = await crawler.getRobotsTxt(); + } catch (_) {} + + await saveCrawl(id, sc); + + const sitemap = sc.crawlerOptions?.ignoreSitemap ? null : await crawler.tryGetSitemap(); + + if (sitemap !== null) { + const jobs = sitemap.map(x => { + const url = x.url; + const uuid = uuidv4(); + return { + name: uuid, + data: { + url, + mode: "single_urls", + crawlerOptions: crawlerOptions, + team_id: team_id, + pageOptions: pageOptions, + origin: req.body.origin ?? defaultOrigin, + crawl_id: id, + sitemapped: true, + }, + opts: { + jobId: uuid, + priority: 20, + } + }; + }) + + await lockURLs(id, jobs.map(x => x.data.url)); + await addCrawlJobs(id, jobs.map(x => x.opts.jobId)); + await getScrapeQueue().addBulk(jobs); + } else { + await lockURL(id, sc, url); + const job = await addScrapeJob({ + url, + mode: "single_urls", + crawlerOptions: crawlerOptions, + team_id: team_id, + pageOptions: pageOptions, + origin: req.body.origin ?? 
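// Queue priorities (BullMQ serves lower numbers first): sitemap fan-out jobs
// above enqueue at priority 20, while this single seed scrape enqueues at 15
// so the first request of a crawl is treated like a standalone scrape.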
defaultOrigin, + crawl_id: id, + }, { + priority: 15, // prioritize request 0 of crawl jobs same as scrape jobs + }); + await addCrawlJob(id, job.id); + } + + res.json({ jobId: id }); + } catch (error) { + Logger.error(error); + return res.status(500).json({ error: error.message }); + } +} diff --git a/apps/api/src/controllers/v0/crawlPreview.ts b/apps/api/src/controllers/v0/crawlPreview.ts new file mode 100644 index 00000000..356da835 --- /dev/null +++ b/apps/api/src/controllers/v0/crawlPreview.ts @@ -0,0 +1,135 @@ +import { Request, Response } from "express"; +import { authenticateUser } from "./auth"; +import { RateLimiterMode } from "../../../src/types"; +import { isUrlBlocked } from "../../../src/scraper/WebScraper/utils/blocklist"; +import { v4 as uuidv4 } from "uuid"; +import { Logger } from "../../../src/lib/logger"; +import { addCrawlJob, crawlToCrawler, lockURL, saveCrawl, StoredCrawl } from "../../../src/lib/crawl-redis"; +import { addScrapeJob } from "../../../src/services/queue-jobs"; +import { checkAndUpdateURL } from "../../../src/lib/validateUrl"; + +export async function crawlPreviewController(req: Request, res: Response) { + try { + const { success, error, status } = await authenticateUser( + req, + res, + RateLimiterMode.Preview + ); + + const team_id = "preview"; + + if (!success) { + return res.status(status).json({ error }); + } + + let url = req.body.url; + if (!url) { + return res.status(400).json({ error: "Url is required" }); + } + try { + url = checkAndUpdateURL(url).url; + } catch (e) { + return res + .status(e instanceof Error && e.message === "Invalid URL" ? 400 : 500) + .json({ error: e.message ?? e }); + } + + if (isUrlBlocked(url)) { + return res + .status(403) + .json({ + error: + "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.", + }); + } + + const crawlerOptions = req.body.crawlerOptions ?? {}; + const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, includeHtml: false, removeTags: [] }; + + // if (mode === "single_urls" && !url.includes(",")) { // NOTE: do we need this? + // try { + // const a = new WebScraperDataProvider(); + // await a.setOptions({ + // jobId: uuidv4(), + // mode: "single_urls", + // urls: [url], + // crawlerOptions: { ...crawlerOptions, returnOnlyUrls: true }, + // pageOptions: pageOptions, + // }); + + // const docs = await a.getDocuments(false, (progress) => { + // job.updateProgress({ + // current: progress.current, + // total: progress.total, + // current_step: "SCRAPING", + // current_url: progress.currentDocumentUrl, + // }); + // }); + // return res.json({ + // success: true, + // documents: docs, + // }); + // } catch (error) { + // Logger.error(error); + // return res.status(500).json({ error: error.message }); + // } + // } + + const id = uuidv4(); + + let robots; + + try { + robots = await this.getRobotsTxt(); + } catch (_) {} + + const sc: StoredCrawl = { + originUrl: url, + crawlerOptions, + pageOptions, + team_id, + robots, + createdAt: Date.now(), + }; + + await saveCrawl(id, sc); + + const crawler = crawlToCrawler(id, sc); + + const sitemap = sc.crawlerOptions?.ignoreSitemap ? 
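// If a sitemap is found below, every sitemap URL is locked and enqueued as its
// own scrape job (flagged sitemapped: true); otherwise only the seed URL is
// enqueued. Locking a URL first presumably prevents the crawler from enqueuing
// it a second time when it is rediscovered through page links.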
null : await crawler.tryGetSitemap(); + + if (sitemap !== null) { + for (const url of sitemap.map(x => x.url)) { + await lockURL(id, sc, url); + const job = await addScrapeJob({ + url, + mode: "single_urls", + crawlerOptions: crawlerOptions, + team_id: team_id, + pageOptions: pageOptions, + origin: "website-preview", + crawl_id: id, + sitemapped: true, + }); + await addCrawlJob(id, job.id); + } + } else { + await lockURL(id, sc, url); + const job = await addScrapeJob({ + url, + mode: "single_urls", + crawlerOptions: crawlerOptions, + team_id: team_id, + pageOptions: pageOptions, + origin: "website-preview", + crawl_id: id, + }); + await addCrawlJob(id, job.id); + } + + res.json({ jobId: id }); + } catch (error) { + Logger.error(error); + return res.status(500).json({ error: error.message }); + } +} diff --git a/apps/api/src/controllers/keyAuth.ts b/apps/api/src/controllers/v0/keyAuth.ts similarity index 90% rename from apps/api/src/controllers/keyAuth.ts rename to apps/api/src/controllers/v0/keyAuth.ts index 351edd18..a0a4cabc 100644 --- a/apps/api/src/controllers/keyAuth.ts +++ b/apps/api/src/controllers/v0/keyAuth.ts @@ -1,5 +1,5 @@ -import { AuthResponse, RateLimiterMode } from "../types"; +import { AuthResponse, RateLimiterMode } from "../../types"; import { Request, Response } from "express"; import { authenticateUser } from "./auth"; diff --git a/apps/api/src/controllers/liveness.ts b/apps/api/src/controllers/v0/liveness.ts similarity index 100% rename from apps/api/src/controllers/liveness.ts rename to apps/api/src/controllers/v0/liveness.ts diff --git a/apps/api/src/controllers/readiness.ts b/apps/api/src/controllers/v0/readiness.ts similarity index 100% rename from apps/api/src/controllers/readiness.ts rename to apps/api/src/controllers/v0/readiness.ts diff --git a/apps/api/src/controllers/scrape.ts b/apps/api/src/controllers/v0/scrape.ts similarity index 84% rename from apps/api/src/controllers/scrape.ts rename to apps/api/src/controllers/v0/scrape.ts index 25cc90e4..4e1b696d 100644 --- a/apps/api/src/controllers/scrape.ts +++ b/apps/api/src/controllers/v0/scrape.ts @@ -1,17 +1,17 @@ -import { ExtractorOptions, PageOptions } from './../lib/entities'; +import { ExtractorOptions, PageOptions } from './../../lib/entities'; import { Request, Response } from "express"; -import { billTeam, checkTeamCredits } from "../services/billing/credit_billing"; +import { billTeam, checkTeamCredits } from "../../services/billing/credit_billing"; import { authenticateUser } from "./auth"; -import { RateLimiterMode } from "../types"; -import { logJob } from "../services/logging/log_job"; -import { Document } from "../lib/entities"; -import { isUrlBlocked } from "../scraper/WebScraper/utils/blocklist"; // Import the isUrlBlocked function -import { numTokensFromString } from '../lib/LLM-extraction/helpers'; -import { defaultPageOptions, defaultExtractorOptions, defaultTimeout, defaultOrigin } from '../lib/default-values'; -import { addScrapeJob } from '../services/queue-jobs'; -import { scrapeQueueEvents } from '../services/queue-service'; +import { RateLimiterMode } from "../../types"; +import { logJob } from "../../services/logging/log_job"; +import { Document } from "../../lib/entities"; +import { isUrlBlocked } from "../../scraper/WebScraper/utils/blocklist"; // Import the isUrlBlocked function +import { numTokensFromString } from '../../lib/LLM-extraction/helpers'; +import { defaultPageOptions, defaultExtractorOptions, defaultTimeout, defaultOrigin } from '../../lib/default-values'; +import { 
addScrapeJob } from '../../services/queue-jobs'; +import { scrapeQueueEvents } from '../../services/queue-service'; import { v4 as uuidv4 } from "uuid"; -import { Logger } from '../lib/logger'; +import { Logger } from '../../lib/logger'; export async function scrapeHelper( jobId: string, @@ -45,7 +45,7 @@ export async function scrapeHelper( pageOptions, extractorOptions, origin: req.body.origin ?? defaultOrigin, - }); + }, {}, jobId); let doc; try { @@ -62,6 +62,8 @@ export async function scrapeHelper( } } + await job.remove(); + if (!doc) { console.error("!!! PANIC DOC IS", doc, job); return { success: true, error: "No page found", returnCode: 200, data: doc }; @@ -121,13 +123,7 @@ export async function scrapeController(req: Request, res: Response) { }; - // Async check saves 500ms in average case - // Don't async check in llm extraction mode as it could be expensive - if (extractorOptions.mode.includes("llm-extraction")) { - await checkCredits(); - } else { - checkCredits(); - } + await checkCredits(); const jobId = uuidv4(); diff --git a/apps/api/src/controllers/search.ts b/apps/api/src/controllers/v0/search.ts similarity index 75% rename from apps/api/src/controllers/search.ts rename to apps/api/src/controllers/v0/search.ts index dfd9b8b9..948e883d 100644 --- a/apps/api/src/controllers/search.ts +++ b/apps/api/src/controllers/v0/search.ts @@ -1,14 +1,15 @@ import { Request, Response } from "express"; -import { WebScraperDataProvider } from "../scraper/WebScraper"; -import { billTeam, checkTeamCredits } from "../services/billing/credit_billing"; +import { WebScraperDataProvider } from "../../scraper/WebScraper"; +import { billTeam, checkTeamCredits } from "../../services/billing/credit_billing"; import { authenticateUser } from "./auth"; -import { RateLimiterMode } from "../types"; -import { logJob } from "../services/logging/log_job"; -import { PageOptions, SearchOptions } from "../lib/entities"; -import { search } from "../search"; -import { isUrlBlocked } from "../scraper/WebScraper/utils/blocklist"; +import { RateLimiterMode } from "../../types"; +import { logJob } from "../../services/logging/log_job"; +import { PageOptions, SearchOptions } from "../../lib/entities"; +import { search } from "../../search"; +import { isUrlBlocked } from "../../scraper/WebScraper/utils/blocklist"; import { v4 as uuidv4 } from "uuid"; -import { Logger } from "../lib/logger"; +import { Logger } from "../../lib/logger"; +import { getScrapeQueue, scrapeQueueEvents } from "../../services/queue-service"; export async function searchHelper( jobId: string, @@ -75,26 +76,28 @@ export async function searchHelper( // filter out social media links + const jobDatas = res.map(x => { + const url = x.url; + const uuid = uuidv4(); + return { + name: uuid, + data: { + url, + mode: "single_urls", + crawlerOptions: crawlerOptions, + team_id: team_id, + pageOptions: pageOptions, + }, + opts: { + jobId: uuid, + priority: 10, + } + }; + }) + + const jobs = await getScrapeQueue().addBulk(jobDatas); - const a = new WebScraperDataProvider(); - await a.setOptions({ - jobId, - mode: "single_urls", - urls: res.map((r) => r.url).slice(0, searchOptions.limit ?? 7), - crawlerOptions: { - ...crawlerOptions, - }, - pageOptions: { - ...pageOptions, - onlyMainContent: pageOptions?.onlyMainContent ?? true, - fetchPageContent: pageOptions?.fetchPageContent ?? true, - includeHtml: pageOptions?.includeHtml ?? false, - removeTags: pageOptions?.removeTags ?? 
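// Search now fans out: each search result URL becomes its own scrape job
// (priority 10, one UUID per job) pushed through addBulk, and the code below
// waits up to 60 seconds per job via the shared QueueEvents instance instead
// of scraping in-process with WebScraperDataProvider.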
[], - fallback: false, - }, - }); - - const docs = await a.getDocuments(false); + const docs = (await Promise.all(jobs.map(x => x.waitUntilFinished(scrapeQueueEvents, 60000)))).map(x => x[0]); if (docs.length === 0) { return { success: true, error: "No search results found", returnCode: 200 }; @@ -109,19 +112,6 @@ export async function searchHelper( return { success: true, error: "No page found", returnCode: 200, data: docs }; } - const billingResult = await billTeam( - team_id, - filteredDocs.length - ); - if (!billingResult.success) { - return { - success: false, - error: - "Failed to bill team. Insufficient credits or subscription not found.", - returnCode: 402, - }; - } - return { success: true, data: filteredDocs, @@ -150,8 +140,8 @@ export async function searchController(req: Request, res: Response) { }; const origin = req.body.origin ?? "api"; - const searchOptions = req.body.searchOptions ?? { limit: 7 }; - + const searchOptions = req.body.searchOptions ?? { limit: 5 }; + const jobId = uuidv4(); try { diff --git a/apps/api/src/controllers/v0/status.ts b/apps/api/src/controllers/v0/status.ts new file mode 100644 index 00000000..5efb0ff1 --- /dev/null +++ b/apps/api/src/controllers/v0/status.ts @@ -0,0 +1,54 @@ +import { Request, Response } from "express"; +import { Logger } from "../../../src/lib/logger"; +import { getCrawl, getCrawlJobs } from "../../../src/lib/crawl-redis"; +import { getScrapeQueue } from "../../../src/services/queue-service"; +import { supabaseGetJobById } from "../../../src/lib/supabase-jobs"; + +export async function crawlJobStatusPreviewController(req: Request, res: Response) { + try { + const sc = await getCrawl(req.params.jobId); + if (!sc) { + return res.status(404).json({ error: "Job not found" }); + } + + const jobIDs = await getCrawlJobs(req.params.jobId); + + // let data = job.returnvalue; + // if (process.env.USE_DB_AUTHENTICATION === "true") { + // const supabaseData = await supabaseGetJobById(req.params.jobId); + + // if (supabaseData) { + // data = supabaseData.docs; + // } + // } + + const jobs = (await Promise.all(jobIDs.map(async x => { + const job = await getScrapeQueue().getJob(x); + + if (process.env.USE_DB_AUTHENTICATION === "true") { + const supabaseData = await supabaseGetJobById(job.id); + + if (supabaseData) { + job.returnvalue = supabaseData.docs; + } + } + + return job; + }))).sort((a, b) => a.timestamp - b.timestamp); + const jobStatuses = await Promise.all(jobs.map(x => x.getState())); + const jobStatus = sc.cancelled ? "failed" : jobStatuses.every(x => x === "completed") ? "completed" : jobStatuses.some(x => x === "failed") ? "failed" : "active"; + + const data = jobs.map(x => Array.isArray(x.returnvalue) ? x.returnvalue[0] : x.returnvalue); + + res.json({ + status: jobStatus, + current: jobStatuses.filter(x => x === "completed" || x === "failed").length, + total: jobs.length, + data: jobStatus === "completed" ? data : null, + partial_data: jobStatus === "completed" ? 
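// Response shape: while the crawl is still active, `data` is null and
// `partial_data` carries whatever documents have finished so far; once every
// job has completed, `data` holds the full set and `partial_data` empties out.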
[] : data.filter(x => x !== null), + }); + } catch (error) { + Logger.error(error); + return res.status(500).json({ error: error.message }); + } +} diff --git a/apps/api/src/controllers/v1/auth.ts b/apps/api/src/controllers/v1/auth.ts index bd45648d..d4da3c6b 100644 --- a/apps/api/src/controllers/v1/auth.ts +++ b/apps/api/src/controllers/v1/auth.ts @@ -26,13 +26,7 @@ export async function supaAuthenticateUser( req, res, mode?: RateLimiterMode -): Promise<{ - success: boolean; - team_id?: string; - error?: string; - status?: number; - plan?: string; -}> { +): Promise { const authHeader = req.headers.authorization; if (!authHeader) { return { success: false, error: "Unauthorized", status: 401 }; @@ -106,7 +100,7 @@ export async function supaAuthenticateUser( setTrace(team_id, normalizedApi); subscriptionData = { team_id: team_id, - plan: plan + plan: plan, } switch (mode) { case RateLimiterMode.Crawl: @@ -121,6 +115,9 @@ export async function supaAuthenticateUser( case RateLimiterMode.CrawlStatus: rateLimiter = getRateLimiter(RateLimiterMode.CrawlStatus, token); break; + case RateLimiterMode.Map: + rateLimiter = getRateLimiter(RateLimiterMode.Map, token); + break; case RateLimiterMode.Preview: rateLimiter = getRateLimiter(RateLimiterMode.Preview, token); @@ -157,7 +154,7 @@ export async function supaAuthenticateUser( if ( token === "this_is_just_a_preview_token" && - (mode === RateLimiterMode.Scrape || mode === RateLimiterMode.Preview || mode === RateLimiterMode.Search) + (mode === RateLimiterMode.Scrape || mode === RateLimiterMode.Preview || mode === RateLimiterMode.Search || mode === RateLimiterMode.Map) ) { return { success: true, team_id: "preview" }; // check the origin of the request and make sure its from firecrawl.dev @@ -195,7 +192,12 @@ export async function supaAuthenticateUser( subscriptionData = data[0]; } - return { success: true, team_id: subscriptionData.team_id, plan: subscriptionData.plan ?? ""}; + return { + success: true, + team_id: subscriptionData.team_id, + plan: subscriptionData.plan ?? 
"", + api_key: normalizedApi + }; } function getPlanByPriceId(price_id: string) { switch (price_id) { diff --git a/apps/api/src/controllers/v1/crawl-status-ws.ts b/apps/api/src/controllers/v1/crawl-status-ws.ts new file mode 100644 index 00000000..6e2e2eaf --- /dev/null +++ b/apps/api/src/controllers/v1/crawl-status-ws.ts @@ -0,0 +1,148 @@ +import { authMiddleware } from "../../routes/v1"; +import { RateLimiterMode } from "../../types"; +import { authenticateUser } from "../v0/auth"; +import { CrawlStatusParams, CrawlStatusResponse, Document, ErrorResponse, legacyDocumentConverter, RequestWithAuth } from "./types"; +import { WebSocket } from "ws"; +import { v4 as uuidv4 } from "uuid"; +import { Logger } from "../../lib/logger"; +import { getCrawl, getCrawlExpiry, getCrawlJobs, getDoneJobsOrdered, getDoneJobsOrderedLength, isCrawlFinished, isCrawlFinishedLocked } from "../../lib/crawl-redis"; +import { getScrapeQueue, scrapeQueueEvents } from "../../services/queue-service"; +import { getJob, getJobs } from "./crawl-status"; + +type ErrorMessage = { + type: "error", + error: string, +} + +type CatchupMessage = { + type: "catchup", + data: CrawlStatusResponse, +} + +type DocumentMessage = { + type: "document", + data: Document, +} + +type DoneMessage = { type: "done" } + +type Message = ErrorMessage | CatchupMessage | DoneMessage | DocumentMessage; + +function send(ws: WebSocket, msg: Message) { + if (ws.readyState === 1) { + return new Promise((resolve, reject) => { + ws.send(JSON.stringify(msg), (err) => { + if (err) reject(err); + else resolve(null); + }); + }); + } +} + +function close(ws: WebSocket, code: number, msg: Message) { + if (ws.readyState <= 1) { + ws.close(code, JSON.stringify(msg)); + } +} + +async function crawlStatusWS(ws: WebSocket, req: RequestWithAuth) { + const sc = await getCrawl(req.params.jobId); + if (!sc) { + return close(ws, 1008, { type: "error", error: "Job not found" }); + } + + if (sc.team_id !== req.auth.team_id) { + return close(ws, 3003, { type: "error", error: "Forbidden" }); + } + + let doneJobIDs = []; + + const completedListener = async e => { + const job = await getScrapeQueue().getJob(e.jobId) + if (job.data.crawl_id === req.params.jobId) { + if (doneJobIDs.includes(job.id)) return; + const j = await getJob(job.id); + if (j.returnvalue) { + send(ws, { + type: "document", + data: legacyDocumentConverter(j.returnvalue), + }); + if (await isCrawlFinishedLocked(req.params.jobId)) { + await new Promise((resolve) => setTimeout(() => resolve(true), 5000)) // wait for last events to pour in + scrapeQueueEvents.removeListener("completed", completedListener); + close(ws, 1000, { type: "done" }) + } + } else { + // FAILED + } + } + }; + + // TODO: handle failed jobs + + scrapeQueueEvents.addListener("completed", completedListener); + + doneJobIDs = await getDoneJobsOrdered(req.params.jobId); + + const jobIDs = await getCrawlJobs(req.params.jobId); + const jobStatuses = await Promise.all(jobIDs.map(x => getScrapeQueue().getJobState(x))); + const status: Exclude["status"] = sc.cancelled ? "cancelled" : jobStatuses.every(x => x === "completed") ? "completed" : jobStatuses.some(x => x === "failed") ? 
"failed" : "scraping"; + const doneJobs = await getJobs(doneJobIDs); + const data = doneJobs.map(x => x.returnvalue); + + send(ws, { + type: "catchup", + data: { + status, + totalCount: jobIDs.length, + creditsUsed: jobIDs.length, + expiresAt: (await getCrawlExpiry(req.params.jobId)).toISOString(), + data: data.map(x => legacyDocumentConverter(x)), + } + }); + + if (status !== "scraping") { + scrapeQueueEvents.removeListener("completed", completedListener); + return close(ws, 1000, { type: "done" }); + } +} + +// Basically just middleware and error wrapping +export async function crawlStatusWSController(ws: WebSocket, req: RequestWithAuth) { + try { + const { success, team_id, error, status, plan } = await authenticateUser( + req, + null, + RateLimiterMode.CrawlStatus, + ); + + if (!success) { + return close(ws, 3000, { + type: "error", + error, + }); + } + + req.auth = { team_id, plan }; + + await crawlStatusWS(ws, req); + } catch (err) { + const id = uuidv4(); + let verbose = JSON.stringify(err); + if (verbose === "{}") { + if (err instanceof Error) { + verbose = JSON.stringify({ + message: err.message, + name: err.name, + stack: err.stack, + }); + } + } + + Logger.error("Error occurred in WebSocket! (" + req.path + ") -- ID " + id + " -- " + verbose); + return close(ws, 1011, { + type: "error", + error: "An unexpected error occurred. Please contact hello@firecrawl.com for help. Your exception ID is " + id + }); + } +} diff --git a/apps/api/src/controllers/v1/crawl-status.ts b/apps/api/src/controllers/v1/crawl-status.ts index da2bc11e..3cbacea3 100644 --- a/apps/api/src/controllers/v1/crawl-status.ts +++ b/apps/api/src/controllers/v1/crawl-status.ts @@ -1,89 +1,115 @@ -import { Request, Response } from "express"; -import { authenticateUser } from "./auth"; -import { RateLimiterMode } from "../../../src/types"; -import { addWebScraperJob } from "../../../src/services/queue-jobs"; -import { getWebScraperQueue } from "../../../src/services/queue-service"; -import { supabaseGetJobById } from "../../../src/lib/supabase-jobs"; -import { Logger } from "../../../src/lib/logger"; -import { v4 as uuidv4 } from "uuid"; +import { Response } from "express"; +import { CrawlStatusParams, CrawlStatusResponse, ErrorResponse, legacyDocumentConverter, RequestWithAuth } from "./types"; +import { getCrawl, getCrawlExpiry, getCrawlJobs, getDoneJobsOrdered, getDoneJobsOrderedLength } from "../../lib/crawl-redis"; +import { getScrapeQueue } from "../../services/queue-service"; +import { supabaseGetJobById, supabaseGetJobsById } from "../../lib/supabase-jobs"; -export async function crawlStatusController(req: Request, res: Response) { - // TODO: validate req.params.jobId +export async function getJob(id: string) { + const job = await getScrapeQueue().getJob(id); + if (!job) return job; + + if (process.env.USE_DB_AUTHENTICATION === "true") { + const supabaseData = await supabaseGetJobById(id); - try { - const { success, team_id, error, status } = await authenticateUser( - req, - res, - RateLimiterMode.CrawlStatus - ); - if (!success) { - return res.status(status).json({ error }); + if (supabaseData) { + job.returnvalue = supabaseData.docs; } - - // const job = await getWebScraperQueue().getJob(req.params.jobId); - // if (!job) { - // return res.status(404).json({ error: "Job not found" }); - // } - - // const { current, current_url, total, current_step, partialDocs } = await job.progress(); - - // let data = job.returnvalue; - // if (process.env.USE_DB_AUTHENTICATION === "true") { - // const supabaseData = await 
supabaseGetJobById(req.params.jobId); - - // if (supabaseData) { - // data = supabaseData.docs; - // } - // } - - // const jobStatus = await job.getState(); - - // mock: - const id = uuidv4(); - const result = { - totalCount: 100, - creditsUsed: 2, - expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000).getTime(), - status: "scraping", // scraping, completed, failed - next: `${req.protocol}://${req.get("host")}/v1/crawl/${id}`, - data: [{ - markdown: "test", - content: "test", - html: "test", - rawHtml: "test", - linksOnPage: ["test1", "test2"], - screenshot: "test", - metadata: { - title: "test", - description: "test", - language: "test", - sourceURL: "test", - statusCode: 200, - error: "test" - } - }, - { - markdown: "test", - content: "test", - html: "test", - rawHtml: "test", - linksOnPage: ["test1", "test2"], - screenshot: "test", - metadata: { - title: "test", - description: "test", - language: "test", - sourceURL: "test", - statusCode: 200, - error: "test" - } - }] - } - - res.status(200).json(result); - } catch (error) { - Logger.error(error); - return res.status(500).json({ error: error.message }); } + + job.returnvalue = Array.isArray(job.returnvalue) ? job.returnvalue[0] : job.returnvalue; + + return job; +} + +export async function getJobs(ids: string[]) { + const jobs = (await Promise.all(ids.map(x => getScrapeQueue().getJob(x)))).filter(x => x); + + if (process.env.USE_DB_AUTHENTICATION === "true") { + const supabaseData = await supabaseGetJobsById(ids); + + supabaseData.forEach(x => { + const job = jobs.find(y => y.id === x.job_id); + if (job) { + job.returnvalue = x.docs; + } + }) + } + + jobs.forEach(job => { + job.returnvalue = Array.isArray(job.returnvalue) ? job.returnvalue[0] : job.returnvalue; + }); + + return jobs; +} + +export async function crawlStatusController(req: RequestWithAuth, res: Response) { + const sc = await getCrawl(req.params.jobId); + if (!sc) { + return res.status(404).json({ success: false, error: "Job not found" }); + } + + if (sc.team_id !== req.auth.team_id) { + return res.status(403).json({ success: false, error: "Forbidden" }); + } + + const start = typeof req.query.skip === "string" ? parseInt(req.query.skip, 10) : 0; + const end = typeof req.query.limit === "string" ? (start + parseInt(req.query.limit, 10) - 1) : undefined; + + const jobIDs = await getCrawlJobs(req.params.jobId); + const jobStatuses = await Promise.all(jobIDs.map(x => getScrapeQueue().getJobState(x))); + const status: Exclude["status"] = sc.cancelled ? "cancelled" : jobStatuses.every(x => x === "completed") ? "completed" : jobStatuses.some(x => x === "failed") ? "failed" : "scraping"; + const doneJobsLength = await getDoneJobsOrderedLength(req.params.jobId); + const doneJobsOrder = await getDoneJobsOrdered(req.params.jobId, start, end ?? 
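// Pagination: `skip` maps to the start index and `limit` to an inclusive end
// index into the ordered list of finished jobs. When no limit is given, the
// -1 below presumably follows the Redis LRANGE convention of "through the last
// element", and the 10 MiB scan further down bounds the response size instead.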
-1); + + let doneJobs = []; + + if (end === undefined) { // determine 10 megabyte limit + let bytes = 0; + const bytesLimit = 10485760; // 10 MiB in bytes + const factor = 100; // chunking for faster retrieval + + for (let i = 0; i < doneJobsOrder.length && bytes < bytesLimit; i += factor) { + // get current chunk and retrieve jobs + const currentIDs = doneJobsOrder.slice(i, i+factor); + const jobs = await getJobs(currentIDs); + + // iterate through jobs and add them one them one to the byte counter + // both loops will break once we cross the byte counter + for (let ii = 0; ii < jobs.length && bytes < bytesLimit; ii++) { + const job = jobs[ii]; + doneJobs.push(job); + bytes += JSON.stringify(legacyDocumentConverter(job.returnvalue)).length; + } + } + + // if we ran over the bytes limit, remove the last document + if (bytes > bytesLimit) { + doneJobs.splice(doneJobs.length - 1, 1); + } + } else { + doneJobs = await getJobs(doneJobsOrder); + } + + const data = doneJobs.map(x => x.returnvalue); + + const nextURL = new URL(`${req.protocol}://${req.get("host")}/v1/crawl/${req.params.jobId}`); + + nextURL.searchParams.set("skip", (start + data.length).toString()); + + if (typeof req.query.limit === "string") { + nextURL.searchParams.set("limit", req.query.limit); + } + + res.status(200).json({ + status, + totalCount: jobIDs.length, + creditsUsed: jobIDs.length, + expiresAt: (await getCrawlExpiry(req.params.jobId)).toISOString(), + next: + status !== "scraping" && (start + data.length) === doneJobsLength // if there's not gonna be any documents after this + ? undefined + : nextURL.href, + data: data.map(x => legacyDocumentConverter(x)), + }); } diff --git a/apps/api/src/controllers/v1/crawl.ts b/apps/api/src/controllers/v1/crawl.ts index a00ad7ca..43f940b7 100644 --- a/apps/api/src/controllers/v1/crawl.ts +++ b/apps/api/src/controllers/v1/crawl.ts @@ -1,139 +1,126 @@ -import { Request, Response } from "express"; -import { WebScraperDataProvider } from "../../../src/scraper/WebScraper"; -import { billTeam } from "../../../src/services/billing/credit_billing"; -import { checkTeamCredits } from "../../../src/services/billing/credit_billing"; -import { authenticateUser } from "./auth"; -import { RateLimiterMode } from "../../../src/types"; -import { addWebScraperJob } from "../../../src/services/queue-jobs"; -import { isUrlBlocked } from "../../../src/scraper/WebScraper/utils/blocklist"; -import { logCrawl } from "../../../src/services/logging/crawl_log"; -import { validateIdempotencyKey } from "../../../src/services/idempotency/validate"; -import { createIdempotencyKey } from "../../../src/services/idempotency/create"; -import { defaultCrawlPageOptions, defaultCrawlerOptions, defaultOrigin } from "../../../src/lib/default-values"; +import { Response } from "express"; import { v4 as uuidv4 } from "uuid"; -import { Logger } from "../../../src/lib/logger"; -import { checkAndUpdateURL } from "../../../src/lib/validateUrl"; +import { + CrawlRequest, + crawlRequestSchema, + CrawlResponse, + legacyCrawlerOptions, + legacyScrapeOptions, + RequestWithAuth, +} from "./types"; +import { + addCrawlJob, + addCrawlJobs, + crawlToCrawler, + lockURL, + lockURLs, + saveCrawl, + StoredCrawl, +} from "../../lib/crawl-redis"; +import { logCrawl } from "../../services/logging/crawl_log"; +import { getScrapeQueue } from "../../services/queue-service"; +import { addScrapeJob } from "../../services/queue-jobs"; +import { Logger } from "../../lib/logger"; -export async function crawlController(req: Request, res: Response) 
{ - // expected req.body +export async function crawlController( + req: RequestWithAuth<{}, CrawlResponse, CrawlRequest>, + res: Response +) { + req.body = crawlRequestSchema.parse(req.body); - // req.body = { - // url: string - // crawlerOptions: { - // includePaths: string[] - // excludePaths: string[] - // maxDepth: number - // limit: number - // allowBackwardLinks: boolean >> TODO: CHANGE THIS NAME??? - // allowExternalLinks: boolean - // ignoreSitemap: number - // } - // scrapeOptions: Exclude - // } + const id = uuidv4(); + await logCrawl(id, req.auth.team_id); + + const { remainingCredits } = req.account; + + // TODO: Get rid of crawlerOptions + const crawlerOptions = legacyCrawlerOptions(req.body); + const pageOptions = legacyScrapeOptions(req.body.scrapeOptions); + + crawlerOptions.limit = Math.min(remainingCredits, crawlerOptions.limit); + + const sc: StoredCrawl = { + originUrl: req.body.url, + crawlerOptions, + pageOptions, + team_id: req.auth.team_id, + createdAt: Date.now(), + }; + + const crawler = crawlToCrawler(id, sc); try { - const { success, team_id, error, status } = await authenticateUser( - req, - res, - RateLimiterMode.Crawl + sc.robots = await crawler.getRobotsTxt(); + } catch (e) { + Logger.debug( + `[Crawl] Failed to get robots.txt (this is probably fine!): ${JSON.stringify( + e + )}` ); - if (!success) { - return res.status(status).json({ error }); - } - - if (req.headers["x-idempotency-key"]) { - const isIdempotencyValid = await validateIdempotencyKey(req); - if (!isIdempotencyValid) { - return res.status(409).json({ error: "Idempotency key already used" }); - } - try { - createIdempotencyKey(req); - } catch (error) { - Logger.error(error); - return res.status(500).json({ error: error.message }); - } - } - - const { success: creditsCheckSuccess, message: creditsCheckMessage } = - await checkTeamCredits(team_id, 1); - if (!creditsCheckSuccess) { - return res.status(402).json({ error: "Insufficient credits" }); - } - - let url = req.body.url; - if (!url) { - return res.status(400).json({ error: "Url is required" }); - } - - if (isUrlBlocked(url)) { - return res - .status(403) - .json({ - error: - "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.", - }); - } - - try { - url = checkAndUpdateURL(url); - } catch (error) { - return res.status(400).json({ error: 'Invalid Url' }); - } - - // TODO: add job to queue - - const id = uuidv4(); - return res.status(200).json({ jobId: id, url: `${req.protocol}://${req.get('host')}/v1/crawl/${id}` }); - - // const mode = req.body.mode ?? "crawl"; - - // const crawlerOptions = { ...defaultCrawlerOptions, ...req.body.crawlerOptions }; - // const pageOptions = { ...defaultCrawlPageOptions, ...req.body.pageOptions }; - - // if (mode === "single_urls" && !url.includes(",")) { // NOTE: do we need this? 
- // try { - // const a = new WebScraperDataProvider(); - // await a.setOptions({ - // jobId: uuidv4(), - // mode: "single_urls", - // urls: [url], - // crawlerOptions: { ...crawlerOptions, returnOnlyUrls: true }, - // pageOptions: pageOptions, - // }); - - // const docs = await a.getDocuments(false, (progress) => { - // job.progress({ - // current: progress.current, - // total: progress.total, - // current_step: "SCRAPING", - // current_url: progress.currentDocumentUrl, - // }); - // }); - // return res.json({ - // success: true, - // documents: docs, - // }); - // } catch (error) { - // Logger.error(error); - // return res.status(500).json({ error: error.message }); - // } - // } - - // const job = await addWebScraperJob({ - // url: url, - // mode: mode ?? "crawl", // fix for single urls not working - // crawlerOptions: crawlerOptions, - // team_id: team_id, - // pageOptions: pageOptions, - // origin: req.body.origin ?? defaultOrigin, - // }); - - // await logCrawl(job.id.toString(), team_id); - - // res.json({ jobId: job.id }); - } catch (error) { - Logger.error(error); - return res.status(500).json({ error: error.message }); } + + await saveCrawl(id, sc); + + const sitemap = sc.crawlerOptions.ignoreSitemap + ? null + : await crawler.tryGetSitemap(); + + if (sitemap !== null) { + const jobs = sitemap.map((x) => { + const url = x.url; + const uuid = uuidv4(); + return { + name: uuid, + data: { + url, + mode: "single_urls", + team_id: req.auth.team_id, + crawlerOptions, + pageOptions, + origin: "api", + crawl_id: id, + sitemapped: true, + }, + opts: { + jobId: uuid, + priority: 20, + }, + }; + }); + + await lockURLs( + id, + jobs.map((x) => x.data.url) + ); + await addCrawlJobs( + id, + jobs.map((x) => x.opts.jobId) + ); + await getScrapeQueue().addBulk(jobs); + } else { + await lockURL(id, sc, req.body.url); + const job = await addScrapeJob( + { + url: req.body.url, + mode: "single_urls", + crawlerOptions: crawlerOptions, + team_id: req.auth.team_id, + pageOptions: pageOptions, + origin: "api", + crawl_id: id, + webhook: req.body.webhook, + }, + { + priority: 15, + } + ); + await addCrawlJob(id, job.id); + } + + return res.status(200).json({ + success: true, + id, + url: `${req.protocol}://${req.get("host")}/v1/crawl/${id}`, + }); } diff --git a/apps/api/src/controllers/v1/map.ts b/apps/api/src/controllers/v1/map.ts index 391b8a10..76cf1498 100644 --- a/apps/api/src/controllers/v1/map.ts +++ b/apps/api/src/controllers/v1/map.ts @@ -1,128 +1,94 @@ -import { Request, Response } from "express"; -import { WebScraperDataProvider } from "../../../src/scraper/WebScraper"; -import { billTeam } from "../../../src/services/billing/credit_billing"; -import { checkTeamCredits } from "../../../src/services/billing/credit_billing"; -import { authenticateUser } from "./auth"; -import { RateLimiterMode } from "../../../src/types"; -import { addWebScraperJob } from "../../../src/services/queue-jobs"; -import { isUrlBlocked } from "../../../src/scraper/WebScraper/utils/blocklist"; -import { logCrawl } from "../../../src/services/logging/crawl_log"; -import { validateIdempotencyKey } from "../../../src/services/idempotency/validate"; -import { createIdempotencyKey } from "../../../src/services/idempotency/create"; -import { defaultCrawlPageOptions, defaultCrawlerOptions, defaultOrigin } from "../../../src/lib/default-values"; +import { Response } from "express"; import { v4 as uuidv4 } from "uuid"; -import { Logger } from "../../../src/lib/logger"; -import { checkAndUpdateURL } from 
"../../../src/lib/validateUrl"; +import { + legacyCrawlerOptions, + mapRequestSchema, + RequestWithAuth, +} from "./types"; +import { crawlToCrawler, StoredCrawl } from "../../lib/crawl-redis"; +import { MapResponse, MapRequest } from "./types"; +import { configDotenv } from "dotenv"; +import { + checkAndUpdateURLForMap, + isSameDomain, + isSameSubdomain, +} from "../../lib/validateUrl"; +import { fireEngineMap } from "../../search/fireEngine"; +import { billTeam } from "../../services/billing/credit_billing"; -export async function mapController(req: Request, res: Response) { - // expected req.body +configDotenv(); - // req.body = { - // url: string - // ignoreSitemap: true?? - // other crawler options? - // } +export async function mapController( + req: RequestWithAuth<{}, MapResponse, MapRequest>, + res: Response +) { + req.body = mapRequestSchema.parse(req.body); + + const id = uuidv4(); + let links: string[] = [req.body.url]; - try { - const { success, team_id, error, status } = await authenticateUser( - req, - res, - RateLimiterMode.Crawl - ); - if (!success) { - return res.status(status).json({ error }); - } + const sc: StoredCrawl = { + originUrl: req.body.url, + crawlerOptions: legacyCrawlerOptions(req.body), + pageOptions: {}, + team_id: req.auth.team_id, + createdAt: Date.now(), + }; - // if (req.headers["x-idempotency-key"]) { - // const isIdempotencyValid = await validateIdempotencyKey(req); - // if (!isIdempotencyValid) { - // return res.status(409).json({ error: "Idempotency key already used" }); - // } - // try { - // createIdempotencyKey(req); - // } catch (error) { - // Logger.error(error); - // return res.status(500).json({ error: error.message }); - // } - // } + const crawler = crawlToCrawler(id, sc); - // const { success: creditsCheckSuccess, message: creditsCheckMessage } = - // await checkTeamCredits(team_id, 1); - // if (!creditsCheckSuccess) { - // return res.status(402).json({ error: "Insufficient credits" }); - // } + const sitemap = + req.body.ignoreSitemap + ? null + : await crawler.tryGetSitemap(); - let url = req.body.url; - if (!url) { - return res.status(400).json({ error: "Url is required" }); - } - - if (isUrlBlocked(url)) { - return res - .status(403) - .json({ - error: - "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.", - }); - } - - try { - url = checkAndUpdateURL(url); - } catch (error) { - return res.status(400).json({ error: 'Invalid Url' }); - } - - return res.status(200).json({ urls: [ "test1", "test2" ] }); - - // const mode = req.body.mode ?? "crawl"; - - // const crawlerOptions = { ...defaultCrawlerOptions, ...req.body.crawlerOptions }; - // const pageOptions = { ...defaultCrawlPageOptions, ...req.body.pageOptions }; - - // if (mode === "single_urls" && !url.includes(",")) { // NOTE: do we need this? 
- // try { - // const a = new WebScraperDataProvider(); - // await a.setOptions({ - // jobId: uuidv4(), - // mode: "single_urls", - // urls: [url], - // crawlerOptions: { ...crawlerOptions, returnOnlyUrls: true }, - // pageOptions: pageOptions, - // }); - - // const docs = await a.getDocuments(false, (progress) => { - // job.progress({ - // current: progress.current, - // total: progress.total, - // current_step: "SCRAPING", - // current_url: progress.currentDocumentUrl, - // }); - // }); - // return res.json({ - // success: true, - // documents: docs, - // }); - // } catch (error) { - // Logger.error(error); - // return res.status(500).json({ error: error.message }); - // } - // } - - // const job = await addWebScraperJob({ - // url: url, - // mode: mode ?? "crawl", // fix for single urls not working - // crawlerOptions: crawlerOptions, - // team_id: team_id, - // pageOptions: pageOptions, - // origin: req.body.origin ?? defaultOrigin, - // }); - - // await logCrawl(job.id.toString(), team_id); - - // res.json({ jobId: job.id }); - } catch (error) { - Logger.error(error); - return res.status(500).json({ error: error.message }); + if (sitemap !== null) { + sitemap.map((x) => { + links.push(x.url); + }); } + + let urlWithoutWww = req.body.url.replace("www.", ""); + + let mapUrl = req.body.search + ? `"${req.body.search}" site:${urlWithoutWww}` + : `site:${req.body.url}`; + // www. seems to exclude subdomains in some cases + const mapResults = await fireEngineMap(mapUrl, { + numResults: 50, + }); + + if (mapResults.length > 0) { + if (req.body.search) { + // Ensure all map results are first, maintaining their order + links = [mapResults[0].url, ...mapResults.slice(1).map(x => x.url), ...links]; + } else { + mapResults.map((x) => { + links.push(x.url); + }); + } + } + + links = links.map((x) => checkAndUpdateURLForMap(x).url.trim()); + + + + // allows for subdomains to be included + links = links.filter((x) => isSameDomain(x, req.body.url)); + + // if includeSubdomains is false, filter out subdomains + if (!req.body.includeSubdomains) { + links = links.filter((x) => isSameSubdomain(x, req.body.url)); + } + + // remove duplicates that could be due to http/https or www + links = [...new Set(links)]; + + await billTeam(req.auth.team_id, 1); + + return res.status(200).json({ + success: true, + links, + }); } diff --git a/apps/api/src/controllers/v1/scrape.ts b/apps/api/src/controllers/v1/scrape.ts index bf529ad2..ffa04b82 100644 --- a/apps/api/src/controllers/v1/scrape.ts +++ b/apps/api/src/controllers/v1/scrape.ts @@ -1,253 +1,105 @@ -// import { ExtractorOptions, PageOptions } from './../../lib/entities'; import { Request, Response } from "express"; -// import { WebScraperDataProvider } from "../../scraper/WebScraper"; -// import { billTeam, checkTeamCredits } from "../../services/billing/credit_billing"; -import { authenticateUser } from "./auth"; -import { RateLimiterMode } from "../../types"; -// import { logJob } from "../../services/logging/log_job"; -// import { Document } from "../../lib/entities"; -import { isUrlBlocked } from "../../scraper/WebScraper/utils/blocklist"; // Import the isUrlBlocked function -// import { numTokensFromString } from '../../lib/LLM-extraction/helpers'; -// import { defaultPageOptions, defaultExtractorOptions, defaultTimeout, defaultOrigin } from '../../../src/lib/default-values'; -// import { v4 as uuidv4 } from "uuid"; import { Logger } from '../../lib/logger'; -import { checkAndUpdateURL } from '../../lib/validateUrl'; +import { Document, 
legacyDocumentConverter, legacyScrapeOptions, RequestWithAuth, ScrapeRequest, scrapeRequestSchema, ScrapeResponse } from "./types"; +import { billTeam } from "../../services/billing/credit_billing"; +import { v4 as uuidv4 } from 'uuid'; +import { numTokensFromString } from "../../lib/LLM-extraction/helpers"; +import { addScrapeJob } from "../../services/queue-jobs"; +import { scrapeQueueEvents } from '../../services/queue-service'; +import { logJob } from "../../services/logging/log_job"; -export async function scrapeController(req: Request, res: Response) { - let url = req.body.url; - if (!url) { - return { success: false, error: "Url is required", returnCode: 400 }; - } +export async function scrapeController(req: RequestWithAuth<{}, ScrapeResponse, ScrapeRequest>, res: Response) { + req.body = scrapeRequestSchema.parse(req.body); + let earlyReturn = false; - if (isUrlBlocked(url)) { - return { success: false, error: "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.", returnCode: 403 }; - } + const origin = req.body.origin; + const timeout = req.body.timeout; + const pageOptions = legacyScrapeOptions(req.body); + const jobId = uuidv4(); + const startTime = new Date().getTime(); + const job = await addScrapeJob({ + url: req.body.url, + mode: "single_urls", + crawlerOptions: {}, + team_id: req.auth.team_id, + pageOptions, + extractorOptions: {}, + origin: req.body.origin, + }, {}, jobId); + + let doc: any | undefined; try { - url = checkAndUpdateURL(url); - } catch (error) { - return { success: false, error: "Invalid URL", returnCode: 400 }; - } - - // TODO: check req.body - // mockup req.body - // req.body = { - // url: "test", - // headers: { - // "x-key": "test" - // }, - // formats: ["markdown", "html", "rawHtml", "content", "linksOnPage", "screenshot", "fullPageScreenshot"], - // includeTags: ["test"], - // excludeTags: ["test"], - // onlyMainContent: false, - // timeout: 30000, - // waitFor: number - // } - - try { - let earlyReturn = false; - // make sure to authenticate user first, Bearer - const { success, team_id, error, status, plan } = await authenticateUser( - req, - res, - RateLimiterMode.Scrape - ); - if (!success) { - return res.status(status).json({ error }); + doc = (await job.waitUntilFinished(scrapeQueueEvents, timeout))[0]; // 60 seconds timeout + } catch (e) { + Logger.error(`Error in scrapeController: ${e}`); + if (e instanceof Error && e.message.startsWith("Job wait")) { + return res.status(408).json({ + success: false, + error: "Request timed out", + }); + } else { + return res.status(500).json({ + success: false, + error: "Internal server error", + }); } + } - // check credits + await job.remove(); - const result = { + if (!doc) { + console.error("!!! PANIC DOC IS", doc, job); + return res.status(200).json({ success: true, - warning: "test", - data: { - markdown: "test", - content: "test", - html: "test", - rawHtml: "test", - linksOnPage: ["test1", "test2"], - screenshot: "test", - metadata: { - title: "test", - description: "test", - language: "test", - sourceURL: "test", - statusCode: 200, - error: "test" - } - } - } - - return res.status(200).json(result); - - // const crawlerOptions = req.body.crawlerOptions ?? {}; - // const pageOptions = { ...defaultPageOptions, ...req.body.pageOptions }; - // const extractorOptions = { ...defaultExtractorOptions, ...req.body.extractorOptions }; - // const origin = req.body.origin ?? defaultOrigin; - // let timeout = req.body.timeout ?? 
defaultTimeout; - - // if (extractorOptions.mode.includes("llm-extraction")) { - // pageOptions.onlyMainContent = true; - // timeout = req.body.timeout ?? 90000; - // } - - // const checkCredits = async () => { - // try { - // const { success: creditsCheckSuccess, message: creditsCheckMessage } = await checkTeamCredits(team_id, 1); - // if (!creditsCheckSuccess) { - // earlyReturn = true; - // return res.status(402).json({ error: "Insufficient credits" }); - // } - // } catch (error) { - // Logger.error(error); - // earlyReturn = true; - // return res.status(500).json({ error: "Error checking team credits. Please contact hello@firecrawl.com for help." }); - // } - // }; - - - // await checkCredits(); - - // const jobId = uuidv4(); - - // const startTime = new Date().getTime(); - // const result = await scrapeHelper( - // jobId, - // req, - // team_id, - // crawlerOptions, - // pageOptions, - // extractorOptions, - // timeout, - // plan - // ); - // const endTime = new Date().getTime(); - // const timeTakenInSeconds = (endTime - startTime) / 1000; - // const numTokens = (result.data && result.data.markdown) ? numTokensFromString(result.data.markdown, "gpt-3.5-turbo") : 0; - - // if (result.success) { - // let creditsToBeBilled = 1; // Assuming 1 credit per document - // const creditsPerLLMExtract = 50; - - // if (extractorOptions.mode.includes("llm-extraction")) { - // // creditsToBeBilled = creditsToBeBilled + (creditsPerLLMExtract * filteredDocs.length); - // creditsToBeBilled += creditsPerLLMExtract; - // } - - // let startTimeBilling = new Date().getTime(); - - // if (earlyReturn) { - // // Don't bill if we're early returning - // return; - // } - // const billingResult = await billTeam( - // team_id, - // creditsToBeBilled - // ); - // if (!billingResult.success) { - // return res.status(402).json({ - // success: false, - // error: "Failed to bill team. Insufficient credits or subscription not found.", - // }); - // } - // } - - // logJob({ - // job_id: jobId, - // success: result.success, - // message: result.error, - // num_docs: 1, - // docs: [result.data], - // time_taken: timeTakenInSeconds, - // team_id: team_id, - // mode: "scrape", - // url: req.body.url, - // crawlerOptions: crawlerOptions, - // pageOptions: pageOptions, - // origin: origin, - // extractor_options: extractorOptions, - // num_tokens: numTokens, - // }); - - - // return res.status(result.returnCode).json(result); - } catch (error) { - Logger.error(error); - return res.status(500).json({ error: error.message }); + warning: "No page found", + data: doc + }); } -} + delete doc.index; + delete doc.provider; -// export async function scrapeHelper( -// jobId: string, -// req: Request, -// team_id: string, -// crawlerOptions: any, -// pageOptions: PageOptions, -// extractorOptions: ExtractorOptions, -// timeout: number, -// plan?: string -// ): Promise<{ -// success: boolean; -// error?: string; -// data?: Document; -// returnCode: number; -// }> { + const endTime = new Date().getTime(); + const timeTakenInSeconds = (endTime - startTime) / 1000; + const numTokens = (doc && doc.markdown) ? 
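// Post-scrape accounting: one credit is billed per document, and the token
// count below is computed from the markdown with a gpt-3.5-turbo tokenizer
// purely for job logging; it does not affect what the team is billed.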
numTokensFromString(doc.markdown, "gpt-3.5-turbo") : 0; - // const url = req.body.url; - // if (!url) { - // return { success: false, error: "Url is required", returnCode: 400 }; - // } + let creditsToBeBilled = 1; // Assuming 1 credit per document + if (earlyReturn) { + // Don't bill if we're early returning + return; + } - // if (isUrlBlocked(url)) { - // return { success: false, error: "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.", returnCode: 403 }; - // } + const billingResult = await billTeam( + req.auth.team_id, + creditsToBeBilled + ); + if (!billingResult.success) { + return res.status(402).json({ + success: false, + error: "Failed to bill team. Insufficient credits or subscription not found.", + }); + } - // const a = new WebScraperDataProvider(); - // await a.setOptions({ - // jobId, - // mode: "single_urls", - // urls: [url], - // crawlerOptions: { - // ...crawlerOptions, - // }, - // pageOptions: pageOptions, - // extractorOptions: extractorOptions, - // }); + logJob({ + job_id: jobId, + success: true, + message: "Scrape completed", + num_docs: 1, + docs: [doc], + time_taken: timeTakenInSeconds, + team_id: req.auth.team_id, + mode: "scrape", + url: req.body.url, + crawlerOptions: {}, + pageOptions: pageOptions, + origin: origin, + extractor_options: { mode: "markdown" }, + num_tokens: numTokens, + }); - // const timeoutPromise = new Promise<{ success: boolean; error?: string; returnCode: number }>((_, reject) => - // setTimeout(() => reject({ success: false, error: "Request timed out. Increase the timeout by passing `timeout` param to the request.", returnCode: 408 }), timeout) - // ); - - // const docsPromise = a.getDocuments(false); - - // let docs; - // try { - // docs = await Promise.race([docsPromise, timeoutPromise]); - // } catch (error) { - // return error; - // } - - // // make sure doc.content is not empty - // let filteredDocs = docs.filter( - // (doc: { content?: string }) => doc.content && doc.content.trim().length > 0 - // ); - // if (filteredDocs.length === 0) { - // return { success: true, error: "No page found", returnCode: 200, data: docs[0] }; - // } - - - // // Remove rawHtml if pageOptions.rawHtml is false and extractorOptions.mode is llm-extraction-from-raw-html - // if (!pageOptions.includeRawHtml && extractorOptions.mode == "llm-extraction-from-raw-html") { - // filteredDocs.forEach(doc => { - // delete doc.rawHtml; - // }); - // } - - // return { - // success: true, - // data: filteredDocs[0], - // returnCode: 200, - // }; -// } \ No newline at end of file + return res.status(200).json({ + success: true, + data: legacyDocumentConverter(doc), + }); +} \ No newline at end of file diff --git a/apps/api/src/controllers/v1/types.ts b/apps/api/src/controllers/v1/types.ts new file mode 100644 index 00000000..ffaf6c19 --- /dev/null +++ b/apps/api/src/controllers/v1/types.ts @@ -0,0 +1,306 @@ +import { Request } from "express"; +import { z } from "zod"; +import { isUrlBlocked } from "../../scraper/WebScraper/utils/blocklist"; +import { PageOptions } from "../../lib/entities"; + +export type Format = + | "markdown" + | "html" + | "rawHtml" + | "links" + | "screenshot" + | "screenshot@fullPage"; + +const url = z.preprocess( + (x) => { + if (typeof x === "string" && !/^([^.:]+:\/\/)/.test(x)) { + if (x.startsWith("://")) { + return "http" + x; + } else { + return "http://" + x; + } + } else { + return x; + } + }, + z + .string() + .url() + .regex(/^https?:\/\//, "URL uses 
unsupported protocol") + .refine( + (x) => !isUrlBlocked(x), + "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it." + ) +); + +const strictMessage = "Unrecognized key in body -- please review the v1 API documentation for request body changes"; + +export const scrapeOptions = z.object({ + formats: z + .enum([ + "markdown", + "html", + "rawHtml", + "links", + "screenshot", + "screenshot@fullPage", + ]) + .array() + .optional() + .default(["markdown"]), + headers: z.record(z.string(), z.string()).optional(), + includeTags: z.string().array().optional(), + excludeTags: z.string().array().optional(), + onlyMainContent: z.boolean().default(true), + timeout: z.number().int().positive().finite().safe().default(30000), // default? + waitFor: z.number().int().nonnegative().finite().safe().default(0), + parsePDF: z.boolean().default(true), +}).strict(strictMessage); + +export type ScrapeOptions = z.infer<typeof scrapeOptions>; + +export const scrapeRequestSchema = scrapeOptions.extend({ + url, + origin: z.string().optional().default("api"), +}).strict(strictMessage); + +// export type ScrapeRequest = { +//   url: string; +//   formats?: Format[]; +//   headers?: { [K: string]: string }; +//   includeTags?: string[]; +//   excludeTags?: string[]; +//   onlyMainContent?: boolean; +//   timeout?: number; +//   waitFor?: number; +// } + +export type ScrapeRequest = z.infer<typeof scrapeRequestSchema>; + +const crawlerOptions = z.object({ + includePaths: z.string().array().default([]), + excludePaths: z.string().array().default([]), + maxDepth: z.number().default(10), // default? + limit: z.number().default(10000), // default? + allowBackwardLinks: z.boolean().default(false), // >> TODO: CHANGE THIS NAME??? + allowExternalLinks: z.boolean().default(false), + ignoreSitemap: z.boolean().default(true), +}).strict(strictMessage);
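As a quick orientation (not part of the diff), here is how the strict schemas above behave at runtime; the relative import path is illustrative:

```ts
// Hypothetical usage of the v1 request schema defined above.
import { scrapeRequestSchema } from "./controllers/v1/types";

// parse() applies the declared defaults and runs the url preprocessor.
const parsed = scrapeRequestSchema.parse({ url: "firecrawl.dev" });
console.log(parsed.url);     // "http://firecrawl.dev" (protocol prepended)
console.log(parsed.formats); // ["markdown"] (schema default)
console.log(parsed.timeout); // 30000 (schema default)

// .strict(strictMessage) rejects unknown keys instead of dropping them.
const bad = scrapeRequestSchema.safeParse({ url: "firecrawl.dev", foo: 1 });
console.log(bad.success);    // false
```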
+// export type CrawlerOptions = { +//   includePaths?: string[]; +//   excludePaths?: string[]; +//   maxDepth?: number; +//   limit?: number; +//   allowBackwardLinks?: boolean; // >> TODO: CHANGE THIS NAME??? +//   allowExternalLinks?: boolean; +//   ignoreSitemap?: boolean; +// }; + +export type CrawlerOptions = z.infer<typeof crawlerOptions>; + +export const crawlRequestSchema = crawlerOptions.extend({ + url, + origin: z.string().optional().default("api"), + scrapeOptions: scrapeOptions.omit({ timeout: true }).default({}), + webhook: z.string().url().optional(), + limit: z.number().default(10000), +}).strict(strictMessage); + +// export type CrawlRequest = { +//   url: string; +//   crawlerOptions?: CrawlerOptions; +//   scrapeOptions?: Exclude; +// }; + +export type CrawlRequest = z.infer<typeof crawlRequestSchema>; + +export const mapRequestSchema = crawlerOptions.extend({ + url: z.string().url(), + origin: z.string().optional().default("api"), + includeSubdomains: z.boolean().default(true), + search: z.string().optional(), + ignoreSitemap: z.boolean().default(false), +}).strict(strictMessage); + +// export type MapRequest = { +//   url: string; +//   crawlerOptions?: CrawlerOptions; +// }; + +export type MapRequest = z.infer<typeof mapRequestSchema>; + +export type Document = { + markdown?: string; + html?: string; + rawHtml?: string; + links?: string[]; + screenshot?: string; + metadata: { + title?: string; + description?: string; + language?: string; + keywords?: string; + robots?: string; + ogTitle?: string; + ogDescription?: string; + ogUrl?: string; + ogImage?: string; + ogAudio?: string; + ogDeterminer?: string; + ogLocale?: string; + ogLocaleAlternate?: string[]; + ogSiteName?: string; + ogVideo?: string; + dcTermsCreated?: string; + dcDateCreated?: string; + dcDate?: string; + dcTermsType?: string; + dcType?: string; + dcTermsAudience?: string; + dcTermsSubject?: string; + dcSubject?: string; + dcDescription?: string; + dcTermsKeywords?: string; + modifiedTime?: string; + publishedTime?: string; + articleTag?: string; + articleSection?: string; + sourceURL?: string; + statusCode?: number; + error?: string; + }; +}; + +export type ErrorResponse = { + success: false; + error: string; + details?: any; +}; + +export type ScrapeResponse = + | ErrorResponse + | { + success: true; + warning?: string; + data: Document; + }; + +export interface ScrapeResponseRequestTest { + statusCode: number; + body: ScrapeResponse; + error?: string; +} + +export type CrawlResponse = + | ErrorResponse + | { + success: true; + id: string; + url: string; + }; + +export type MapResponse = + | ErrorResponse + | { + success: true; + links: string[]; + }; + +export type CrawlStatusParams = { + jobId: string; +}; + +export type CrawlStatusResponse = + | ErrorResponse + | { + status: "scraping" | "completed" | "failed" | "cancelled"; + totalCount: number; + creditsUsed: number; + expiresAt: string; + next?: string; + data: Document[]; + }; + +type AuthObject = { + team_id: string; + plan: string; +}; + +type Account = { + remainingCredits: number; +}; + +export interface RequestWithMaybeAuth< + ReqParams = {}, + ReqBody = undefined, + ResBody = undefined +> extends Request<ReqParams, ReqBody, ResBody> { + auth?: AuthObject; + account?: Account; +} + +export interface RequestWithAuth< + ReqParams = {}, + ReqBody = undefined, + ResBody = undefined, +> extends Request<ReqParams, ReqBody, ResBody> { + auth: AuthObject; + account?: Account; +} + +export function legacyCrawlerOptions(x: CrawlerOptions) { + return { + includes: x.includePaths, + excludes: x.excludePaths, + maxCrawledLinks: x.limit, + maxCrawledDepth: x.maxDepth, + limit: x.limit, + generateImgAltText: false, + allowBackwardCrawling: x.allowBackwardLinks, + allowExternalContentLinks: x.allowExternalLinks, + }; +} + +export function legacyScrapeOptions(x: ScrapeOptions): PageOptions { + return { + 
includeMarkdown: x.formats.includes("markdown"), + includeHtml: x.formats.includes("html"), + includeRawHtml: x.formats.includes("rawHtml"), + onlyIncludeTags: x.includeTags, + removeTags: x.excludeTags, + onlyMainContent: x.onlyMainContent, + waitFor: x.waitFor, + includeLinks: x.formats.includes("links"), + screenshot: x.formats.includes("screenshot"), + fullPageScreenshot: x.formats.includes("screenshot@fullPage"), + parsePDF: x.parsePDF, + }; +} + +export function legacyDocumentConverter(doc: any): Document { + if (doc.metadata.screenshot) { + doc.screenshot = doc.metadata.screenshot; + delete doc.metadata.screenshot; + } + + if (doc.metadata.fullPageScreenshot) { + doc.fullPageScreenshot = doc.metadata.fullPageScreenshot; + delete doc.metadata.fullPageScreenshot; + } + + return { + markdown: doc.markdown, + links: doc.linksOnPage, + rawHtml: doc.rawHtml, + html: doc.html, + screenshot: doc.screenshot ?? doc.fullPageScreenshot, + metadata: { + ...doc.metadata, + pageError: undefined, + pageStatusCode: undefined, + error: doc.metadata.pageError, + statusCode: doc.metadata.pageStatusCode, + }, + }; +} diff --git a/apps/api/src/index.ts b/apps/api/src/index.ts index b7e7af04..2bf5e6d0 100644 --- a/apps/api/src/index.ts +++ b/apps/api/src/index.ts @@ -2,7 +2,7 @@ import express from "express"; import bodyParser from "body-parser"; import cors from "cors"; import "dotenv/config"; -import { getScrapeQueue, getWebScraperQueue } from "./services/queue-service"; +import { getScrapeQueue } from "./services/queue-service"; import { v0Router } from "./routes/v0"; import { initSDK } from "@hyperdx/node-opentelemetry"; import cluster from "cluster"; @@ -14,6 +14,8 @@ import http from 'node:http'; import https from 'node:https'; import CacheableLookup from 'cacheable-lookup'; import { v1Router } from "./routes/v1"; +import expressWs from "express-ws"; +import { crawlStatusWSController } from "./controllers/v1/crawl-status-ws"; const { createBullBoard } = require("@bull-board/api"); const { BullAdapter } = require("@bull-board/api/bullAdapter"); @@ -46,7 +48,8 @@ if (cluster.isMaster) { } }); } else { - const app = express(); + const ws = expressWs(express()); + const app = ws.app; global.isProduction = process.env.IS_PRODUCTION === "true"; @@ -59,7 +62,7 @@ if (cluster.isMaster) { serverAdapter.setBasePath(`/admin/${process.env.BULL_AUTH_KEY}/queues`); const { addQueue, removeQueue, setQueues, replaceQueues } = createBullBoard({ - queues: [new BullAdapter(getWebScraperQueue()), new BullAdapter(getScrapeQueue())], + queues: [new BullAdapter(getScrapeQueue())], serverAdapter: serverAdapter, }); @@ -79,7 +82,7 @@ if (cluster.isMaster) { // register router app.use(v0Router); - app.use(v1Router); + app.use("/v1", v1Router); app.use(adminRouter); const DEFAULT_PORT = process.env.PORT ?? 
3002; @@ -106,9 +109,9 @@ if (cluster.isMaster) { app.get(`/serverHealthCheck`, async (req, res) => { try { - const webScraperQueue = getWebScraperQueue(); + const scrapeQueue = getScrapeQueue(); const [waitingJobs] = await Promise.all([ - webScraperQueue.getWaitingCount(), + scrapeQueue.getWaitingCount(), ]); const noWaitingJobs = waitingJobs === 0; @@ -128,9 +131,9 @@ if (cluster.isMaster) { const timeout = 60000; // 1 minute // The timeout value for the check in milliseconds const getWaitingJobsCount = async () => { - const webScraperQueue = getWebScraperQueue(); + const scrapeQueue = getScrapeQueue(); const [waitingJobsCount] = await Promise.all([ - webScraperQueue.getWaitingCount(), + scrapeQueue.getWaitingCount(), ]); return waitingJobsCount; @@ -183,11 +186,12 @@ if (cluster.isMaster) { Logger.info(`Worker ${process.pid} started`); } -// const wsq = getWebScraperQueue(); +// const sq = getScrapeQueue(); + +// sq.on("waiting", j => ScrapeEvents.logJobEvent(j, "waiting")); +// sq.on("active", j => ScrapeEvents.logJobEvent(j, "active")); +// sq.on("completed", j => ScrapeEvents.logJobEvent(j, "completed")); +// sq.on("paused", j => ScrapeEvents.logJobEvent(j, "paused")); +// sq.on("resumed", j => ScrapeEvents.logJobEvent(j, "resumed")); +// sq.on("removed", j => ScrapeEvents.logJobEvent(j, "removed")); -// wsq.on("waiting", j => ScrapeEvents.logJobEvent(j, "waiting")); -// wsq.on("active", j => ScrapeEvents.logJobEvent(j, "active")); -// wsq.on("completed", j => ScrapeEvents.logJobEvent(j, "completed")); -// wsq.on("paused", j => ScrapeEvents.logJobEvent(j, "paused")); -// wsq.on("resumed", j => ScrapeEvents.logJobEvent(j, "resumed")); -// wsq.on("removed", j => ScrapeEvents.logJobEvent(j, "removed")); diff --git a/apps/api/src/lib/checkCredits.ts b/apps/api/src/lib/checkCredits.ts new file mode 100644 index 00000000..7e9d988d --- /dev/null +++ b/apps/api/src/lib/checkCredits.ts @@ -0,0 +1,32 @@ +import { checkTeamCredits } from "../services/billing/credit_billing"; +import { Logger } from "./logger"; + +type checkCreditsResponse = { + status: number; + error: string | null; +} + +export const checkCredits = async (team_id: string): Promise<checkCreditsResponse> => { + try { + const { + success: creditsCheckSuccess, + message: creditsCheckMessage + } = await checkTeamCredits(team_id, 1); + if (!creditsCheckSuccess) { + return { + status: 402, + error: "Insufficient credits" + }; + } + } catch (error) { + Logger.error(error); + return { + status: 500, + error: "Error checking team credits. Please contact hello@firecrawl.com for help." + }; + } + return { + status: 200, + error: null + } +}; \ No newline at end of file
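Since `legacyScrapeOptions` (defined in the v1 types above) is the bridge every v1 controller crosses, a short illustrative sketch of the mapping may help; nothing here is part of the diff and the import path is assumed:

```ts
import { scrapeRequestSchema, legacyScrapeOptions } from "./controllers/v1/types";

// A v1 formats array collapses into the v0 PageOptions booleans.
const req = scrapeRequestSchema.parse({
  url: "https://example.com",
  formats: ["markdown", "links", "screenshot"],
});
const pageOptions = legacyScrapeOptions(req);
// pageOptions.includeMarkdown === true
// pageOptions.includeLinks   === true
// pageOptions.screenshot     === true
// pageOptions.includeHtml    === false (html was not requested)
```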
diff --git a/apps/api/src/lib/crawl-redis.ts b/apps/api/src/lib/crawl-redis.ts new file mode 100644 index 00000000..6640678d --- /dev/null +++ b/apps/api/src/lib/crawl-redis.ts @@ -0,0 +1,123 @@ +import { WebCrawler } from "../scraper/WebScraper/crawler"; +import { redisConnection } from "../services/queue-service"; + +export type StoredCrawl = { + originUrl: string; + crawlerOptions: any; + pageOptions: any; + team_id: string; + robots?: string; + cancelled?: boolean; + createdAt: number; +}; + +export async function saveCrawl(id: string, crawl: StoredCrawl) { + await redisConnection.set("crawl:" + id, JSON.stringify(crawl)); + await redisConnection.expire("crawl:" + id, 24 * 60 * 60, "NX"); +} + +export async function getCrawl(id: string): Promise<StoredCrawl | null> { + const x = await redisConnection.get("crawl:" + id); + + if (x === null) { + return null; + } + + return JSON.parse(x); +} + +export async function getCrawlExpiry(id: string): Promise<Date> { + const d = new Date(); + const ttl = await redisConnection.pttl("crawl:" + id); + d.setMilliseconds(d.getMilliseconds() + ttl); + d.setMilliseconds(0); + return d; +} + +export async function addCrawlJob(id: string, job_id: string) { + await redisConnection.sadd("crawl:" + id + ":jobs", job_id); + await redisConnection.expire("crawl:" + id + ":jobs", 24 * 60 * 60, "NX"); +} + +export async function addCrawlJobs(id: string, job_ids: string[]) { + await redisConnection.sadd("crawl:" + id + ":jobs", ...job_ids); + await redisConnection.expire("crawl:" + id + ":jobs", 24 * 60 * 60, "NX"); +} + +export async function addCrawlJobDone(id: string, job_id: string) { + await redisConnection.sadd("crawl:" + id + ":jobs_done", job_id); + await redisConnection.lpush("crawl:" + id + ":jobs_done_ordered", job_id); + await redisConnection.expire("crawl:" + id + ":jobs_done", 24 * 60 * 60, "NX"); + await redisConnection.expire("crawl:" + id + ":jobs_done_ordered", 24 * 60 * 60, "NX"); +} + +export async function getDoneJobsOrderedLength(id: string): Promise<number> { + return await redisConnection.llen("crawl:" + id + ":jobs_done_ordered"); +} + +export async function getDoneJobsOrdered(id: string, start = 0, end = -1): Promise<string[]> { + return await redisConnection.lrange("crawl:" + id + ":jobs_done_ordered", start, end); +} + +export async function isCrawlFinished(id: string) { + return (await redisConnection.scard("crawl:" + id + ":jobs_done")) === (await redisConnection.scard("crawl:" + id + ":jobs")); +} + +export async function isCrawlFinishedLocked(id: string) { + return (await redisConnection.exists("crawl:" + id + ":finish")); +} + +export async function finishCrawl(id: string) { + if (await isCrawlFinished(id)) { + const set = await redisConnection.setnx("crawl:" + id + ":finish", "yes"); + if (set === 1) { + await redisConnection.expire("crawl:" + id + ":finish", 24 * 60 * 60); + } + return set === 1 + } } + +export async function getCrawlJobs(id: string): Promise<string[]> { + return await redisConnection.smembers("crawl:" + id + ":jobs"); } + +export async function lockURL(id: string, sc: StoredCrawl, url: string): Promise<boolean> { + if (typeof sc.crawlerOptions?.limit === "number") { + if (await redisConnection.scard("crawl:" + id + ":visited") >= sc.crawlerOptions.limit) { + return false; + } + } + const res = (await redisConnection.sadd("crawl:" + id + ":visited", url)) !== 0 + await redisConnection.expire("crawl:" + id + ":visited", 24 * 60 * 60, "NX"); + return res; +}
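The Redis bookkeeping above is easiest to read as a lifecycle; here is a hedged sketch of one pass through it (ids, team name, and import path are illustrative, not part of the diff):

```ts
import { v4 as uuidv4 } from "uuid";
import {
  saveCrawl, addCrawlJob, addCrawlJobDone, isCrawlFinished, StoredCrawl,
} from "./lib/crawl-redis"; // path illustrative

async function exampleCrawlLifecycle() {
  const crawlId = uuidv4();
  const sc: StoredCrawl = {
    originUrl: "https://example.com",
    crawlerOptions: { limit: 100 },
    pageOptions: {},
    team_id: "team_123", // illustrative
    createdAt: Date.now(),
  };
  await saveCrawl(crawlId, sc);          // crawl:<id>, 24h TTL

  const jobId = uuidv4();
  await addCrawlJob(crawlId, jobId);     // crawl:<id>:jobs
  await addCrawlJobDone(crawlId, jobId); // crawl:<id>:jobs_done (+ ordered list)

  // Finished once the done set catches up with the jobs set.
  console.log(await isCrawlFinished(crawlId)); // true
}
```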
+/// NOTE: does not check limit. only use if limit is checked beforehand e.g. with sitemap +export async function lockURLs(id: string, urls: string[]): Promise<boolean> { + const res = (await redisConnection.sadd("crawl:" + id + ":visited", ...urls)) !== 0 + await redisConnection.expire("crawl:" + id + ":visited", 24 * 60 * 60, "NX"); + return res; +} + +export function crawlToCrawler(id: string, sc: StoredCrawl): WebCrawler { + const crawler = new WebCrawler({ + jobId: id, + initialUrl: sc.originUrl, + includes: sc.crawlerOptions?.includes ?? [], + excludes: sc.crawlerOptions?.excludes ?? [], + maxCrawledLinks: sc.crawlerOptions?.maxCrawledLinks ?? 1000, + maxCrawledDepth: sc.crawlerOptions?.maxDepth ?? 10, + limit: sc.crawlerOptions?.limit ?? 10000, + generateImgAltText: sc.crawlerOptions?.generateImgAltText ?? false, + allowBackwardCrawling: sc.crawlerOptions?.allowBackwardCrawling ?? false, + allowExternalContentLinks: sc.crawlerOptions?.allowExternalContentLinks ?? false, + }); + + if (sc.robots !== undefined) { + try { + crawler.importRobotsTxt(sc.robots); + } catch (_) {} + } + + return crawler; +} diff --git a/apps/api/src/lib/default-values.ts b/apps/api/src/lib/default-values.ts index 3b303781..152f47d7 100644 --- a/apps/api/src/lib/default-values.ts +++ b/apps/api/src/lib/default-values.ts @@ -7,6 +7,7 @@ export const defaultPageOptions = { includeHtml: false, waitFor: 0, screenshot: false, + fullPageScreenshot: false, parsePDF: true }; diff --git a/apps/api/src/lib/entities.ts b/apps/api/src/lib/entities.ts index 9ffa4810..5e26360a 100644 --- a/apps/api/src/lib/entities.ts +++ b/apps/api/src/lib/entities.ts @@ -11,6 +11,7 @@ export interface Progress { } export type PageOptions = { + includeMarkdown?: boolean; onlyMainContent?: boolean; includeHtml?: boolean; includeRawHtml?: boolean; @@ -18,11 +19,13 @@ fetchPageContent?: boolean; waitFor?: number; screenshot?: boolean; + fullPageScreenshot?: boolean; headers?: Record<string, string>; replaceAllPathsWithAbsolutePaths?: boolean; parsePDF?: boolean; removeTags?: string | string[]; onlyIncludeTags?: string | string[]; + includeLinks?: boolean; }; export type ExtractorOptions = { @@ -42,8 +45,8 @@ export type SearchOptions = { export type CrawlerOptions = { returnOnlyUrls?: boolean; - includes?: string[]; - excludes?: string[]; + includes?: string | string[]; + excludes?: string | string[]; maxCrawledLinks?: number; maxDepth?: number; limit?: number; @@ -64,6 +67,7 @@ export type WebScraperOptions = { extractorOptions?: ExtractorOptions; concurrentRequests?: number; bullJobId?: string; + priority?: number; }; export interface DocumentUrl { diff --git a/apps/api/src/lib/scrape-events.ts b/apps/api/src/lib/scrape-events.ts index 0ae2d77c..04850b4e 100644 --- a/apps/api/src/lib/scrape-events.ts +++ b/apps/api/src/lib/scrape-events.ts @@ -46,7 +46,7 @@ export class ScrapeEvents { }).select().single(); return (result.data as any).id; } catch (error) { - Logger.error(`Error inserting scrape event: ${error}`); + // Logger.error(`Error inserting scrape event: ${error}`); return null; } } diff --git a/apps/api/src/lib/supabase-jobs.ts b/apps/api/src/lib/supabase-jobs.ts index 1f9531e5..b4247883 100644 --- a/apps/api/src/lib/supabase-jobs.ts +++ b/apps/api/src/lib/supabase-jobs.ts @@ -17,3 +17,21 @@ export const supabaseGetJobById = async (jobId: string) => { return data; } + +export const supabaseGetJobsById = async (jobIds: string[]) => { + const { data, error } = await supabase_service + .from('firecrawl_jobs') + .select('*') + .in('job_id', jobIds); + + if (error) { + return []; + } + + if (!data) { + return []; + } + + return data; +} + diff --git a/apps/api/src/lib/validateUrl.test.ts b/apps/api/src/lib/validateUrl.test.ts new file mode 100644 index 00000000..43730584 --- /dev/null +++ b/apps/api/src/lib/validateUrl.test.ts @@ -0,0 +1,88 @@ +import { isSameDomain } from "./validateUrl"; +import { isSameSubdomain } from "./validateUrl"; + +describe("isSameDomain", () => { + it("should return true for a subdomain", () => { + const result = isSameDomain("http://sub.example.com", "http://example.com"); + expect(result).toBe(true); + }); + + it("should return true for the same domain", () => { + const result = isSameDomain("http://example.com", "http://example.com"); + expect(result).toBe(true); + }); + + it("should return false for different domains", () => { + const result = isSameDomain("http://example.com", "http://another.com"); + expect(result).toBe(false); + }); + + it("should return true for a subdomain with different protocols", () => { + const result = isSameDomain("https://sub.example.com", "http://example.com"); + expect(result).toBe(true); + }); + + it("should return false for invalid URLs", () => { + const result = isSameDomain("invalid-url", "http://example.com"); + expect(result).toBe(false); + const result2 = isSameDomain("http://example.com", "invalid-url"); + expect(result2).toBe(false); + }); + + it("should return true for a subdomain with www prefix", () => { + const result = isSameDomain("http://www.sub.example.com", "http://example.com"); + expect(result).toBe(true); + }); + + it("should return true for deeply nested subdomains", () => { + const result = isSameDomain("http://docs.s.s.example.com", "http://example.com"); + expect(result).toBe(true); + }); }); + + + + describe("isSameSubdomain", () => { + it("should return false for a subdomain", () => { + const result = isSameSubdomain("http://example.com", "http://docs.example.com"); + expect(result).toBe(false); + }); + + it("should return true for the same subdomain", () => { + const result = isSameSubdomain("http://docs.example.com", "http://docs.example.com"); + expect(result).toBe(true); + }); + + it("should return false for different subdomains", () => { + const result = isSameSubdomain("http://docs.example.com", "http://blog.example.com"); + expect(result).toBe(false); + }); + + it("should return false for different domains", () => { + const result = isSameSubdomain("http://example.com", "http://another.com"); + expect(result).toBe(false); + }); + + it("should return false for invalid URLs", () => { + const result = isSameSubdomain("invalid-url", "http://example.com"); + expect(result).toBe(false); + const result2 = isSameSubdomain("http://example.com", "invalid-url"); + expect(result2).toBe(false); + }); + + it("should return true for the same subdomain with different protocols", () => { + const result = isSameSubdomain("https://docs.example.com", "http://docs.example.com"); + expect(result).toBe(true); + }); + + it("should return true for the same subdomain with www prefix", () => { + const result = isSameSubdomain("http://www.docs.example.com", "http://docs.example.com"); + expect(result).toBe(true); + }); + + it("should return false for a subdomain with www prefix and different subdomain", () => { + const result = isSameSubdomain("http://www.docs.example.com", "http://blog.example.com"); + expect(result).toBe(false); + }); }); \ No newline at end of file diff --git a/apps/api/src/lib/validateUrl.ts b/apps/api/src/lib/validateUrl.ts index 
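One more sketch on the locking above: the visited set is a Redis SET, and SADD returns 0 for an already-present member, so only the first caller for a given URL wins the lock (usage and import path illustrative):

```ts
import { lockURL, StoredCrawl } from "./lib/crawl-redis"; // path illustrative

// sc is assumed to be the StoredCrawl saved for this crawl id.
async function tryEnqueue(crawlId: string, sc: StoredCrawl, url: string) {
  if (await lockURL(crawlId, sc, url)) {
    // First sighting of this URL for this crawl: safe to enqueue a scrape job.
  } else {
    // Already visited, or the crawl hit its limit: skip.
  }
}
```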
b5f30434..fa2698e7 100644 --- a/apps/api/src/lib/validateUrl.ts +++ b/apps/api/src/lib/validateUrl.ts @@ -1,9 +1,8 @@ - const protocolIncluded = (url: string) => { // if :// not in the start of the url assume http (maybe https?) // regex checks if :// appears before any . - return(/^([^.:]+:\/\/)/.test(url)); -} + return /^([^.:]+:\/\/)/.test(url); +}; const getURLobj = (s: string) => { // URL fails if we dont include the protocol ie google.com @@ -18,7 +17,6 @@ const getURLobj = (s: string) => { }; export const checkAndUpdateURL = (url: string) => { - if (!protocolIncluded(url)) { url = `http://${url}`; } @@ -30,9 +28,95 @@ export const checkAndUpdateURL = (url: string) => { const typedUrlObj = urlObj as URL; - if(typedUrlObj.protocol !== "http:" && typedUrlObj.protocol !== "https:") { + if (typedUrlObj.protocol !== "http:" && typedUrlObj.protocol !== "https:") { throw new Error("Invalid URL"); } return { urlObj: typedUrlObj, url: url }; +}; + +/** + * Same domain check + * It checks if the domain of the url is the same as the base url + * It accounts true for subdomains and www.subdomains + * @param url + * @param baseUrl + * @returns + */ +export function isSameDomain(url: string, baseUrl: string) { + const { urlObj: urlObj1, error: error1 } = getURLobj(url); + const { urlObj: urlObj2, error: error2 } = getURLobj(baseUrl); + + if (error1 || error2) { + return false; + } + + const typedUrlObj1 = urlObj1 as URL; + const typedUrlObj2 = urlObj2 as URL; + + const cleanHostname = (hostname: string) => { + return hostname.startsWith('www.') ? hostname.slice(4) : hostname; + }; + + const domain1 = cleanHostname(typedUrlObj1.hostname).split('.').slice(-2).join('.'); + const domain2 = cleanHostname(typedUrlObj2.hostname).split('.').slice(-2).join('.'); + + return domain1 === domain2; } + + +export function isSameSubdomain(url: string, baseUrl: string) { + const { urlObj: urlObj1, error: error1 } = getURLobj(url); + const { urlObj: urlObj2, error: error2 } = getURLobj(baseUrl); + + if (error1 || error2) { + return false; + } + + const typedUrlObj1 = urlObj1 as URL; + const typedUrlObj2 = urlObj2 as URL; + + const cleanHostname = (hostname: string) => { + return hostname.startsWith('www.') ? 
hostname.slice(4) : hostname; + }; + + const domain1 = cleanHostname(typedUrlObj1.hostname).split('.').slice(-2).join('.'); + const domain2 = cleanHostname(typedUrlObj2.hostname).split('.').slice(-2).join('.'); + + const subdomain1 = cleanHostname(typedUrlObj1.hostname).split('.').slice(0, -2).join('.'); + const subdomain2 = cleanHostname(typedUrlObj2.hostname).split('.').slice(0, -2).join('.'); + + // Check if the domains are the same and the subdomains are the same + return domain1 === domain2 && subdomain1 === subdomain2; +} + + +export const checkAndUpdateURLForMap = (url: string) => { + if (!protocolIncluded(url)) { + url = `http://${url}`; + } + // remove last slash if present + if (url.endsWith("/")) { + url = url.slice(0, -1); + } + + + const { error, urlObj } = getURLobj(url); + if (error) { + throw new Error("Invalid URL"); + } + + const typedUrlObj = urlObj as URL; + + if (typedUrlObj.protocol !== "http:" && typedUrlObj.protocol !== "https:") { + throw new Error("Invalid URL"); + } + + // remove any query params + url = url.split("?")[0].trim(); + + return { urlObj: typedUrlObj, url: url }; +}; + + + diff --git a/apps/api/src/main/runWebScraper.ts b/apps/api/src/main/runWebScraper.ts index 166deaf9..84826bdd 100644 --- a/apps/api/src/main/runWebScraper.ts +++ b/apps/api/src/main/runWebScraper.ts @@ -12,7 +12,7 @@ import { Document } from "../lib/entities"; import { supabase_service } from "../services/supabase"; import { Logger } from "../lib/logger"; import { ScrapeEvents } from "../lib/scrape-events"; -import { getWebScraperQueue } from "../services/queue-service"; +import { getScrapeQueue } from "../services/queue-service"; export async function startWebScraperPipeline({ job, @@ -27,7 +27,12 @@ export async function startWebScraperPipeline({ mode: job.data.mode, crawlerOptions: job.data.crawlerOptions, extractorOptions: job.data.extractorOptions, - pageOptions: job.data.pageOptions, + pageOptions: { + ...job.data.pageOptions, + ...(job.data.crawl_id ? 
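Worth noting about the hostname comparison above: both helpers reduce a hostname to its last two labels, so multi-label public suffixes collapse into one "domain". An illustration derived from the code shown (not part of the diff):

```ts
import { isSameDomain } from "./lib/validateUrl"; // path illustrative

// Both hostnames slice down to "co.uk", so this reports true even though the
// registrable domains differ; a full public-suffix list would say false.
console.log(isSameDomain("http://foo.co.uk", "http://bar.co.uk")); // true
```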
({ + includeRawHtml: true, + }): {}), + }, inProgress: (progress) => { Logger.debug(`🐂 Job in progress ${job.id}`); if (progress.currentDocument) { @@ -35,7 +40,7 @@ export async function startWebScraperPipeline({ if (partialDocs.length > 50) { partialDocs = partialDocs.slice(-50); } - job.updateProgress({ ...progress, partialDocs: partialDocs }); + // job.updateProgress({ ...progress, partialDocs: partialDocs }); } }, onSuccess: (result, mode) => { @@ -49,6 +54,7 @@ export async function startWebScraperPipeline({ }, team_id: job.data.team_id, bull_job_id: job.id.toString(), + priority: job.opts.priority, })) as { success: boolean; message: string; docs: Document[] }; } export async function runWebScraper({ @@ -62,6 +68,7 @@ export async function runWebScraper({ onError, team_id, bull_job_id, + priority, }: RunWebScraperParams): Promise { try { const provider = new WebScraperDataProvider(); @@ -74,6 +81,7 @@ export async function runWebScraper({ crawlerOptions: crawlerOptions, pageOptions: pageOptions, bullJobId: bull_job_id, + priority, }); } else { await provider.setOptions({ @@ -83,6 +91,7 @@ export async function runWebScraper({ extractorOptions, crawlerOptions: crawlerOptions, pageOptions: pageOptions, + priority, }); } const docs = (await provider.getDocuments(false, (progress: Progress) => { @@ -104,21 +113,17 @@ export async function runWebScraper({ return { url: doc.metadata.sourceURL }; } }) - : docs.filter((doc) => doc.content.trim().length > 0); + : docs; - const isCancelled = await (await getWebScraperQueue().client).exists("cancelled:" + bull_job_id); + const billingResult = await billTeam(team_id, filteredDocs.length); - if (!isCancelled) { - const billingResult = await billTeam(team_id, filteredDocs.length); - - if (!billingResult.success) { - // throw new Error("Failed to bill team, no subscription was found"); - return { - success: false, - message: "Failed to bill team, no subscription was found", - docs: [], - }; - } + if (!billingResult.success) { + // throw new Error("Failed to bill team, no subscription was found"); + return { + success: false, + message: "Failed to bill team, no subscription was found", + docs: [], + }; } // This is where the returnvalue from the job is set @@ -141,21 +146,21 @@ const saveJob = async (job: Job, result: any, token: string, mode: string) => { .eq("job_id", job.id); if (error) throw new Error(error.message); - try { - if (mode === "crawl") { - await job.moveToCompleted(null, token, false); - } else { - await job.moveToCompleted(result, token, false); - } - } catch (error) { - // I think the job won't exist here anymore - } - } else { - try { - await job.moveToCompleted(result, token, false); - } catch (error) { - // I think the job won't exist here anymore - } + // try { + // if (mode === "crawl") { + // await job.moveToCompleted(null, token, false); + // } else { + // await job.moveToCompleted(result, token, false); + // } + // } catch (error) { + // // I think the job won't exist here anymore + // } + // } else { + // try { + // await job.moveToCompleted(result, token, false); + // } catch (error) { + // // I think the job won't exist here anymore + // } } ScrapeEvents.logJobEvent(job, "completed"); } catch (error) { diff --git a/apps/api/src/routes/admin.ts b/apps/api/src/routes/admin.ts index 77d1bf46..d8f1b1e4 100644 --- a/apps/api/src/routes/admin.ts +++ b/apps/api/src/routes/admin.ts @@ -1,10 +1,10 @@ import express from "express"; -import { redisHealthController } from "../controllers/admin/redis-health"; +import { 
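For context on the `priority` now threaded through `runWebScraper` above: it starts life as a standard queue job option, is read back from `job.opts.priority`, and is forwarded into the scraper. A hedged sketch of the enqueue side (job name and payload shape are illustrative; lower numbers are served first):

```ts
import { getScrapeQueue } from "./services/queue-service"; // path illustrative

async function enqueueWithPriority() {
  await getScrapeQueue().add(
    "scrape",                                            // illustrative name
    { url: "https://example.com", mode: "single_urls" }, // illustrative payload
    { priority: 10 }
  );
}
```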
redisHealthController } from "../controllers/v0/admin/redis-health"; import { checkQueuesController, cleanBefore24hCompleteJobsController, queuesController, -} from "../controllers/admin/queue"; +} from "../controllers/v0/admin/queue"; export const adminRouter = express.Router(); diff --git a/apps/api/src/routes/v0.ts b/apps/api/src/routes/v0.ts index 9c68d9bb..3a7bda65 100644 --- a/apps/api/src/routes/v0.ts +++ b/apps/api/src/routes/v0.ts @@ -1,14 +1,14 @@ import express from "express"; -import { crawlController } from "../../src/controllers/crawl"; -import { crawlStatusController } from "../../src/controllers/crawl-status"; -import { scrapeController } from "../../src/controllers/scrape"; -import { crawlPreviewController } from "../../src/controllers/crawlPreview"; -import { crawlJobStatusPreviewController } from "../../src/controllers/status"; -import { searchController } from "../../src/controllers/search"; -import { crawlCancelController } from "../../src/controllers/crawl-cancel"; -import { keyAuthController } from "../../src/controllers/keyAuth"; -import { livenessController } from "../controllers/liveness"; -import { readinessController } from "../controllers/readiness"; +import { crawlController } from "../../src/controllers/v0/crawl"; +import { crawlStatusController } from "../../src/controllers/v0/crawl-status"; +import { scrapeController } from "../../src/controllers/v0/scrape"; +import { crawlPreviewController } from "../../src/controllers/v0/crawlPreview"; +import { crawlJobStatusPreviewController } from "../../src/controllers/v0/status"; +import { searchController } from "../../src/controllers/v0/search"; +import { crawlCancelController } from "../../src/controllers/v0/crawl-cancel"; +import { keyAuthController } from "../../src/controllers/v0/keyAuth"; +import { livenessController } from "../controllers/v0/liveness"; +import { readinessController } from "../controllers/v0/readiness"; export const v0Router = express.Router(); diff --git a/apps/api/src/routes/v1.ts b/apps/api/src/routes/v1.ts index 5099fee9..51f07c85 100644 --- a/apps/api/src/routes/v1.ts +++ b/apps/api/src/routes/v1.ts @@ -1,9 +1,21 @@ -import express from "express"; +import express, { NextFunction, Request, Response } from "express"; import { crawlController } from "../../src/controllers/v1/crawl"; // import { crawlStatusController } from "../../src/controllers/v1/crawl-status"; import { scrapeController } from "../../src/controllers/v1/scrape"; import { crawlStatusController } from "../../src/controllers/v1/crawl-status"; import { mapController } from "../../src/controllers/v1/map"; +import { ErrorResponse, RequestWithAuth, RequestWithMaybeAuth } from "../controllers/v1/types"; +import { RateLimiterMode } from "../types"; +import { authenticateUser } from "../controllers/v1/auth"; +import { Logger } from "../lib/logger"; +import { createIdempotencyKey } from "../services/idempotency/create"; +import { validateIdempotencyKey } from "../services/idempotency/validate"; +import { ZodError } from "zod"; +import { checkTeamCredits } from "../services/billing/credit_billing"; +import { v4 as uuidv4 } from "uuid"; +import expressWs from "express-ws"; +import { crawlStatusWSController } from "../controllers/v1/crawl-status-ws"; +import { isUrlBlocked } from "../scraper/WebScraper/utils/blocklist"; // import { crawlPreviewController } from "../../src/controllers/v1/crawlPreview"; // import { crawlJobStatusPreviewController } from "../../src/controllers/v1/status"; // import { searchController } from 
"../../src/controllers/v1/search"; @@ -12,23 +24,142 @@ import { mapController } from "../../src/controllers/v1/map"; // import { livenessController } from "../controllers/v1/liveness"; // import { readinessController } from "../controllers/v1/readiness"; +function checkCreditsMiddleware(minimum?: number): (req: RequestWithAuth, res: Response, next: NextFunction) => void { + return (req, res, next) => { + (async () => { + if (!minimum && req.body) { + minimum = (req.body as any)?.limit ?? 1; + } + const { success, message, remainingCredits } = await checkTeamCredits(req.auth.team_id, minimum); + if (!success) { + return res.status(402).json({ success: false, error: "Insufficient credits" }); + } + req.account = { remainingCredits } + next(); + })() + .catch(err => next(err)); + }; +} + +export function authMiddleware(rateLimiterMode: RateLimiterMode): (req: RequestWithMaybeAuth, res: Response, next: NextFunction) => void { + return (req, res, next) => { + (async () => { + const { success, team_id, error, status, plan } = await authenticateUser( + req, + res, + rateLimiterMode, + ); + + if (!success) { + return res.status(status).json({ success: false, error }); + } + + req.auth = { team_id, plan }; + next(); + })() + .catch(err => next(err)); + } +} + +function idempotencyMiddleware(req: Request, res: Response, next: NextFunction) { + (async () => { + if (req.headers["x-idempotency-key"]) { + const isIdempotencyValid = await validateIdempotencyKey(req); + if (!isIdempotencyValid) { + return res.status(409).json({ success: false, error: "Idempotency key already used" }); + } + createIdempotencyKey(req); + } + next(); + })() + .catch(err => next(err)); +} + +function blocklistMiddleware(req: Request, res: Response, next: NextFunction) { + if (req.body.url && isUrlBlocked(req.body.url)) { + return res.status(403).json({ success: false, error: "URL is blocked. Firecrawl currently does not support social media scraping due to policy restrictions." 
}); + } + next(); +} + +function wrap(controller: (req: Request, res: Response) => Promise<any>): (req: Request, res: Response, next: NextFunction) => any { + return (req, res, next) => { + controller(req, res) + .catch(err => next(err)) + } +} + +expressWs(express()); + export const v1Router = express.Router(); -v1Router.post("/v1/scrape", scrapeController); -v1Router.post("/v1/crawl", crawlController); -v1Router.get("/v1/crawl/:jobId", crawlStatusController); -// v1Router.post("/v1/crawlWebsitePreview", crawlPreviewController); -// v1Router.delete("/v1/crawl/cancel/:jobId", crawlCancelController); -// v1Router.get("/v1/checkJobStatus/:jobId", crawlJobStatusPreviewController); +v1Router.post( + "/scrape", + blocklistMiddleware, + authMiddleware(RateLimiterMode.Scrape), + checkCreditsMiddleware(1), + wrap(scrapeController) +); + +v1Router.post( + "/crawl", + blocklistMiddleware, + authMiddleware(RateLimiterMode.Crawl), + idempotencyMiddleware, + checkCreditsMiddleware(), + wrap(crawlController) +); + +v1Router.post( + "/map", + blocklistMiddleware, + authMiddleware(RateLimiterMode.Map), + checkCreditsMiddleware(1), + wrap(mapController) +); + +v1Router.get( + "/crawl/:jobId", + authMiddleware(RateLimiterMode.CrawlStatus), + wrap(crawlStatusController) +); + +v1Router.ws( + "/crawl/:jobId", + crawlStatusWSController +); + +// v1Router.post("/crawlWebsitePreview", crawlPreviewController); +// v1Router.delete("/crawl/:jobId", crawlCancelController); +// v1Router.get("/checkJobStatus/:jobId", crawlJobStatusPreviewController); // // Auth route for key based authentication -// v1Router.get("/v1/keyAuth", keyAuthController); +// v1Router.get("/keyAuth", keyAuthController); // // Search routes -// v0Router.post("/v1/search", searchController); +// v0Router.post("/search", searchController); // Health/Probe routes -// v1Router.get("/v1/health/liveness", livenessController); -// v1Router.get("/v1/health/readiness", readinessController); +// v1Router.get("/health/liveness", livenessController); +// v1Router.get("/health/readiness", readinessController); -v1Router.post("/v1/map", mapController); \ No newline at end of file +v1Router.use((err: unknown, req: Request<{}, ErrorResponse, undefined>, res: Response, next: NextFunction) => { + if (err instanceof ZodError) { + res.status(400).json({ success: false, error: "Bad Request", details: err.errors }); + } else { + const id = uuidv4(); + let verbose = JSON.stringify(err); + if (verbose === "{}") { + if (err instanceof Error) { + verbose = JSON.stringify({ + message: err.message, + name: err.name, + stack: err.stack, + }); + } + } + + Logger.error("Error occurred in request! (" + req.path + ") -- ID " + id + " -- " + verbose); + res.status(500).json({ success: false, error: "An unexpected error occurred. Please contact hello@firecrawl.com for help. Your exception ID is " + id); + } +});
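The route table above leans on one recurring pattern: every async middleware and controller runs inside a promise whose rejection is funneled into `next(err)`, so the ZodError/500 handler at the bottom sees every failure. `wrap` is exactly this; restated generically for reference (Express 4 semantics):

```ts
import { NextFunction, Request, Response } from "express";

// Express 4 does not catch rejected promises from handlers on its own;
// this adapter forwards any rejection to the router's error middleware.
function asyncHandler(fn: (req: Request, res: Response) => Promise<unknown>) {
  return (req: Request, res: Response, next: NextFunction) => {
    fn(req, res).catch(next);
  };
}
```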
diff --git a/apps/api/src/run-req.ts b/apps/api/src/run-req.ts new file mode 100644 index 00000000..6d29916d --- /dev/null +++ b/apps/api/src/run-req.ts @@ -0,0 +1,175 @@ +import axios from "axios"; +import { promises as fs } from "fs"; +import { v4 as uuidV4 } from "uuid"; + +interface Result { + start_url: string; + job_id?: string; + idempotency_key?: string; + result_data_jsonb?: any; +} + +async function sendCrawl(result: Result): Promise<string | undefined> { + const idempotencyKey = uuidV4(); + const url = result.start_url; + try { + const response = await axios.post( + "https://staging-firecrawl-scraper-js.fly.dev/v0/crawl", + { + url: url, + crawlerOptions: { + limit: 75, + }, + pageOptions: { + includeHtml: true, + replaceAllPathsWithAbsolutePaths: true, + waitFor: 1000, + }, + }, + { + headers: { + "Content-Type": "application/json", + Authorization: `Bearer `, + }, + } + ); + result.idempotency_key = idempotencyKey; + return response.data.jobId; + } catch (error) { + console.error("Error sending crawl:", error); + return undefined; + } +} + +async function getContent(result: Result): Promise<boolean> { + let attempts = 0; + while (attempts < 120) { + try { + const response = await axios.get( + `https://staging-firecrawl-scraper-js.fly.dev/v0/crawl/status/${result.job_id}`, + { + headers: { + "Content-Type": "application/json", + Authorization: `Bearer `, + }, + } + ); + if (response.data.status === "completed") { + result.result_data_jsonb = response.data.data; + // Job actually completed + return true; + } + } catch (error) { + console.error("Error getting content:", error); + } + const randomSleep = Math.floor(Math.random() * 15000) + 5000; + await new Promise((resolve) => setTimeout(resolve, randomSleep)); // Sleep a random 5-20 seconds between polls + attempts++; + } + // Set result as null if timed out + result.result_data_jsonb = null; + return false; +} + +async function processResults(results: Result[]): Promise<void> { + let processedCount = 0; + let starterCount = 0; + const queue: Result[] = []; + const processedUrls = new Set<string>(); + + // Initialize the queue with the first 100 results + for (let i = 0; i < Math.min(100, results.length); i++) { + queue.push(results[i]); + processedUrls.add(results[i].start_url); + } + + // Function to process a single result + const processSingleResult = async (result: Result) => { + const jobId = await sendCrawl(result); + if (jobId) { + console.log(`Job requested count: ${starterCount}`); + starterCount++; + result.job_id = jobId; + processedCount++; + // Save the result to the file + try { + // Save job id along with the start_url + const resultWithJobId = results.map(r => ({ + start_url: r.start_url, + job_id: r.job_id, + })); + await fs.writeFile( + "results_with_job_id_4000_6000.json", + JSON.stringify(resultWithJobId, null, 4) + ); + } catch (error) { + console.error("Error writing to results_with_content.json:", error); + } + + // Add a new result to the queue if there are more results to process + // if (processedCount < results.length) { + // for (let i = queue.length; i < results.length; i++) { + // if (!processedUrls.has(results[i].start_url)) { + // const nextResult = results[i]; + // console.log("Next result:", nextResult.start_url); + // queue.push(nextResult); + // processedUrls.add(nextResult.start_url); + // console.log(`Queue length: ${queue.length}`); + // processSingleResult(nextResult); + // break; + // } + // } + // } + } + }; + + // Start processing the initial queue concurrently + // for (let i = 0; i < queue.length; i++) { + // processSingleResult(queue[i]); + // if ((i + 1) % 500 === 0) { + // console.log(`Processed ${i + 1} results, waiting for 1 minute before adding the next batch...`); + // await new Promise(resolve => setTimeout(resolve, 60 * 1000)); // Wait for 1 minute + // } + // } + // Start processing the initial queue concurrently + // await Promise.all(queue.map(result => processSingleResult(result))); + for (let i = 0; i < results.length; i += 100) { + const batch = results.slice(i, i + 100); + Promise.all(batch.map((result) => processSingleResult(result))) + .then(() => { + console.log(`Processed ${i + 100} results.`); + }) + .catch((error) => { + console.error(`Error processing batch starting at index ${i}:`, error); + }); + await new Promise((resolve) => setTimeout(resolve, 60 * 1000)); // Wait for 1 minute between batches + } +} + +// Example call + +async function getStartUrls(): Promise<Result[]> { + try { + const data = await fs.readFile("starturls.json", "utf-8"); + return JSON.parse(data); + } catch (error) { + console.error("Error reading starturls.json:", error); + return []; + } } + +async function main() { + const results: Result[] = (await getStartUrls()).slice(3999, 6000); + // console.log(results.map((r) => r.start_url).slice(0, 3)); + + processResults(results) + .then(() => { + console.log("All results processed."); + }) + .catch((error) => { + console.error("Error processing results:", error); + }); } + +main(); diff --git a/apps/api/src/scraper/WebScraper/__tests__/single_url.test.ts b/apps/api/src/scraper/WebScraper/__tests__/single_url.test.ts index 4b720835..da66830b 100644 --- a/apps/api/src/scraper/WebScraper/__tests__/single_url.test.ts +++ b/apps/api/src/scraper/WebScraper/__tests__/single_url.test.ts @@ -23,8 +23,8 @@ describe('scrapSingleUrl', () => { }, 10000); }); -it('should return a list of links on the mendable.ai page', async () => { - const url = 'https://mendable.ai'; +it('should return a list of links on the flutterbricks.com page', async () => { + const url = 'https://flutterbricks.com'; const pageOptions: PageOptions = { includeHtml: true }; const result = await scrapSingleUrl("TEST", url, pageOptions); @@ -33,5 +33,5 @@ it('should return a list of links on the mendable.ai page', async () => { expect(result.linksOnPage).toBeDefined(); expect(Array.isArray(result.linksOnPage)).toBe(true); expect(result.linksOnPage.length).toBeGreaterThan(0); - expect(result.linksOnPage).toContain('https://mendable.ai/blog') + expect(result.linksOnPage).toContain('https://flutterbricks.com/features') }, 10000); diff --git a/apps/api/src/scraper/WebScraper/crawler.ts b/apps/api/src/scraper/WebScraper/crawler.ts index 5ee8cda8..36e0f158 100644 --- a/apps/api/src/scraper/WebScraper/crawler.ts +++ b/apps/api/src/scraper/WebScraper/crawler.ts @@ -1,4 +1,4 @@ -import axios from "axios"; +import axios, { AxiosError } from "axios"; import cheerio, { load } from "cheerio"; import { URL } from "url"; import { getLinksFromSitemap } from "./sitemap"; @@ -22,7 +22,7 @@ export class WebCrawler { private crawledUrls: Map<string, string> = new Map(); private limit: number; private robotsTxtUrl: string; - private robots: any; + public robots: any; private generateImgAltText: boolean; private allowBackwardCrawling: boolean; private allowExternalContentLinks: boolean; @@ -66,7 +66,7 @@ export class WebCrawler { this.allowExternalContentLinks = allowExternalContentLinks ?? false; }
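The script above is also a reasonable template for consuming the async crawl API: submit, then poll the status endpoint until `completed`. A trimmed sketch of that loop (base URL and token are placeholders, exactly as in the script):

```ts
import axios from "axios";

async function pollCrawl(baseUrl: string, jobId: string, token: string) {
  for (let attempts = 0; attempts < 120; attempts++) {
    const { data } = await axios.get(`${baseUrl}/v0/crawl/status/${jobId}`, {
      headers: { Authorization: `Bearer ${token}` },
    });
    if (data.status === "completed") return data.data;
    await new Promise((r) => setTimeout(r, 10_000)); // wait before re-polling
  }
  return null; // timed out
}
```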
- private filterLinks(sitemapLinks: string[], limit: number, maxDepth: number): string[] { + public filterLinks(sitemapLinks: string[], limit: number, maxDepth: number): string[] { return sitemapLinks .filter((link) => { const url = new URL(link.trim(), this.baseUrl); @@ -130,6 +130,25 @@ export class WebCrawler { .slice(0, limit); } + public async getRobotsTxt(): Promise<string> { + const response = await axios.get(this.robotsTxtUrl, { timeout: axiosTimeout }); + return response.data; + } + + public importRobotsTxt(txt: string) { + this.robots = robotsParser(this.robotsTxtUrl, txt); + } + + public async tryGetSitemap(): Promise<{ url: string; html: string; }[] | null> { + Logger.debug(`Fetching sitemap links from ${this.initialUrl}`); + const sitemapLinks = await this.tryFetchSitemapLinks(this.initialUrl); + if (sitemapLinks.length > 0) { + let filteredLinks = this.filterLinks(sitemapLinks, this.limit, this.maxCrawledDepth); + return filteredLinks.map(link => ({ url: link, html: "" })); + } + return null; + } + public async start( inProgress?: (progress: Progress) => void, pageOptions?: PageOptions, @@ -142,19 +161,17 @@ export class WebCrawler { Logger.debug(`Crawler starting with ${this.initialUrl}`); // Fetch and parse robots.txt try { - const response = await axios.get(this.robotsTxtUrl, { timeout: axiosTimeout }); - this.robots = robotsParser(this.robotsTxtUrl, response.data); + const txt = await this.getRobotsTxt(); + this.importRobotsTxt(txt); Logger.debug(`Crawler robots.txt fetched with ${this.robotsTxtUrl}`); } catch (error) { Logger.debug(`Failed to fetch robots.txt from ${this.robotsTxtUrl}`); } if (!crawlerOptions?.ignoreSitemap){ - Logger.debug(`Fetching sitemap links from ${this.initialUrl}`); - const sitemapLinks = await this.tryFetchSitemapLinks(this.initialUrl); - if (sitemapLinks.length > 0) { - let filteredLinks = this.filterLinks(sitemapLinks, limit, maxDepth); - return filteredLinks.map(link => ({ url: link, html: "" })); + const sm = await this.tryGetSitemap(); + if (sm !== null) { + return sm; } } @@ -164,7 +181,7 @@ export class WebCrawler { concurrencyLimit, inProgress ); - + if ( urls.length === 0 && this.filterLinks([this.initialUrl], limit, this.maxCrawledDepth).length > 0 @@ -241,6 +258,54 @@ export class WebCrawler { return Array.from(this.crawledUrls.entries()).map(([url, html]) => ({ url, html })); } + public filterURL(href: string, url: string): string | null { + let fullUrl = href; + if (!href.startsWith("http")) { + fullUrl = new URL(href, this.baseUrl).toString(); + } + const urlObj = new URL(fullUrl); + const path = urlObj.pathname; + + if (this.isInternalLink(fullUrl)) { // INTERNAL LINKS + if (this.isInternalLink(fullUrl) && + this.noSections(fullUrl) && + !this.matchesExcludes(path) && + this.isRobotsAllowed(fullUrl) + ) { + return fullUrl; + } + } else { // EXTERNAL LINKS + if ( + this.isInternalLink(url) && + this.allowExternalContentLinks && + !this.isSocialMediaOrEmail(fullUrl) && + !this.matchesExcludes(fullUrl, true) && + !this.isExternalMainPage(fullUrl) + ) { + return fullUrl; + } + } + + return null; + } + + public extractLinksFromHTML(html: string, url: string) { + let links: string[] = []; + + const $ = load(html); + $("a").each((_, element) => { + const href = $(element).attr("href"); + if (href) { + const u = this.filterURL(href, url); + if (u !== null) { + links.push(u); + } + } + }); + + return links; + } + async crawl(url: string, pageOptions: PageOptions): Promise<{url: string, html: string, pageStatusCode?: number, pageError?: 
string}[]> { if (this.visited.has(url) || !this.robots.isAllowed(url, "FireCrawlAgent")) { return []; @@ -284,37 +349,7 @@ export class WebCrawler { links.push({ url, html: content, pageStatusCode, pageError }); } - $("a").each((_, element) => { - const href = $(element).attr("href"); - if (href) { - let fullUrl = href; - if (!href.startsWith("http")) { - fullUrl = new URL(href, this.baseUrl).toString(); - } - const urlObj = new URL(fullUrl); - const path = urlObj.pathname; - - if (this.isInternalLink(fullUrl)) { // INTERNAL LINKS - if (this.isInternalLink(fullUrl) && - this.noSections(fullUrl) && - !this.matchesExcludes(path) && - this.isRobotsAllowed(fullUrl) - ) { - links.push({ url: fullUrl, html: content, pageStatusCode, pageError }); - } - } else { // EXTERNAL LINKS - if ( - this.isInternalLink(url) && - this.allowExternalContentLinks && - !this.isSocialMediaOrEmail(fullUrl) && - !this.matchesExcludes(fullUrl, true) && - !this.isExternalMainPage(fullUrl) - ) { - links.push({ url: fullUrl, html: content, pageStatusCode, pageError }); - } - } - } - }); + links.push(...this.extractLinksFromHTML(content, url).map(url => ({ url, html: content, pageStatusCode, pageError }))); if (this.visited.size === 1) { return links; @@ -420,9 +455,10 @@ export class WebCrawler { ".woff", ".ttf", ".woff2", - ".webp" + ".webp", + ".inc" ]; - return fileExtensions.some((ext) => url.endsWith(ext)); + return fileExtensions.some((ext) => url.toLowerCase().endsWith(ext)); } private isSocialMediaOrEmail(url: string): boolean { @@ -464,9 +500,13 @@ export class WebCrawler { } } catch (error) { Logger.debug(`Failed to fetch sitemap with axios from ${sitemapUrl}: ${error}`); - const response = await getLinksFromSitemap({ sitemapUrl, mode: 'fire-engine' }); - if (response) { - sitemapLinks = response; + if (error instanceof AxiosError && error.response?.status === 404) { + // ignore 404 + } else { + const response = await getLinksFromSitemap({ sitemapUrl, mode: 'fire-engine' }); + if (response) { + sitemapLinks = response; + } } } @@ -475,11 +515,15 @@ export class WebCrawler { try { const response = await axios.get(baseUrlSitemap, { timeout: axiosTimeout }); if (response.status === 200) { - sitemapLinks = await getLinksFromSitemap({ sitemapUrl: baseUrlSitemap }); + sitemapLinks = await getLinksFromSitemap({ sitemapUrl: baseUrlSitemap, mode: 'fire-engine' }); } } catch (error) { Logger.debug(`Failed to fetch sitemap from ${baseUrlSitemap}: ${error}`); - sitemapLinks = await getLinksFromSitemap({ sitemapUrl: baseUrlSitemap, mode: 'fire-engine' }); + if (error instanceof AxiosError && error.response?.status === 404) { + // ignore 404 + } else { + sitemapLinks = await getLinksFromSitemap({ sitemapUrl: baseUrlSitemap, mode: 'fire-engine' }); + } } } diff --git a/apps/api/src/scraper/WebScraper/index.ts b/apps/api/src/scraper/WebScraper/index.ts index eff709fa..45a36bb1 100644 --- a/apps/api/src/scraper/WebScraper/index.ts +++ b/apps/api/src/scraper/WebScraper/index.ts @@ -16,18 +16,19 @@ import { replacePathsWithAbsolutePaths, } from "./utils/replacePaths"; import { generateCompletions } from "../../lib/LLM-extraction"; -import { getWebScraperQueue } from "../../../src/services/queue-service"; +import { getScrapeQueue } from "../../../src/services/queue-service"; import { fetchAndProcessDocx } from "./utils/docxProcessor"; import { getAdjustedMaxDepth, getURLDepth } from "./utils/maxDepthUtils"; import { Logger } from "../../lib/logger"; +import { ScrapeEvents } from "../../lib/scrape-events"; export class 
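With `getRobotsTxt`/`importRobotsTxt` and `extractLinksFromHTML` factored out of `start()` above, link extraction can run outside a live crawl. A hedged sketch, assuming a `WebCrawler` configured for https://example.com (for instance via `crawlToCrawler`):

```ts
import { WebCrawler } from "./scraper/WebScraper/crawler"; // path illustrative

async function extractExample(crawler: WebCrawler) {
  // Fetch robots.txt once and import it, as crawlToCrawler does with the
  // copy cached on the stored crawl.
  crawler.importRobotsTxt(await crawler.getRobotsTxt().catch(() => ""));

  const links = crawler.extractLinksFromHTML(
    '<a href="/pricing">Pricing</a><a href="mailto:hi@example.com">Mail</a>',
    "https://example.com"
  );
  // -> ["https://example.com/pricing"]; the mailto link fails the
  //    internal/external checks in filterURL and is dropped.
}
```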
WebScraperDataProvider { private jobId: string; private bullJobId: string; private urls: string[] = [""]; private mode: "single_urls" | "sitemap" | "crawl" = "single_urls"; - private includes: string[]; - private excludes: string[]; + private includes: string | string[]; + private excludes: string | string[]; private maxCrawledLinks: number; private maxCrawledDepth: number = 10; private returnOnlyUrls: boolean; @@ -43,6 +44,7 @@ export class WebScraperDataProvider { private crawlerMode: string = "default"; private allowBackwardCrawling: boolean = false; private allowExternalContentLinks: boolean = false; + private priority?: number; authorize(): void { throw new Error("Method not implemented."); @@ -71,7 +73,8 @@ export class WebScraperDataProvider { url, this.pageOptions, this.extractorOptions, - existingHTML + existingHTML, + this.priority, ); processedUrls++; if (inProgress) { @@ -87,21 +90,6 @@ export class WebScraperDataProvider { results[i + index] = result; }) ); - try { - if (this.mode === "crawl" && this.bullJobId) { - const job = await getWebScraperQueue().getJob(this.bullJobId); - const jobStatus = await job.getState(); - if (jobStatus === "failed") { - Logger.info( - "Job has failed or has been cancelled by the user. Stopping the job..." - ); - return [] as Document[]; - } - } - } catch (error) { - Logger.error(error.message); - return [] as Document[]; - } } return results.filter((result) => result !== null) as Document[]; } @@ -167,11 +155,29 @@ export class WebScraperDataProvider { private async handleCrawlMode( inProgress?: (progress: Progress) => void ): Promise { + let includes: string[]; + if (Array.isArray(this.includes)) { + if (this.includes[0] != "") { + includes = this.includes; + } + } else { + includes = this.includes.split(','); + } + + let excludes: string[]; + if (Array.isArray(this.excludes)) { + if (this.excludes[0] != "") { + excludes = this.excludes; + } + } else { + excludes = this.excludes.split(','); + } + const crawler = new WebCrawler({ jobId: this.jobId, initialUrl: this.urls[0], - includes: this.includes, - excludes: this.excludes, + includes, + excludes, maxCrawledLinks: this.maxCrawledLinks, maxCrawledDepth: getAdjustedMaxDepth(this.urls[0], this.maxCrawledDepth), limit: this.limit, @@ -287,7 +293,10 @@ export class WebScraperDataProvider { documents = await this.getSitemapData(this.urls[0], documents); } - documents = this.applyPathReplacements(documents); + if (this.pageOptions.includeMarkdown) { + documents = this.applyPathReplacements(documents); + } + // documents = await this.applyImgAltText(documents); if ( (this.extractorOptions.mode === "llm-extraction" || @@ -316,12 +325,31 @@ export class WebScraperDataProvider { private async fetchPdfDocuments(pdfLinks: string[]): Promise { return Promise.all( pdfLinks.map(async (pdfLink) => { + const timer = Date.now(); + const logInsertPromise = ScrapeEvents.insert(this.jobId, { + type: "scrape", + url: pdfLink, + worker: process.env.FLY_MACHINE_ID, + method: "pdf-scrape", + result: null, + }); + const { content, pageStatusCode, pageError } = await fetchAndProcessPdf( pdfLink, this.pageOptions.parsePDF ); + + const insertedLogId = await logInsertPromise; + ScrapeEvents.updateScrapeResult(insertedLogId, { + response_size: content.length, + success: !(pageStatusCode && pageStatusCode >= 400) && !!content && (content.trim().length >= 100), + error: pageError, + response_code: pageStatusCode, + time_taken: Date.now() - timer, + }); return { content: content, + markdown: content, metadata: { sourceURL: 
pdfLink, pageStatusCode, pageError }, provider: "web-scraper", }; @@ -330,12 +358,32 @@ export class WebScraperDataProvider { } private async fetchDocxDocuments(docxLinks: string[]): Promise { return Promise.all( - docxLinks.map(async (p) => { - const { content, pageStatusCode, pageError } = - await fetchAndProcessDocx(p); + docxLinks.map(async (docxLink) => { + const timer = Date.now(); + const logInsertPromise = ScrapeEvents.insert(this.jobId, { + type: "scrape", + url: docxLink, + worker: process.env.FLY_MACHINE_ID, + method: "docx-scrape", + result: null, + }); + + const { content, pageStatusCode, pageError } = await fetchAndProcessDocx( + docxLink + ); + + const insertedLogId = await logInsertPromise; + ScrapeEvents.updateScrapeResult(insertedLogId, { + response_size: content.length, + success: !(pageStatusCode && pageStatusCode >= 400) && !!content && (content.trim().length >= 100), + error: pageError, + response_code: pageStatusCode, + time_taken: Date.now() - timer, + }); + return { content, - metadata: { sourceURL: p, pageStatusCode, pageError }, + metadata: { sourceURL: docxLink, pageStatusCode, pageError }, provider: "web-scraper", }; }) @@ -406,6 +454,10 @@ export class WebScraperDataProvider { const url = new URL(document.metadata.sourceURL); const path = url.pathname; + if (!Array.isArray(this.excludes)) { + this.excludes = this.excludes.split(','); + } + if (this.excludes.length > 0 && this.excludes[0] !== "") { // Check if the link should be excluded if ( @@ -417,6 +469,10 @@ export class WebScraperDataProvider { } } + if (!Array.isArray(this.includes)) { + this.includes = this.includes.split(','); + } + if (this.includes.length > 0 && this.includes[0] !== "") { // Check if the link matches the include patterns, if any are specified if (this.includes.length > 0) { @@ -528,14 +584,22 @@ export class WebScraperDataProvider { options.crawlerOptions?.replaceAllPathsWithAbsolutePaths ?? options.pageOptions?.replaceAllPathsWithAbsolutePaths ?? false; - //! @nicolas, for some reason this was being injected and breaking everything. Don't have time to find source of the issue so adding this check - this.excludes = this.excludes.filter((item) => item !== ""); + + if (typeof options.crawlerOptions?.excludes === 'string') { + this.excludes = options.crawlerOptions?.excludes.split(',').filter((item) => item.trim() !== ""); + } + + if (typeof options.crawlerOptions?.includes === 'string') { + this.includes = options.crawlerOptions?.includes.split(',').filter((item) => item.trim() !== ""); + } + this.crawlerMode = options.crawlerOptions?.mode ?? "default"; this.ignoreSitemap = options.crawlerOptions?.ignoreSitemap ?? false; this.allowBackwardCrawling = options.crawlerOptions?.allowBackwardCrawling ?? false; this.allowExternalContentLinks = options.crawlerOptions?.allowExternalContentLinks ?? 
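The PDF and DOCX paths above now share one instrumentation shape: insert a scrape-event row before the fetch, then patch it with the outcome and elapsed time once the fetch settles. Reduced to a skeleton (same `ScrapeEvents` calls as above; the `run` callback and import path are illustrative):

```ts
import { ScrapeEvents } from "./lib/scrape-events"; // path illustrative

type FetchResult = { content: string; pageStatusCode?: number; pageError?: string };

async function timedScrape(jobId: string, url: string, run: () => Promise<FetchResult>) {
  const timer = Date.now();
  const logInsertPromise = ScrapeEvents.insert(jobId, {
    type: "scrape", url, worker: process.env.FLY_MACHINE_ID,
    method: "pdf-scrape", result: null, // method varies per fetcher
  });
  const { content, pageStatusCode, pageError } = await run();
  ScrapeEvents.updateScrapeResult(await logInsertPromise, {
    response_size: content.length,
    success: !(pageStatusCode && pageStatusCode >= 400) && !!content && content.trim().length >= 100,
    error: pageError,
    response_code: pageStatusCode,
    time_taken: Date.now() - timer,
  });
  return { content, pageStatusCode, pageError };
}
```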
false; + this.priority = options.priority; // make sure all urls start with https:// this.urls = this.urls.map((url) => { 
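
The `priority` option stored on the provider above is forwarded with every per-URL scrape and ends up in the fire-engine request body below. A heavily simplified sketch of that pass-through (the real signatures carry many more options; `Scraper` is a stand-in type, not from the diff):

```typescript
// Simplified sketch of the priority plumbing added in this PR; only the
// pass-through is shown, with a stand-in scraper signature.
type Scraper = (url: string, priority?: number) => Promise<string>;

async function convertUrlsToDocuments(
  urls: string[],
  scrape: Scraper,
  priority?: number
): Promise<string[]> {
  // Each per-URL scrape inherits the job-level priority, presumably so the
  // fire-engine service can order interactive scrapes ahead of bulk crawls.
  return Promise.all(urls.map((url) => scrape(url, priority)));
}
```
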
"null"} }` ); + const response = await axios.post( process.env.FIRE_ENGINE_BETA_URL + endpoint, { url: url, wait: waitParam, screenshot: screenshotParam, + fullPageScreenshot: fullPageScreenshotParam, headers: headers, pageOptions: pageOptions, + priority, ...fireEngineOptionsParam, }, { diff --git a/apps/api/src/scraper/WebScraper/single_url.ts b/apps/api/src/scraper/WebScraper/single_url.ts index fc55037e..408f9838 100644 --- a/apps/api/src/scraper/WebScraper/single_url.ts +++ b/apps/api/src/scraper/WebScraper/single_url.ts @@ -123,17 +123,21 @@ export async function scrapSingleUrl( jobId: string, urlToScrap: string, pageOptions: PageOptions = { + includeMarkdown: true, onlyMainContent: true, includeHtml: false, includeRawHtml: false, waitFor: 0, screenshot: false, + fullPageScreenshot: false, headers: undefined, + includeLinks: true }, extractorOptions: ExtractorOptions = { mode: "llm-extraction-from-markdown", }, - existingHtml: string = "" + existingHtml: string = "", + priority?: number, ): Promise { urlToScrap = urlToScrap.trim(); @@ -171,11 +175,13 @@ export async function scrapSingleUrl( url, waitFor: pageOptions.waitFor, screenshot: pageOptions.screenshot, + fullPageScreenshot: pageOptions.fullPageScreenshot, pageOptions: pageOptions, headers: pageOptions.headers, fireEngineOptions: { engine: engine, - } + }, + priority, }); scraperResponse.text = response.html; scraperResponse.screenshot = response.screenshot; @@ -306,7 +312,7 @@ export async function scrapSingleUrl( const scrapersInOrder = getScrapingFallbackOrder( defaultScraper, pageOptions && pageOptions.waitFor && pageOptions.waitFor > 0, - pageOptions && pageOptions.screenshot && pageOptions.screenshot === true, + pageOptions && (pageOptions.screenshot || pageOptions.fullPageScreenshot) && (pageOptions.screenshot === true || pageOptions.fullPageScreenshot === true), pageOptions && pageOptions.headers && pageOptions.headers !== undefined ); @@ -334,8 +340,8 @@ export async function scrapSingleUrl( pageError = undefined; } - if (text && text.trim().length >= 100) { - Logger.debug(`⛏️ ${scraper}: Successfully scraped ${urlToScrap} with text length >= 100, breaking`); + if ((text && text.trim().length >= 100) || (typeof screenshot === "string" && screenshot.length > 0)) { + Logger.debug(`⛏️ ${scraper}: Successfully scraped ${urlToScrap} with text length >= 100 or screenshot, breaking`); break; } if (pageStatusCode && (pageStatusCode == 404 || pageStatusCode == 500)) { @@ -357,20 +363,22 @@ export async function scrapSingleUrl( let linksOnPage: string[] | undefined; - linksOnPage = extractLinks(rawHtml, urlToScrap); + if (pageOptions.includeLinks) { + linksOnPage = extractLinks(rawHtml, urlToScrap); + } let document: Document; if (screenshot && screenshot.length > 0) { document = { content: text, - markdown: text, + markdown: pageOptions.includeMarkdown ? text : undefined, html: pageOptions.includeHtml ? html : undefined, rawHtml: pageOptions.includeRawHtml || extractorOptions.mode === "llm-extraction-from-raw-html" ? rawHtml : undefined, - linksOnPage, + linksOnPage: pageOptions.includeLinks ? linksOnPage : undefined, metadata: { ...metadata, screenshot: screenshot, @@ -382,7 +390,7 @@ export async function scrapSingleUrl( } else { document = { content: text, - markdown: text, + markdown: pageOptions.includeMarkdown ? text : undefined, html: pageOptions.includeHtml ? 
html : undefined, rawHtml: pageOptions.includeRawHtml || @@ -395,7 +403,7 @@ export async function scrapSingleUrl( pageStatusCode: pageStatusCode, pageError: pageError, }, - linksOnPage, + linksOnPage: pageOptions.includeLinks ? linksOnPage : undefined, }; } @@ -409,9 +417,9 @@ export async function scrapSingleUrl( }); return { content: "", - markdown: "", + markdown: pageOptions.includeMarkdown ? "" : undefined, html: "", - linksOnPage: [], + linksOnPage: pageOptions.includeLinks ? [] : undefined, metadata: { sourceURL: urlToScrap, pageStatusCode: pageStatusCode, diff --git a/apps/api/src/scraper/WebScraper/sitemap.ts b/apps/api/src/scraper/WebScraper/sitemap.ts index 3dfc9a1c..b1a6a6ff 100644 --- a/apps/api/src/scraper/WebScraper/sitemap.ts +++ b/apps/api/src/scraper/WebScraper/sitemap.ts @@ -19,7 +19,7 @@ export async function getLinksFromSitemap( try { let content: string; try { - if (mode === 'axios') { + if (mode === 'axios' || process.env.FIRE_ENGINE_BETA_URL === '') { const response = await axios.get(sitemapUrl, { timeout: axiosTimeout }); content = response.data; } else if (mode === 'fire-engine') { diff --git a/apps/api/src/scraper/WebScraper/utils/custom/website_params.ts b/apps/api/src/scraper/WebScraper/utils/custom/website_params.ts index c688061d..af8d1f34 100644 --- a/apps/api/src/scraper/WebScraper/utils/custom/website_params.ts +++ b/apps/api/src/scraper/WebScraper/utils/custom/website_params.ts @@ -1,24 +1,11 @@ export const urlSpecificParams = { "platform.openai.com": { - params: { - wait_browser: "networkidle2", - block_resources: false, - }, - headers: { - "User-Agent": - "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36", - "sec-fetch-site": "same-origin", - "sec-fetch-mode": "cors", - "sec-fetch-dest": "empty", - referer: "https://www.google.com/", - "accept-language": "en-US,en;q=0.9", - "accept-encoding": "gzip, deflate, br", - accept: - "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9", - }, - cookies: { - __cf_bm: - "mC1On8P2GWT3A5UeSYH6z_MP94xcTAdZ5jfNi9IT2U0-1714327136-1.0.1.1-ILAP5pSX_Oo9PPo2iHEYCYX.p9a0yRBNLr58GHyrzYNDJ537xYpG50MXxUYVdfrD.h3FV5O7oMlRKGA0scbxaQ", + defaultScraper: "fire-engine", + params:{ + wait: 3000, + fireEngineOptions:{ + engine: "chrome-cdp" + }, }, }, "support.greenpay.me":{ @@ -232,4 +219,28 @@ export const urlSpecificParams = { } }, }, + "amazon.com":{ + defaultScraper: "fire-engine", + params:{ + fireEngineOptions:{ + engine: "chrome-cdp", + }, + }, + }, + "digikey.com":{ + defaultScraper: "fire-engine", + params:{ + fireEngineOptions:{ + engine: "tlsclient", + }, + }, + }, + "zoopla.co.uk":{ + defaultScraper: "fire-engine", + params:{ + fireEngineOptions:{ + engine: "chrome-cdp", + }, + }, + } }; diff --git a/apps/api/src/scraper/WebScraper/utils/docxProcessor.ts b/apps/api/src/scraper/WebScraper/utils/docxProcessor.ts index a01b8a28..8f6dc97c 100644 --- a/apps/api/src/scraper/WebScraper/utils/docxProcessor.ts +++ b/apps/api/src/scraper/WebScraper/utils/docxProcessor.ts @@ -4,38 +4,76 @@ import { createWriteStream } from "node:fs"; import path from "path"; import os from "os"; import mammoth from "mammoth"; +import { Logger } from "../../../lib/logger"; export async function fetchAndProcessDocx(url: string): Promise<{ content: string; pageStatusCode: number; pageError: string }> { - const { tempFilePath, pageStatusCode, pageError } = await downloadDocx(url); - const content = 
await processDocxToText(tempFilePath); - fs.unlinkSync(tempFilePath); // Clean up the temporary file + let tempFilePath = ''; + let pageStatusCode = 200; + let pageError = ''; + let content = ''; + + try { + const downloadResult = await downloadDocx(url); + tempFilePath = downloadResult.tempFilePath; + pageStatusCode = downloadResult.pageStatusCode; + pageError = downloadResult.pageError; + content = await processDocxToText(tempFilePath); + } catch (error) { + Logger.error(`Failed to fetch and process DOCX: ${error.message}`); + pageStatusCode = 500; + pageError = error.message; + content = ''; + } finally { + if (tempFilePath) { + fs.unlinkSync(tempFilePath); // Clean up the temporary file + } + } + return { content, pageStatusCode, pageError }; } async function downloadDocx(url: string): Promise<{ tempFilePath: string; pageStatusCode: number; pageError: string }> { - const response = await axios({ - url, - method: "GET", - responseType: "stream", - }); + try { + const response = await axios({ + url, + method: "GET", + responseType: "stream", + }); - const tempFilePath = path.join(os.tmpdir(), `tempDocx-${Date.now()}.docx`); - const writer = createWriteStream(tempFilePath); + const tempFilePath = path.join(os.tmpdir(), `tempDocx-${Date.now()}.docx`); + const writer = createWriteStream(tempFilePath); - response.data.pipe(writer); + response.data.pipe(writer); - return new Promise((resolve, reject) => { - writer.on("finish", () => resolve({ tempFilePath, pageStatusCode: response.status, pageError: response.statusText != "OK" ? response.statusText : undefined })); - writer.on("error", reject); - }); + return new Promise((resolve, reject) => { + writer.on("finish", () => resolve({ tempFilePath, pageStatusCode: response.status, pageError: response.statusText != "OK" ? response.statusText : undefined })); + writer.on("error", () => { + Logger.error('Failed to write DOCX file to disk'); + reject(new Error('Failed to write DOCX file to disk')); + }); + }); + } catch (error) { + Logger.error(`Failed to download DOCX: ${error.message}`); + return { tempFilePath: "", pageStatusCode: 500, pageError: error.message }; + } } export async function processDocxToText(filePath: string): Promise { - const content = await extractTextFromDocx(filePath); - return content; + try { + const content = await extractTextFromDocx(filePath); + return content; + } catch (error) { + Logger.error(`Failed to process DOCX to text: ${error.message}`); + return ""; + } } async function extractTextFromDocx(filePath: string): Promise { - const result = await mammoth.extractRawText({ path: filePath }); - return result.value; + try { + const result = await mammoth.extractRawText({ path: filePath }); + return result.value; + } catch (error) { + Logger.error(`Failed to extract text from DOCX: ${error.message}`); + return ""; + } } diff --git a/apps/api/src/scraper/WebScraper/utils/metadata.ts b/apps/api/src/scraper/WebScraper/utils/metadata.ts index 9496d569..fac53b38 100644 --- a/apps/api/src/scraper/WebScraper/utils/metadata.ts +++ b/apps/api/src/scraper/WebScraper/utils/metadata.ts @@ -75,9 +75,7 @@ export function extractMetadata(soup: CheerioAPI, url: string): Metadata { description = soup('meta[name="description"]').attr("content") || null; // Assuming the language is part of the URL as per the regex pattern - const pattern = /([a-zA-Z]+-[A-Z]{2})/; - const match = pattern.exec(url); - language = match ? 
match[1] : null; + language = soup('html').attr('lang') || null; keywords = soup('meta[name="keywords"]').attr("content") || null; robots = soup('meta[name="robots"]').attr("content") || null; diff --git a/apps/api/src/scraper/WebScraper/utils/pdfProcessor.ts b/apps/api/src/scraper/WebScraper/utils/pdfProcessor.ts index 660d27eb..b27db99a 100644 --- a/apps/api/src/scraper/WebScraper/utils/pdfProcessor.ts +++ b/apps/api/src/scraper/WebScraper/utils/pdfProcessor.ts @@ -76,7 +76,6 @@ export async function processPdfToText(filePath: string, parsePDF: boolean): Pro let attempt = 0; const maxAttempts = 10; // Maximum number of attempts let resultAvailable = false; - while (attempt < maxAttempts && !resultAvailable) { try { resultResponse = await axios.get(resultUrl, { headers, timeout: (axiosTimeout * 2) }); @@ -90,13 +89,22 @@ export async function processPdfToText(filePath: string, parsePDF: boolean): Pro } catch (error) { Logger.debug("Error fetching result w/ LlamaIndex"); attempt++; + if (attempt >= maxAttempts) { + Logger.error("Max attempts reached, unable to fetch result."); + break; // Exit the loop if max attempts are reached + } await new Promise((resolve) => setTimeout(resolve, 500)); // Wait for 0.5 seconds before retrying // You may want to handle specific errors differently } } if (!resultAvailable) { - content = await processPdf(filePath); + try { + content = await processPdf(filePath); + } catch (error) { + Logger.error(`Failed to process PDF: ${error}`); + content = ""; + } } content = resultResponse.data[resultType]; } catch (error) { @@ -104,15 +112,29 @@ export async function processPdfToText(filePath: string, parsePDF: boolean): Pro content = await processPdf(filePath); } } else if (parsePDF) { - content = await processPdf(filePath); + try { + content = await processPdf(filePath); + } catch (error) { + Logger.error(`Failed to process PDF: ${error}`); + content = ""; + } } else { - content = fs.readFileSync(filePath, "utf-8"); + try { + content = fs.readFileSync(filePath, "utf-8"); + } catch (error) { + Logger.error(`Failed to read PDF file: ${error}`); + content = ""; + } } return content; } async function processPdf(file: string) { - const fileContent = fs.readFileSync(file); - const data = await pdf(fileContent); - return data.text; + try { + const fileContent = fs.readFileSync(file); + const data = await pdf(fileContent); + return data.text; + } catch (error) { + throw error; + } } \ No newline at end of file diff --git a/apps/api/src/scraper/WebScraper/utils/utils.ts b/apps/api/src/scraper/WebScraper/utils/utils.ts index dd5906b0..872adc6e 100644 --- a/apps/api/src/scraper/WebScraper/utils/utils.ts +++ b/apps/api/src/scraper/WebScraper/utils/utils.ts @@ -41,10 +41,10 @@ export function extractLinks(html: string, baseUrl: string): string[] { links.push(href); } else if (href.startsWith('/')) { // Relative URL starting with '/', append to origin - links.push(`${origin}${href}`); + links.push(new URL(href, baseUrl).href); } else if (!href.startsWith('#') && !href.startsWith('mailto:')) { // Relative URL not starting with '/', append to base URL - links.push(`${baseUrl}/${href}`); + links.push(new URL(href, baseUrl).href); } else if (href.startsWith('mailto:')) { // mailto: links, add as is links.push(href); diff --git a/apps/api/src/search/fireEngine.ts b/apps/api/src/search/fireEngine.ts new file mode 100644 index 00000000..c316f8f7 --- /dev/null +++ b/apps/api/src/search/fireEngine.ts @@ -0,0 +1,44 @@ +import axios from "axios"; +import dotenv from "dotenv"; +import { 
SearchResult } from "../../src/lib/entities"; + +dotenv.config(); + +export async function fireEngineMap(q: string, options: { + tbs?: string; + filter?: string; + lang?: string; + country?: string; + location?: string; + numResults: number; + page?: number; +}): Promise<SearchResult[]> { + let data = JSON.stringify({ + query: q, + lang: options.lang, + country: options.country, + location: options.location, + tbs: options.tbs, + numResults: options.numResults, + page: options.page ?? 1, + }); + + if (!process.env.FIRE_ENGINE_BETA_URL) { + return []; + } + + let config = { + method: "POST", + url: `${process.env.FIRE_ENGINE_BETA_URL}/search`, + headers: { + "Content-Type": "application/json", + }, + data: data, + }; + const response = await axios(config); + if (response && response.data) { + return response.data; + } else { + return []; + } +} diff --git a/apps/api/src/search/googlesearch.ts b/apps/api/src/search/googlesearch.ts index 060f4bd8..0e247702 100644 --- a/apps/api/src/search/googlesearch.ts +++ b/apps/api/src/search/googlesearch.ts @@ -52,7 +52,7 @@ async function _req(term: string, results: number, lang: string, country: string -export async function google_search(term: string, advanced = false, num_results = 7, tbs = null, filter = null, lang = "en", country = "us", proxy = null, sleep_interval = 0, timeout = 5000, ) :Promise<SearchResult[]> { +export async function googleSearch(term: string, advanced = false, num_results = 7, tbs = null, filter = null, lang = "en", country = "us", proxy = null, sleep_interval = 0, timeout = 5000, ) :Promise<SearchResult[]> { let proxies = null; if (proxy) { if (proxy.startsWith("https")) { diff --git a/apps/api/src/search/index.ts b/apps/api/src/search/index.ts index f5bc06e3..f4c5b6d0 100644 --- a/apps/api/src/search/index.ts +++ b/apps/api/src/search/index.ts @@ -1,11 +1,9 @@ import { Logger } from "../../src/lib/logger"; import { SearchResult } from "../../src/lib/entities"; -import { google_search } from "./googlesearch"; +import { googleSearch } from "./googlesearch"; +import { fireEngineMap } from "./fireEngine"; import { serper_search } from "./serper"; - - - export async function search({ query, advanced = false, @@ -30,12 +28,20 @@ export async function search({ proxy?: string; sleep_interval?: number; timeout?: number; -}) : Promise<SearchResult[]> { +}): Promise<SearchResult[]> { try { - if (process.env.SERPER_API_KEY ) { - return await serper_search(query, {num_results, tbs, filter, lang, country, location}); + + if (process.env.SERPER_API_KEY) { + return await serper_search(query, { + num_results, + tbs, + filter, + lang, + country, + location, + }); } - return await google_search( + return await googleSearch( query, advanced, num_results, @@ -49,7 +55,6 @@ ); } catch (error) { Logger.error(`Error in search function: ${error}`); - return [] + return []; } - // if process.env.SERPER_API_KEY is set, use serper } diff --git a/apps/api/src/services/alerts/index.ts b/apps/api/src/services/alerts/index.ts index 0376f4c2..f5e38d14 100644 --- a/apps/api/src/services/alerts/index.ts +++ b/apps/api/src/services/alerts/index.ts @@ -1,5 +1,5 @@ import { Logger } from "../../../src/lib/logger"; -import { getWebScraperQueue } from "../queue-service"; +import { getScrapeQueue } from "../queue-service"; import { sendSlackWebhook } from "./slack"; export async function checkAlerts() { @@ -13,8 +13,8 @@ export async function checkAlerts() { Logger.info("Initializing alerts"); const checkActiveJobs = async () => { try { - const webScraperQueue = getWebScraperQueue(); - const activeJobs = await 
webScraperQueue.getActiveCount(); + const scrapeQueue = getScrapeQueue(); + const activeJobs = await scrapeQueue.getActiveCount(); if (activeJobs > Number(process.env.ALERT_NUM_ACTIVE_JOBS)) { Logger.warn( `Alert: Number of active jobs is over ${process.env.ALERT_NUM_ACTIVE_JOBS}. Current active jobs: ${activeJobs}.` @@ -34,8 +34,8 @@ export async function checkAlerts() { }; const checkWaitingQueue = async () => { - const webScraperQueue = getWebScraperQueue(); - const waitingJobs = await webScraperQueue.getWaitingCount(); + const scrapeQueue = getScrapeQueue(); + const waitingJobs = await scrapeQueue.getWaitingCount(); if (waitingJobs > Number(process.env.ALERT_NUM_WAITING_JOBS)) { Logger.warn( @@ -49,7 +49,7 @@ export async function checkAlerts() { }; const checkAll = async () => { - await checkActiveJobs(); + // await checkActiveJobs(); await checkWaitingQueue(); }; diff --git a/apps/api/src/services/billing/credit_billing.ts b/apps/api/src/services/billing/credit_billing.ts index 9369cdbb..2ad07318 100644 --- a/apps/api/src/services/billing/credit_billing.ts +++ b/apps/api/src/services/billing/credit_billing.ts @@ -3,9 +3,13 @@ import { withAuth } from "../../lib/withAuth"; import { sendNotification } from "../notification/email_notification"; import { supabase_service } from "../supabase"; import { Logger } from "../../lib/logger"; +import { getValue, setValue } from "../redis"; +import { redlock } from "../redlock"; + const FREE_CREDITS = 500; + export async function billTeam(team_id: string, credits: number) { return withAuth(supaBillTeam)(team_id, credits); } @@ -164,10 +168,11 @@ export async function supaBillTeam(team_id: string, credits: number) { export async function checkTeamCredits(team_id: string, credits: number) { return withAuth(supaCheckTeamCredits)(team_id, credits); } + // if team has enough credits for the operation, return true, else return false export async function supaCheckTeamCredits(team_id: string, credits: number) { if (team_id === "preview") { - return { success: true, message: "Preview team, no credits used" }; + return { success: true, message: "Preview team, no credits used", remainingCredits: Infinity }; } // Retrieve the team's active subscription and check for available coupons concurrently @@ -198,7 +203,7 @@ export async function supaCheckTeamCredits(team_id: string, credits: number) { if (subscriptionError || !subscription) { // If there is no active subscription but there are available coupons if (couponCredits >= credits) { - return { success: true, message: "Sufficient credits available" }; + return { success: true, message: "Sufficient credits available", remainingCredits: couponCredits }; } const { data: creditUsages, error: creditUsageError } = @@ -248,29 +253,48 @@ export async function supaCheckTeamCredits(team_id: string, credits: number) { return { success: false, message: "Insufficient credits, please upgrade!", + remainingCredits: FREE_CREDITS - totalCreditsUsed }; } - return { success: true, message: "Sufficient credits available" }; + return { success: true, message: "Sufficient credits available", remainingCredits: FREE_CREDITS - totalCreditsUsed }; } let totalCreditsUsed = 0; + const cacheKey = `credit_usage_${subscription.id}_${subscription.current_period_start}_${subscription.current_period_end}_lc`; + const redLockKey = `lock_${cacheKey}`; + const lockTTL = 10000; // 10 seconds + try { - const { data: creditUsages, error: creditUsageError } = - await supabase_service.rpc("get_credit_usage_2", { - sub_id: subscription.id, - 
start_time: subscription.current_period_start, - end_time: subscription.current_period_end, - }); + const lock = await redlock.acquire([redLockKey], lockTTL); - if (creditUsageError) { - Logger.error(`Error calculating credit usage: ${creditUsageError}`); - } + try { + const cachedCreditUsage = await getValue(cacheKey); - if (creditUsages && creditUsages.length > 0) { - totalCreditsUsed = creditUsages[0].total_credits_used; + if (cachedCreditUsage) { + totalCreditsUsed = parseInt(cachedCreditUsage); + } else { + const { data: creditUsages, error: creditUsageError } = + await supabase_service.rpc("get_credit_usage_2", { + sub_id: subscription.id, + start_time: subscription.current_period_start, + end_time: subscription.current_period_end, + }); + + if (creditUsageError) { + Logger.error(`Error calculating credit usage: ${creditUsageError}`); + } + + if (creditUsages && creditUsages.length > 0) { + totalCreditsUsed = creditUsages[0].total_credits_used; + await setValue(cacheKey, totalCreditsUsed.toString(), 1800); // Cache for 30 minutes + // Logger.info(`Cache set for credit usage: ${totalCreditsUsed}`); + } + } + } finally { + await lock.release(); } } catch (error) { - Logger.error(`Error calculating credit usage: ${error}`); + Logger.error(`Error acquiring lock or calculating credit usage: ${error}`); } // Adjust total credits used by subtracting coupon value @@ -299,7 +323,7 @@ export async function supaCheckTeamCredits(team_id: string, credits: number) { subscription.current_period_start, subscription.current_period_end ); - return { success: false, message: "Insufficient credits, please upgrade!" }; + return { success: false, message: "Insufficient credits, please upgrade!", remainingCredits: creditLimit - adjustedCreditsUsed }; } else if (creditUsagePercentage >= 0.8) { // Send email notification for approaching credit limit await sendNotification( @@ -310,7 +334,7 @@ export async function supaCheckTeamCredits(team_id: string, credits: number) { ); } - return { success: true, message: "Sufficient credits available" }; + return { success: true, message: "Sufficient credits available", remainingCredits: creditLimit - adjustedCreditsUsed }; } // Count the total credits used by a team within the current billing period and return the remaining credits. 
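
The credit check above wraps its `get_credit_usage_2` RPC in a Redis cache guarded by a Redlock mutex, so concurrent checks for the same subscription don't all recompute the total at once. Stripped of the billing specifics, the pattern is roughly the sketch below (the diff itself goes through `getValue`/`setValue` helpers rather than a raw client):

```typescript
import Client from "ioredis";
import Redlock from "redlock";

const redis = new Client(process.env.REDIS_RATE_LIMIT_URL);
const redlock = new Redlock([redis], { retryCount: 5, retryDelay: 100 });

// Cache-under-lock: only one caller recomputes a missing value; the others
// block on the lock and then read the freshly cached result.
async function cachedUnderLock(
  key: string,
  ttlSeconds: number,
  compute: () => Promise<number>
): Promise<number> {
  const lock = await redlock.acquire([`lock_${key}`], 10_000); // 10s lock TTL
  try {
    const cached = await redis.get(key);
    if (cached !== null) return parseInt(cached, 10);
    const fresh = await compute(); // e.g. the get_credit_usage_2 RPC
    await redis.set(key, fresh.toString(), "EX", ttlSeconds);
    return fresh;
  } finally {
    await lock.release();
  }
}
```
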
diff --git a/apps/api/src/services/logging/log_job.ts b/apps/api/src/services/logging/log_job.ts index 93d0b311..61983be0 100644 --- a/apps/api/src/services/logging/log_job.ts +++ b/apps/api/src/services/logging/log_job.ts @@ -40,10 +40,11 @@ export async function logJob(job: FirecrawlJob) { extractor_options: job.extractor_options, num_tokens: job.num_tokens, retry: !!job.retry, + crawl_id: job.crawl_id, }, ]); - if (process.env.POSTHOG_API_KEY) { + if (process.env.POSTHOG_API_KEY && !job.crawl_id) { let phLog = { distinctId: "from-api", //* To identify this on the group level, setting distinctid to a static string per posthog docs: https://posthog.com/docs/product-analytics/group-analytics#advanced-server-side-only-capturing-group-events-without-a-user ...(job.team_id !== "preview" && { diff --git a/apps/api/src/services/logging/scrape_log.ts b/apps/api/src/services/logging/scrape_log.ts index 208159da..099e4a0b 100644 --- a/apps/api/src/services/logging/scrape_log.ts +++ b/apps/api/src/services/logging/scrape_log.ts @@ -44,9 +44,9 @@ export async function logScrape( ]); if (error) { - Logger.error(`Error logging proxy:\n${error}`); + Logger.error(`Error logging proxy:\n${JSON.stringify(error)}`); } } catch (error) { - Logger.error(`Error logging proxy:\n${error}`); + Logger.error(`Error logging proxy:\n${JSON.stringify(error)}`); } } diff --git a/apps/api/src/services/queue-jobs.ts b/apps/api/src/services/queue-jobs.ts index 46c2fb22..3099da68 100644 --- a/apps/api/src/services/queue-jobs.ts +++ b/apps/api/src/services/queue-jobs.ts @@ -1,28 +1,15 @@ import { Job, Queue } from "bullmq"; -import { - getScrapeQueue, - getWebScraperQueue, -} from "./queue-service"; +import { getScrapeQueue } from "./queue-service"; import { v4 as uuidv4 } from "uuid"; import { WebScraperOptions } from "../types"; -export async function addWebScraperJob( - webScraperOptions: WebScraperOptions, - options: any = {}, - jobId: string = uuidv4(), -): Promise { - return await getWebScraperQueue().add(jobId, webScraperOptions, { - ...options, - jobId, - }); -} - export async function addScrapeJob( webScraperOptions: WebScraperOptions, options: any = {}, jobId: string = uuidv4(), ): Promise { return await getScrapeQueue().add(jobId, webScraperOptions, { + priority: webScraperOptions.crawl_id ? 
20 : 10, ...options, jobId, }); diff --git a/apps/api/src/services/queue-service.ts b/apps/api/src/services/queue-service.ts index 348e3d7c..b13489a6 100644 --- a/apps/api/src/services/queue-service.ts +++ b/apps/api/src/services/queue-service.ts @@ -2,38 +2,13 @@ import { Queue } from "bullmq"; import { Logger } from "../lib/logger"; import IORedis from "ioredis"; -let webScraperQueue: Queue; let scrapeQueue: Queue; export const redisConnection = new IORedis(process.env.REDIS_URL, { maxRetriesPerRequest: null, }); -export const webScraperQueueName = "{crawlQueue}"; export const scrapeQueueName = "{scrapeQueue}"; -export function getWebScraperQueue() { - if (!webScraperQueue) { - webScraperQueue = new Queue( - webScraperQueueName, - { - connection: redisConnection, - } - // { - // settings: { - // lockDuration: 1 * 60 * 1000, // 1 minute in milliseconds, - // lockRenewTime: 15 * 1000, // 15 seconds in milliseconds - // stalledInterval: 30 * 1000, - // maxStalledCount: 10, - // }, - // defaultJobOptions:{ - // attempts: 5 - // } - // } - ); - Logger.info("Web scraper queue created"); - } - return webScraperQueue; -} export function getScrapeQueue() { if (!scrapeQueue) { @@ -62,5 +37,4 @@ export function getScrapeQueue() { import { QueueEvents } from 'bullmq'; -export const scrapeQueueEvents = new QueueEvents(scrapeQueueName, { connection: redisConnection }); -export const webScraperQueueEvents = new QueueEvents(webScraperQueueName, { connection: redisConnection }); \ No newline at end of file +export const scrapeQueueEvents = new QueueEvents(scrapeQueueName, { connection: redisConnection }); \ No newline at end of file diff --git a/apps/api/src/services/queue-worker.ts b/apps/api/src/services/queue-worker.ts index e660378b..afd80f42 100644 --- a/apps/api/src/services/queue-worker.ts +++ b/apps/api/src/services/queue-worker.ts @@ -1,23 +1,24 @@ +import "dotenv/config"; import { CustomError } from "../lib/custom-error"; import { - getWebScraperQueue, getScrapeQueue, redisConnection, - webScraperQueueName, scrapeQueueName, } from "./queue-service"; -import "dotenv/config"; import { logtail } from "./logtail"; import { startWebScraperPipeline } from "../main/runWebScraper"; import { callWebhook } from "./webhook"; import { logJob } from "./logging/log_job"; import { initSDK } from "@hyperdx/node-opentelemetry"; -import { Job, QueueEvents, tryCatch } from "bullmq"; +import { Job } from "bullmq"; import { Logger } from "../lib/logger"; -import { ScrapeEvents } from "../lib/scrape-events"; import { Worker } from "bullmq"; import systemMonitor from "./system-monitor"; import { v4 as uuidv4 } from "uuid"; +import { addCrawlJob, addCrawlJobDone, crawlToCrawler, finishCrawl, getCrawl, getCrawlJobs, lockURL } from "../lib/crawl-redis"; +import { StoredCrawl } from "../lib/crawl-redis"; +import { addScrapeJob } from "./queue-jobs"; +import { supabaseGetJobById } from "../../src/lib/supabase-jobs"; if (process.env.ENV === "production") { initSDK({ @@ -33,30 +34,29 @@ const workerStalledCheckInterval = const jobLockExtendInterval = Number(process.env.JOB_LOCK_EXTEND_INTERVAL) || 15000; const jobLockExtensionTime = - Number(process.env.JOB_LOCK_EXTENSION_TIME) || 15000; + Number(process.env.JOB_LOCK_EXTENSION_TIME) || 60000; const cantAcceptConnectionInterval = Number(process.env.CANT_ACCEPT_CONNECTION_INTERVAL) || 2000; const connectionMonitorInterval = Number(process.env.CONNECTION_MONITOR_INTERVAL) || 10; const gotJobInterval = Number(process.env.CONNECTION_MONITOR_INTERVAL) || 20; -const wsq = 
getWebScraperQueue(); -const sq = getScrapeQueue(); const processJobInternal = async (token: string, job: Job) => { const extendLockInterval = setInterval(async () => { + Logger.info(`🐂 Worker extending lock on job ${job.id}`); await job.extendLock(token, jobLockExtensionTime); }, jobLockExtendInterval); try { const result = await processJob(job, token); - const jobState = await job.getState(); - if(jobState !== "completed" && jobState !== "failed"){ - try{ - await job.moveToCompleted(result.docs, token, false); //3rd arg fetchNext - }catch(e){ - // console.log("Job already completed, error:", e); + try{ + if (job.data.crawl_id && process.env.USE_DB_AUTHENTICATION === "true") { + await job.moveToCompleted(null, token, false); + } else { + await job.moveToCompleted(result.docs, token, false); } + }catch(e){ } } catch (error) { console.log("Job failed, error:", error); @@ -110,11 +110,10 @@ const workerFun = async (queueName: string, processJobInternal: (token: string, } }; -workerFun(webScraperQueueName, processJobInternal); workerFun(scrapeQueueName, processJobInternal); async function processJob(job: Job, token: string) { - Logger.debug(`🐂 Worker taking job ${job.id}`); + Logger.info(`🐂 Worker taking job ${job.id}`); try { job.updateProgress({ @@ -131,18 +130,16 @@ async function processJob(job: Job, token: string) { const end = Date.now(); const timeTakenInSeconds = (end - start) / 1000; - const isCancelled = await (await getWebScraperQueue().client).exists("cancelled:" + job.id); + const rawHtml = docs[0].rawHtml; - if (isCancelled) { - await job.discard(); - await job.moveToFailed(Error("Job cancelled by user"), job.token); - await job.discard(); + if (job.data.crawl_id && (!job.data.pageOptions || !job.data.pageOptions.includeRawHtml)) { + delete docs[0].rawHtml; } const data = { success, result: { - links: isCancelled ? [] : docs.map((doc) => { + links: docs.map((doc) => { return { content: doc, source: doc?.metadata?.sourceURL ?? doc?.url ?? "", @@ -150,36 +147,132 @@ async function processJob(job: Job, token: string) { }), }, project_id: job.data.project_id, - error: isCancelled ? "Job cancelled by user" : message /* etc... */, - docs: isCancelled ? [] : docs, + error: message /* etc... */, + docs, }; - if (job.data.mode === "crawl" && !isCancelled) { - await callWebhook(job.data.team_id, job.id as string, data); + if (job.data.mode === "crawl") { + await callWebhook(job.data.team_id, job.id as string, data, job.data.webhook); } - await logJob({ - job_id: job.id as string, - success: success && !isCancelled, - message: isCancelled ? "Job cancelled by user" : message, - num_docs: isCancelled ? 0 : docs.length, - docs: isCancelled ? 
[] : docs, - time_taken: timeTakenInSeconds, - team_id: job.data.team_id, - mode: job.data.mode, - url: job.data.url, - crawlerOptions: job.data.crawlerOptions, - pageOptions: job.data.pageOptions, - origin: job.data.origin, - }); - Logger.debug(`🐂 Job done ${job.id}`); + if (job.data.crawl_id) { + await logJob({ + job_id: job.id as string, + success: success, + message: message, + num_docs: docs.length, + docs: docs, + time_taken: timeTakenInSeconds, + team_id: job.data.team_id, + mode: job.data.mode, + url: job.data.url, + crawlerOptions: job.data.crawlerOptions, + pageOptions: job.data.pageOptions, + origin: job.data.origin, + crawl_id: job.data.crawl_id, + }); + + await addCrawlJobDone(job.data.crawl_id, job.id); + + const sc = await getCrawl(job.data.crawl_id) as StoredCrawl; + + if (!job.data.sitemapped) { + if (!sc.cancelled) { + const crawler = crawlToCrawler(job.data.crawl_id, sc); + + const links = crawler.filterLinks( + crawler.extractLinksFromHTML(rawHtml ?? "", sc.originUrl), + Infinity, + sc.crawlerOptions?.maxDepth ?? 10 + ) + + for (const link of links) { + if (await lockURL(job.data.crawl_id, sc, link)) { + const newJob = await addScrapeJob({ + url: link, + mode: "single_urls", + crawlerOptions: sc.crawlerOptions, + team_id: sc.team_id, + pageOptions: sc.pageOptions, + origin: job.data.origin, + crawl_id: job.data.crawl_id, + }); + + await addCrawlJob(job.data.crawl_id, newJob.id); + } + } + } + } + + if (await finishCrawl(job.data.crawl_id)) { + const jobIDs = await getCrawlJobs(job.data.crawl_id); + + const jobs = (await Promise.all(jobIDs.map(async x => { + if (x === job.id) { + return { + async getState() { + return "completed" + }, + timestamp: Date.now(), + returnvalue: docs, + } + } + + const j = await getScrapeQueue().getJob(x); + + if (process.env.USE_DB_AUTHENTICATION === "true") { + const supabaseData = await supabaseGetJobById(j.id); + + if (supabaseData) { + j.returnvalue = supabaseData.docs; + } + } + + return j; + }))).sort((a, b) => a.timestamp - b.timestamp); + const jobStatuses = await Promise.all(jobs.map(x => x.getState())); + const jobStatus = sc.cancelled || jobStatuses.some(x => x === "failed") ? "failed" : "completed"; + + const fullDocs = jobs.map(x => Array.isArray(x.returnvalue) ? x.returnvalue[0] : x.returnvalue); + + await logJob({ + job_id: job.data.crawl_id, + success: jobStatus === "completed", + message: sc.cancelled ? "Cancelled" : message, + num_docs: fullDocs.length, + docs: [], + time_taken: (Date.now() - sc.createdAt) / 1000, + team_id: job.data.team_id, + mode: "crawl", + url: sc.originUrl, + crawlerOptions: sc.crawlerOptions, + pageOptions: sc.pageOptions, + origin: job.data.origin, + }); + + const data = { + success: jobStatus !== "failed", + result: { + links: fullDocs.map((doc) => { + return { + content: doc, + source: doc?.metadata?.sourceURL ?? doc?.url ?? "", + }; + }), + }, + project_id: job.data.project_id, + error: message /* etc... 
*/, + docs: fullDocs, + }; + + await callWebhook(job.data.team_id, job.data.crawl_id, data); + } + } + + Logger.info(`🐂 Job done ${job.id}`); return data; } catch (error) { Logger.error(`🐂 Job errored ${job.id} - ${error}`); - if (await getWebScraperQueue().isPaused()) { - Logger.debug("🐂Queue is paused, ignoring"); - return; - } if (error instanceof CustomError) { // Here we handle the error, then save the failed job @@ -192,6 +285,9 @@ async function processJob(job: Job, token: string) { }); } Logger.error(error); + if (error.stack) { + Logger.error(error.stack); + } logtail.error("Overall error ingesting", { job_id: job.id, @@ -205,26 +301,51 @@ async function processJob(job: Job, token: string) { error: "Something went wrong... Contact help@mendable.ai or try again." /* etc... */, }; - if (job.data.mode === "crawl") { - await callWebhook(job.data.team_id, job.id as string, data); + + if (job.data.mode === "crawl" || job.data.crawl_id) { + await callWebhook(job.data.team_id, job.data.crawl_id ?? job.id as string, data); + } + + if (job.data.crawl_id) { + await logJob({ + job_id: job.id as string, + success: false, + message: + typeof error === "string" + ? error + : error.message ?? "Something went wrong... Contact help@mendable.ai", + num_docs: 0, + docs: [], + time_taken: 0, + team_id: job.data.team_id, + mode: job.data.mode, + url: job.data.url, + crawlerOptions: job.data.crawlerOptions, + pageOptions: job.data.pageOptions, + origin: job.data.origin, + crawl_id: job.data.crawl_id, + }); + + const sc = await getCrawl(job.data.crawl_id); + + await logJob({ + job_id: job.data.crawl_id, + success: false, + message: + typeof error === "string" + ? error + : error.message ?? "Something went wrong... Contact help@mendable.ai", + num_docs: 0, + docs: [], + time_taken: 0, + team_id: job.data.team_id, + mode: "crawl", + url: sc ? sc.originUrl : job.data.url, + crawlerOptions: sc ? sc.crawlerOptions : job.data.crawlerOptions, + pageOptions: sc ? sc.pageOptions : job.data.pageOptions, + origin: job.data.origin, + }); } - await logJob({ - job_id: job.id as string, - success: false, - message: - typeof error === "string" - ? error - : error.message ?? "Something went wrong... 
Contact help@mendable.ai", - num_docs: 0, - docs: [], - time_taken: 0, - team_id: job.data.team_id, - mode: "crawl", - url: job.data.url, - crawlerOptions: job.data.crawlerOptions, - pageOptions: job.data.pageOptions, - origin: job.data.origin, - }); // done(null, data); return data; } diff --git a/apps/api/src/services/rate-limiter.ts b/apps/api/src/services/rate-limiter.ts index d90ab4f7..2682d0a2 100644 --- a/apps/api/src/services/rate-limiter.ts +++ b/apps/api/src/services/rate-limiter.ts @@ -14,18 +14,20 @@ const RATE_LIMITS = { standardNew: 10, standardnew: 10, growth: 50, + growthdouble: 50, }, scrape: { default: 20, free: 5, starter: 20, - standard: 50, + standard: 100, standardOld: 40, scale: 500, hobby: 10, - standardNew: 50, - standardnew: 50, - growth: 500, + standardNew: 100, + standardnew: 100, + growth: 1000, + growthdouble: 1000, }, search: { default: 20, @@ -38,6 +40,20 @@ const RATE_LIMITS = { standardNew: 50, standardnew: 50, growth: 500, + growthdouble: 500, + }, + map:{ + default: 20, + free: 5, + starter: 20, + standard: 40, + standardOld: 40, + scale: 500, + hobby: 10, + standardNew: 50, + standardnew: 50, + growth: 500, + growthdouble: 500, }, preview: { free: 5, diff --git a/apps/api/src/services/redlock.ts b/apps/api/src/services/redlock.ts new file mode 100644 index 00000000..9cbfc1fc --- /dev/null +++ b/apps/api/src/services/redlock.ts @@ -0,0 +1,29 @@ +import Redlock from "redlock"; +import Client from "ioredis"; + +export const redlock = new Redlock( + // You should have one client for each independent redis node + // or cluster. + [new Client(process.env.REDIS_RATE_LIMIT_URL)], + { + // The expected clock drift; for more details see: + // http://redis.io/topics/distlock + driftFactor: 0.01, // multiplied by lock ttl to determine drift time + + // The max number of times Redlock will attempt to lock a resource + // before erroring. + retryCount: 5, + + // the time in ms between attempts + retryDelay: 100, // time in ms + + // the max time in ms randomly added to retries + // to improve performance under high contention + // see https://www.awsarchitectureblog.com/2015/03/backoff.html + retryJitter: 200, // time in ms + + // The minimum remaining time on a lock before an extension is automatically + // attempted with the `using` API. + automaticExtensionThreshold: 500, // time in ms + } +); diff --git a/apps/api/src/services/supabase.ts b/apps/api/src/services/supabase.ts index d34f7b52..70ada12b 100644 --- a/apps/api/src/services/supabase.ts +++ b/apps/api/src/services/supabase.ts @@ -36,17 +36,9 @@ export const supabase_service: SupabaseClient = new Proxy( new SupabaseService(), { get: function (target, prop, receiver) { - if (process.env.USE_DB_AUTHENTICATION === "false") { - Logger.debug( - "Attempted to access Supabase client when it's not configured." - ); - } const client = target.getClient(); // If the Supabase client is not initialized, intercept property access to provide meaningful error feedback. if (client === null) { - Logger.error( - "Attempted to access Supabase client when it's not configured." 
- ); return () => { throw new Error("Supabase client is not configured."); }; diff --git a/apps/api/src/services/webhook.ts b/apps/api/src/services/webhook.ts index b0222ea3..2b008667 100644 --- a/apps/api/src/services/webhook.ts +++ b/apps/api/src/services/webhook.ts @@ -1,15 +1,15 @@ import { Logger } from "../../src/lib/logger"; import { supabase_service } from "./supabase"; -export const callWebhook = async (teamId: string, jobId: string,data: any) => { +export const callWebhook = async (teamId: string, jobId: string, data: any, specified?: string) => { try { const selfHostedUrl = process.env.SELF_HOSTED_WEBHOOK_URL?.replace("{{JOB_ID}}", jobId); const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; - let webhookUrl = selfHostedUrl; + let webhookUrl = specified ?? selfHostedUrl; - // Only fetch the webhook URL from the database if the self-hosted webhook URL is not set + // Only fetch the webhook URL from the database if the self-hosted webhook URL and specified webhook are not set // and the USE_DB_AUTHENTICATION environment variable is set to true - if (!selfHostedUrl && useDbAuthentication) { + if (!webhookUrl && useDbAuthentication) { const { data: webhooksData, error } = await supabase_service .from("webhooks") .select("url") diff --git a/apps/api/src/types.ts b/apps/api/src/types.ts index 3b28b765..70a8ab07 100644 --- a/apps/api/src/types.ts +++ b/apps/api/src/types.ts @@ -28,6 +28,9 @@ export interface WebScraperOptions { extractorOptions?: any; team_id: string; origin?: string; + crawl_id?: string; + sitemapped?: boolean; + webhook?: string; } export interface RunWebScraperParams { @@ -41,6 +44,7 @@ export interface RunWebScraperParams { onError: (error: Error) => void; team_id: string; bull_job_id: string; + priority?: number; } export interface RunWebScraperResult { @@ -65,6 +69,7 @@ export interface FirecrawlJob { extractor_options?: ExtractorOptions, num_tokens?: number, retry?: boolean, + crawl_id?: string; } export interface FirecrawlScrapeResponse { @@ -101,6 +106,7 @@ export enum RateLimiterMode { Scrape = "scrape", Preview = "preview", Search = "search", + Map = "map", } @@ -110,6 +116,7 @@ export interface AuthResponse { error?: string; status?: number; plan?: string; + api_key?: string; } diff --git a/apps/api/tsconfig.json b/apps/api/tsconfig.json index 84007570..a9bba04c 100644 --- a/apps/api/tsconfig.json +++ b/apps/api/tsconfig.json @@ -8,10 +8,6 @@ "sourceMap": true, "outDir": "./dist/src", "moduleResolution": "node", - "baseUrl": ".", - "paths": { - "*": ["node_modules/*", "src/types/*"], - } }, "include": ["src/","src/**/*", "services/db/supabase.ts", "utils/utils.ts", "services/db/supabaseEmbeddings.ts", "utils/EventEmmitter.ts", "src/services/queue-service.ts"] } diff --git a/apps/go-sdk/examples/.gitignore b/apps/go-sdk/examples/.gitignore new file mode 100644 index 00000000..6f72f892 --- /dev/null +++ b/apps/go-sdk/examples/.gitignore @@ -0,0 +1,25 @@ +# If you prefer the allow list template instead of the deny list, see community template: +# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore +# +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ + +# Go workspace file +go.work +go.work.sum + +# env file +.env diff --git a/apps/go-sdk/examples/LICENSE 
b/apps/go-sdk/examples/LICENSE new file mode 100644 index 00000000..25800a2e --- /dev/null +++ b/apps/go-sdk/examples/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 Mendable + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/apps/go-sdk/examples/example.go b/apps/go-sdk/examples/example.go new file mode 100644 index 00000000..48ba49da --- /dev/null +++ b/apps/go-sdk/examples/example.go @@ -0,0 +1,87 @@ +package main + +import ( + "encoding/json" + "fmt" + "log" + + "github.com/google/uuid" + "github.com/mendableai/firecrawl-go" +) + +func main() { + app, err := firecrawl.NewFirecrawlApp("fc-YOUR_API_KEY", "https://api.firecrawl.dev") + if err != nil { + log.Fatalf("Failed to create FirecrawlApp: %v", err) + } + + // Scrape a website + scrapeResult, err := app.ScrapeURL("firecrawl.dev", nil) + if err != nil { + log.Fatalf("Failed to scrape URL: %v", err) + } + fmt.Println(scrapeResult.Markdown) + + // Crawl a website + idempotencyKey := uuid.New().String() // optional idempotency key + crawlParams := map[string]any{ + "crawlerOptions": map[string]any{ + "excludes": []string{"blog/*"}, + }, + } + crawlResult, err := app.CrawlURL("mendable.ai", crawlParams, true, 2, idempotencyKey) + if err != nil { + log.Fatalf("Failed to crawl URL: %v", err) + } + jsonCrawlResult, err := json.MarshalIndent(crawlResult, "", " ") + if err != nil { + log.Fatalf("Failed to marshal crawl result: %v", err) + } + fmt.Println(string(jsonCrawlResult)) + + // LLM Extraction using JSON schema + jsonSchema := map[string]any{ + "type": "object", + "properties": map[string]any{ + "top": map[string]any{ + "type": "array", + "items": map[string]any{ + "type": "object", + "properties": map[string]any{ + "title": map[string]string{"type": "string"}, + "points": map[string]string{"type": "number"}, + "by": map[string]string{"type": "string"}, + "commentsURL": map[string]string{"type": "string"}, + }, + "required": []string{"title", "points", "by", "commentsURL"}, + }, + "minItems": 5, + "maxItems": 5, + "description": "Top 5 stories on Hacker News", + }, + }, + "required": []string{"top"}, + } + + llmExtractionParams := map[string]any{ + "extractorOptions": firecrawl.ExtractorOptions{ + ExtractionSchema: jsonSchema, + Mode: "llm-extraction", + }, + "pageOptions": map[string]any{ + "onlyMainContent": true, + }, + } + + llmExtractionResult, err := app.ScrapeURL("https://news.ycombinator.com", llmExtractionParams) + if err != nil { + log.Fatalf("Failed to perform LLM extraction: %v", err) + } + + // Pretty print the LLM extraction 
result + jsonResult, err := json.MarshalIndent(llmExtractionResult.LLMExtraction, "", " ") + if err != nil { + log.Fatalf("Failed to marshal LLM extraction result: %v", err) + } + fmt.Println(string(jsonResult)) +} diff --git a/apps/go-sdk/examples/go.mod b/apps/go-sdk/examples/go.mod new file mode 100644 index 00000000..3ea9b92f --- /dev/null +++ b/apps/go-sdk/examples/go.mod @@ -0,0 +1,9 @@ +module github.com/mendableai/firecrawl-go-examples + +go 1.22.5 + +replace github.com/mendableai/firecrawl => ../ + +require github.com/google/uuid v1.6.0 + +require github.com/mendableai/firecrawl-go v0.0.0-20240813205613-366e8d8dcf46 // indirect diff --git a/apps/go-sdk/examples/go.sum b/apps/go-sdk/examples/go.sum new file mode 100644 index 00000000..760ca553 --- /dev/null +++ b/apps/go-sdk/examples/go.sum @@ -0,0 +1,14 @@ +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/mendableai/firecrawl-go v0.0.0-20240813205613-366e8d8dcf46 h1:461um7fbSQYj2E3ETl8GINuRg5MTY3BdjMnogwUIhBs= +github.com/mendableai/firecrawl-go v0.0.0-20240813205613-366e8d8dcf46/go.mod h1:mTGbJ37fy43aaqonp/tdpzCH516jHFw/XVvfFi4QXHo= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/apps/go-sdk/firecrawl/.env.example b/apps/go-sdk/firecrawl/.env.example new file mode 100644 index 00000000..772a6243 --- /dev/null +++ b/apps/go-sdk/firecrawl/.env.example @@ -0,0 +1,2 @@ +API_URL=http://localhost:3002 +TEST_API_KEY=fc-YOUR-API-KEY diff --git a/apps/go-sdk/firecrawl/.gitignore b/apps/go-sdk/firecrawl/.gitignore new file mode 100644 index 00000000..db27dc80 --- /dev/null +++ b/apps/go-sdk/firecrawl/.gitignore @@ -0,0 +1,2 @@ +.env +vendor \ No newline at end of file diff --git a/apps/go-sdk/firecrawl/LICENSE b/apps/go-sdk/firecrawl/LICENSE new file mode 100644 index 00000000..2635155f --- /dev/null +++ b/apps/go-sdk/firecrawl/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 Sideguide Technologies Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/apps/go-sdk/firecrawl/README.md b/apps/go-sdk/firecrawl/README.md new file mode 100644 index 00000000..7e17c10f --- /dev/null +++ b/apps/go-sdk/firecrawl/README.md @@ -0,0 +1,189 @@ +# Firecrawl Go SDK + +The Firecrawl Go SDK is a library that allows you to easily scrape and crawl websites, and output the data in a format ready for use with language models (LLMs). It provides a simple and intuitive interface for interacting with the Firecrawl API. + +## Installation + +To install the Firecrawl Go SDK, use `go get`: + +```bash +go get github.com/mendableai/firecrawl +``` + +## Usage + +1. Get an API key from [firecrawl.dev](https://firecrawl.dev) +2. Set the API key as an environment variable named `FIRECRAWL_API_KEY` or pass it as a parameter to the `FirecrawlApp` struct. + + +Here's an example of how to use the SDK with error handling: + +```go +import ( + "fmt" + "log" + + "github.com/mendableai/firecrawl/firecrawl" +) + +func main() { + // Initialize the FirecrawlApp with your API key + app, err := firecrawl.NewFirecrawlApp("YOUR_API_KEY") + if err != nil { + log.Fatalf("Failed to initialize FirecrawlApp: %v", err) + } + + // Scrape a single URL + url := "https://mendable.ai" + scrapedData, err := app.ScrapeURL(url, nil) + if err != nil { + log.Fatalf("Error occurred while scraping: %v", err) + } + fmt.Println(scrapedData) + + // Crawl a website + crawlUrl := "https://mendable.ai" + params := map[string]any{ + "pageOptions": map[string]any{ + "onlyMainContent": true, + }, + } + + crawlResult, err := app.CrawlURL(crawlUrl, params) + if err != nil { + log.Fatalf("Error occurred while crawling: %v", err) + } + fmt.Println(crawlResult) +} +``` + +### Scraping a URL + +To scrape a single URL with error handling, use the `ScrapeURL` method. It takes the URL as a parameter and returns the scraped data as a struct. + +```go +url := "https://mendable.ai" +scrapedData, err := app.ScrapeURL(url, nil) +if err != nil { + log.Fatalf("Failed to scrape URL: %v", err) +} +fmt.Println(scrapedData) +``` + +### Extracting structured data from a URL + +With LLM extraction, you can easily extract structured data from any URL.
Here is how to use it: + +```go +jsonSchema := map[string]any{ + "type": "object", + "properties": map[string]any{ + "top": map[string]any{ + "type": "array", + "items": map[string]any{ + "type": "object", + "properties": map[string]any{ + "title": map[string]string{"type": "string"}, + "points": map[string]string{"type": "number"}, + "by": map[string]string{"type": "string"}, + "commentsURL": map[string]string{"type": "string"}, + }, + "required": []string{"title", "points", "by", "commentsURL"}, + }, + "minItems": 5, + "maxItems": 5, + "description": "Top 5 stories on Hacker News", + }, + }, + "required": []string{"top"}, +} + +llmExtractionParams := map[string]any{ + "extractorOptions": firecrawl.ExtractorOptions{ + ExtractionSchema: jsonSchema, + }, +} + +scrapeResult, err := app.ScrapeURL("https://news.ycombinator.com", llmExtractionParams) +if err != nil { + log.Fatalf("Failed to perform LLM extraction: %v", err) +} +fmt.Println(scrapeResult) +``` + +### Search for a query + +To search the web, get the most relevant results, scrape each page, and return the markdown, use the `Search` method. It takes the query as a parameter and returns the search results. + + +```go +query := "what is mendable?" +searchResult, err := app.Search(query) +if err != nil { + log.Fatalf("Failed to search: %v", err) +} +fmt.Println(searchResult) +``` + +### Crawling a Website + +To crawl a website, use the `CrawlURL` method. It takes the starting URL and optional parameters as arguments. The `params` argument allows you to specify additional options for the crawl job, such as the maximum number of pages to crawl, allowed domains, and the output format. + +```go +crawlParams := map[string]any{ + "crawlerOptions": map[string]any{ + "excludes": []string{"blog/*"}, + "includes": []string{}, // leave empty for all pages + "limit": 1000, + }, + "pageOptions": map[string]any{ + "onlyMainContent": true, + }, +} +crawlResult, err := app.CrawlURL("mendable.ai", crawlParams, true, 2, idempotencyKey) +if err != nil { + log.Fatalf("Failed to crawl URL: %v", err) +} +fmt.Println(crawlResult) +``` + +### Checking Crawl Status + +To check the status of a crawl job, use the `CheckCrawlStatus` method. It takes the job ID as a parameter and returns the current status of the crawl job. + +```go +status, err := app.CheckCrawlStatus(jobId) +if err != nil { + log.Fatalf("Failed to check crawl status: %v", err) +} +fmt.Println(status) +``` + +### Canceling a Crawl Job + +To cancel a crawl job, use the `CancelCrawlJob` method. It takes the job ID as a parameter and returns the cancellation status of the crawl job. + +```go +canceled, err := app.CancelCrawlJob(jobId) +if err != nil { + log.Fatalf("Failed to cancel crawl job: %v", err) +} +fmt.Println(canceled) +``` + +## Error Handling + +The SDK handles errors returned by the Firecrawl API and surfaces them as Go errors. If a request fails, the method returns a non-nil error with a descriptive message. + +## Contributing + +Contributions to the Firecrawl Go SDK are welcome! If you find any issues or have suggestions for improvements, please open an issue or submit a pull request on the GitHub repository. + +## License + +The Firecrawl Go SDK is licensed under the MIT License.
This means you are free to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the SDK, subject to the following conditions: + +- The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +Please note that while this SDK is MIT licensed, it is part of a larger project which may be under different licensing terms. Always refer to the license information in the root directory of the main project for overall licensing details. diff --git a/apps/go-sdk/firecrawl/firecrawl.go b/apps/go-sdk/firecrawl/firecrawl.go new file mode 100644 index 00000000..9a9dcfef --- /dev/null +++ b/apps/go-sdk/firecrawl/firecrawl.go @@ -0,0 +1,584 @@ +// Package firecrawl provides a client for interacting with the Firecrawl API. +package firecrawl + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "math" + "net/http" + "os" + "time" +) + +// FirecrawlDocumentMetadata represents metadata for a Firecrawl document +type FirecrawlDocumentMetadata struct { + Title string `json:"title,omitempty"` + Description string `json:"description,omitempty"` + Language string `json:"language,omitempty"` + Keywords string `json:"keywords,omitempty"` + Robots string `json:"robots,omitempty"` + OGTitle string `json:"ogTitle,omitempty"` + OGDescription string `json:"ogDescription,omitempty"` + OGURL string `json:"ogUrl,omitempty"` + OGImage string `json:"ogImage,omitempty"` + OGAudio string `json:"ogAudio,omitempty"` + OGDeterminer string `json:"ogDeterminer,omitempty"` + OGLocale string `json:"ogLocale,omitempty"` + OGLocaleAlternate []string `json:"ogLocaleAlternate,omitempty"` + OGSiteName string `json:"ogSiteName,omitempty"` + OGVideo string `json:"ogVideo,omitempty"` + DCTermsCreated string `json:"dctermsCreated,omitempty"` + DCDateCreated string `json:"dcDateCreated,omitempty"` + DCDate string `json:"dcDate,omitempty"` + DCTermsType string `json:"dctermsType,omitempty"` + DCType string `json:"dcType,omitempty"` + DCTermsAudience string `json:"dctermsAudience,omitempty"` + DCTermsSubject string `json:"dctermsSubject,omitempty"` + DCSubject string `json:"dcSubject,omitempty"` + DCDescription string `json:"dcDescription,omitempty"` + DCTermsKeywords string `json:"dctermsKeywords,omitempty"` + ModifiedTime string `json:"modifiedTime,omitempty"` + PublishedTime string `json:"publishedTime,omitempty"` + ArticleTag string `json:"articleTag,omitempty"` + ArticleSection string `json:"articleSection,omitempty"` + SourceURL string `json:"sourceURL,omitempty"` + PageStatusCode int `json:"pageStatusCode,omitempty"` + PageError string `json:"pageError,omitempty"` +} + +// FirecrawlDocument represents a document in Firecrawl +type FirecrawlDocument struct { + ID string `json:"id,omitempty"` + URL string `json:"url,omitempty"` + Content string `json:"content"` + Markdown string `json:"markdown,omitempty"` + HTML string `json:"html,omitempty"` + LLMExtraction map[string]any `json:"llm_extraction,omitempty"` + CreatedAt *time.Time `json:"createdAt,omitempty"` + UpdatedAt *time.Time 
`json:"updatedAt,omitempty"` + Type string `json:"type,omitempty"` + Metadata *FirecrawlDocumentMetadata `json:"metadata,omitempty"` + ChildrenLinks []string `json:"childrenLinks,omitempty"` + Provider string `json:"provider,omitempty"` + Warning string `json:"warning,omitempty"` + Index int `json:"index,omitempty"` +} + +// ExtractorOptions represents options for extraction. +type ExtractorOptions struct { + Mode string `json:"mode,omitempty"` + ExtractionPrompt string `json:"extractionPrompt,omitempty"` + ExtractionSchema any `json:"extractionSchema,omitempty"` +} + +// ScrapeResponse represents the response for scraping operations +type ScrapeResponse struct { + Success bool `json:"success"` + Data *FirecrawlDocument `json:"data,omitempty"` +} + +// SearchResponse represents the response for searching operations +type SearchResponse struct { + Success bool `json:"success"` + Data []*FirecrawlDocument `json:"data,omitempty"` +} + +// CrawlResponse represents the response for crawling operations +type CrawlResponse struct { + Success bool `json:"success"` + JobID string `json:"jobId,omitempty"` + Data []*FirecrawlDocument `json:"data,omitempty"` +} + +// JobStatusResponse represents the response for checking crawl job status +type JobStatusResponse struct { + Success bool `json:"success"` + Status string `json:"status"` + Current int `json:"current,omitempty"` + CurrentURL string `json:"current_url,omitempty"` + CurrentStep string `json:"current_step,omitempty"` + Total int `json:"total,omitempty"` + JobID string `json:"jobId,omitempty"` + Data []*FirecrawlDocument `json:"data,omitempty"` + PartialData []*FirecrawlDocument `json:"partial_data,omitempty"` +} + +// CancelCrawlJobResponse represents the response for canceling a crawl job +type CancelCrawlJobResponse struct { + Success bool `json:"success"` + Status string `json:"status"` +} + +// requestOptions represents options for making requests. +type requestOptions struct { + retries int + backoff int +} + +// requestOption is a functional option type for requestOptions. +type requestOption func(*requestOptions) + +// newRequestOptions creates a new requestOptions instance with the provided options. +// +// Parameters: +// - opts: Optional request options. +// +// Returns: +// - *requestOptions: A new instance of requestOptions with the provided options. +func newRequestOptions(opts ...requestOption) *requestOptions { + options := &requestOptions{retries: 1} + for _, opt := range opts { + opt(options) + } + return options +} + +// withRetries sets the number of retries for a request. +// +// Parameters: +// - retries: The number of retries to be performed. +// +// Returns: +// - requestOption: A functional option that sets the number of retries for a request. +func withRetries(retries int) requestOption { + return func(opts *requestOptions) { + opts.retries = retries + } +} + +// withBackoff sets the backoff interval for a request. +// +// Parameters: +// - backoff: The backoff interval (in milliseconds) to be used for retries. +// +// Returns: +// - requestOption: A functional option that sets the backoff interval for a request. +func withBackoff(backoff int) requestOption { + return func(opts *requestOptions) { + opts.backoff = backoff + } +} + +// FirecrawlApp represents a client for the Firecrawl API. +type FirecrawlApp struct { + APIKey string + APIURL string + Client *http.Client +} + +// NewFirecrawlApp creates a new instance of FirecrawlApp with the provided API key and API URL. 
+// If the API key or API URL is not provided, it attempts to retrieve them from environment variables. +// If the API key is still not found, it returns an error. +// +// Parameters: +// - apiKey: The API key for authenticating with the Firecrawl API. If empty, it will be retrieved from the FIRECRAWL_API_KEY environment variable. +// - apiURL: The base URL for the Firecrawl API. If empty, it will be retrieved from the FIRECRAWL_API_URL environment variable, defaulting to "https://api.firecrawl.dev". +// +// Returns: +// - *FirecrawlApp: A new instance of FirecrawlApp configured with the provided or retrieved API key and API URL. +// - error: An error if the API key is not provided or retrieved. +func NewFirecrawlApp(apiKey, apiURL string) (*FirecrawlApp, error) { + if apiKey == "" { + apiKey = os.Getenv("FIRECRAWL_API_KEY") + if apiKey == "" { + return nil, fmt.Errorf("no API key provided") + } + } + + if apiURL == "" { + apiURL = os.Getenv("FIRECRAWL_API_URL") + if apiURL == "" { + apiURL = "https://api.firecrawl.dev" + } + } + + client := &http.Client{ + Timeout: 60 * time.Second, + } + + return &FirecrawlApp{ + APIKey: apiKey, + APIURL: apiURL, + Client: client, + }, nil +} + +// ScrapeURL scrapes the content of the specified URL using the Firecrawl API. +// +// Parameters: +// - url: The URL to be scraped. +// - params: Optional parameters for the scrape request, including extractor options for LLM extraction. +// +// Returns: +// - *FirecrawlDocument: The scraped document data. +// - error: An error if the scrape request fails. +func (app *FirecrawlApp) ScrapeURL(url string, params map[string]any) (*FirecrawlDocument, error) { + headers := app.prepareHeaders("") + scrapeBody := map[string]any{"url": url} + + if params != nil { + if extractorOptions, ok := params["extractorOptions"].(ExtractorOptions); ok { + if schema, ok := extractorOptions.ExtractionSchema.(interface{ schema() any }); ok { + extractorOptions.ExtractionSchema = schema.schema() + } + if extractorOptions.Mode == "" { + extractorOptions.Mode = "llm-extraction" + } + scrapeBody["extractorOptions"] = extractorOptions + } + + for key, value := range params { + if key != "extractorOptions" { + scrapeBody[key] = value + } + } + } + + resp, err := app.makeRequest( + http.MethodPost, + fmt.Sprintf("%s/v0/scrape", app.APIURL), + scrapeBody, + headers, + "scrape URL", + ) + if err != nil { + return nil, err + } + + var scrapeResponse ScrapeResponse + err = json.Unmarshal(resp, &scrapeResponse) + if err != nil { + return nil, err + } + + if scrapeResponse.Success { + return scrapeResponse.Data, nil + } + + return nil, fmt.Errorf("failed to scrape URL") +} + +// Search performs a search query using the Firecrawl API and returns the search results. +// +// Parameters: +// - query: The search query string. +// - params: Optional parameters for the search request. +// +// Returns: +// - []*FirecrawlDocument: A slice of FirecrawlDocument containing the search results. +// - error: An error if the search request fails. 
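+//
+// A minimal usage sketch (hypothetical values):
+//
+//	results, err := app.Search("what is mendable?", nil)
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	for _, doc := range results {
+//		fmt.Println(doc.Content)
+//	}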
+func (app *FirecrawlApp) Search(query string, params map[string]any) ([]*FirecrawlDocument, error) { + headers := app.prepareHeaders("") + searchBody := map[string]any{"query": query} + for k, v := range params { + searchBody[k] = v + } + + resp, err := app.makeRequest( + http.MethodPost, + fmt.Sprintf("%s/v0/search", app.APIURL), + searchBody, + headers, + "search", + ) + if err != nil { + return nil, err + } + + var searchResponse SearchResponse + err = json.Unmarshal(resp, &searchResponse) + if err != nil { + return nil, err + } + + if searchResponse.Success { + return searchResponse.Data, nil + } + + return nil, fmt.Errorf("failed to search") +} + +// CrawlURL starts a crawl job for the specified URL using the Firecrawl API. +// +// Parameters: +// - url: The URL to crawl. +// - params: Optional parameters for the crawl request. +// - waitUntilDone: If true, the method will wait until the crawl job is completed before returning. +// - pollInterval: The interval (in seconds) at which to poll the job status if waitUntilDone is true. +// - idempotencyKey: An optional idempotency key to ensure the request is idempotent. +// +// Returns: +// - any: The job ID if waitUntilDone is false, or the crawl result if waitUntilDone is true. +// - error: An error if the crawl request fails. +func (app *FirecrawlApp) CrawlURL(url string, params map[string]any, waitUntilDone bool, pollInterval int, idempotencyKey string) (any, error) { + headers := app.prepareHeaders(idempotencyKey) + crawlBody := map[string]any{"url": url} + for k, v := range params { + crawlBody[k] = v + } + + resp, err := app.makeRequest( + http.MethodPost, + fmt.Sprintf("%s/v0/crawl", app.APIURL), + crawlBody, + headers, + "start crawl job", + withRetries(3), + withBackoff(500), + ) + if err != nil { + return nil, err + } + + var crawlResponse CrawlResponse + err = json.Unmarshal(resp, &crawlResponse) + if err != nil { + return nil, err + } + + if waitUntilDone { + return app.monitorJobStatus(crawlResponse.JobID, headers, pollInterval) + } + + if crawlResponse.JobID == "" { + return nil, fmt.Errorf("failed to get job ID") + } + + return crawlResponse.JobID, nil +} + +// CheckCrawlStatus checks the status of a crawl job using the Firecrawl API. +// +// Parameters: +// - jobID: The ID of the crawl job to check. +// +// Returns: +// - *JobStatusResponse: The status of the crawl job. +// - error: An error if the crawl status check request fails. +func (app *FirecrawlApp) CheckCrawlStatus(jobID string) (*JobStatusResponse, error) { + headers := app.prepareHeaders("") + resp, err := app.makeRequest( + http.MethodGet, + fmt.Sprintf("%s/v0/crawl/status/%s", app.APIURL, jobID), + nil, + headers, + "check crawl status", + withRetries(3), + withBackoff(500), + ) + if err != nil { + return nil, err + } + + var jobStatusResponse JobStatusResponse + err = json.Unmarshal(resp, &jobStatusResponse) + if err != nil { + return nil, err + } + + return &jobStatusResponse, nil +} + +// CancelCrawlJob cancels a crawl job using the Firecrawl API. +// +// Parameters: +// - jobID: The ID of the crawl job to cancel. +// +// Returns: +// - string: The status of the crawl job after cancellation. +// - error: An error if the crawl job cancellation request fails. 
+func (app *FirecrawlApp) CancelCrawlJob(jobID string) (string, error) { + headers := app.prepareHeaders("") + resp, err := app.makeRequest( + http.MethodDelete, + fmt.Sprintf("%s/v0/crawl/cancel/%s", app.APIURL, jobID), + nil, + headers, + "cancel crawl job", + ) + if err != nil { + return "", err + } + + var cancelCrawlJobResponse CancelCrawlJobResponse + err = json.Unmarshal(resp, &cancelCrawlJobResponse) + if err != nil { + return "", err + } + + return cancelCrawlJobResponse.Status, nil +} + +// prepareHeaders prepares the headers for an HTTP request. +// +// Parameters: +// - idempotencyKey: A string representing the idempotency key to be included in the headers. +// If the idempotency key is an empty string, it will not be included in the headers. +// +// Returns: +// - map[string]string: A map containing the headers for the HTTP request. +func (app *FirecrawlApp) prepareHeaders(idempotencyKey string) map[string]string { + headers := map[string]string{ + "Content-Type": "application/json", + "Authorization": fmt.Sprintf("Bearer %s", app.APIKey), + } + if idempotencyKey != "" { + headers["x-idempotency-key"] = idempotencyKey + } + return headers +} + +// makeRequest makes a request to the specified URL with the provided method, data, headers, and options. +// +// Parameters: +// - method: The HTTP method to use for the request (e.g., "GET", "POST", "DELETE"). +// - url: The URL to send the request to. +// - data: The data to be sent in the request body. +// - headers: The headers to be included in the request. +// - action: A string describing the action being performed. +// - opts: Optional request options. +// +// Returns: +// - []byte: The response body from the request. +// - error: An error if the request fails. +func (app *FirecrawlApp) makeRequest(method, url string, data map[string]any, headers map[string]string, action string, opts ...requestOption) ([]byte, error) { + var body []byte + var err error + if data != nil { + body, err = json.Marshal(data) + if err != nil { + return nil, err + } + } + + req, err := http.NewRequest(method, url, bytes.NewBuffer(body)) + if err != nil { + return nil, err + } + + for key, value := range headers { + req.Header.Set(key, value) + } + + var resp *http.Response + options := newRequestOptions(opts...) + for i := 0; i < options.retries; i++ { + resp, err = app.Client.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != 502 { + break + } + + time.Sleep(time.Duration(math.Pow(2, float64(i))) * time.Duration(options.backoff) * time.Millisecond) + } + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + statusCode := resp.StatusCode + if statusCode != 200 { + return nil, app.handleError(statusCode, respBody, action) + } + + return respBody, nil +} + +// monitorJobStatus monitors the status of a crawl job using the Firecrawl API. +// +// Parameters: +// - jobID: The ID of the crawl job to monitor. +// - headers: The headers to be included in the request. +// - pollInterval: The interval (in seconds) at which to poll the job status. +// +// Returns: +// - []*FirecrawlDocument: The crawl result if the job is completed. +// - error: An error if the crawl status check request fails. 
+func (app *FirecrawlApp) monitorJobStatus(jobID string, headers map[string]string, pollInterval int) ([]*FirecrawlDocument, error) { + attempts := 0 + for { + resp, err := app.makeRequest( + http.MethodGet, + fmt.Sprintf("%s/v0/crawl/status/%s", app.APIURL, jobID), + nil, + headers, + "check crawl status", + withRetries(3), + withBackoff(500), + ) + if err != nil { + return nil, err + } + + var statusData JobStatusResponse + err = json.Unmarshal(resp, &statusData) + if err != nil { + return nil, err + } + + status := statusData.Status + if status == "" { + return nil, fmt.Errorf("invalid status in response") + } + + if status == "completed" { + if statusData.Data != nil { + return statusData.Data, nil + } + attempts++ + if attempts > 3 { + return nil, fmt.Errorf("crawl job completed but no data was returned") + } + } else if status == "active" || status == "paused" || status == "pending" || status == "queued" || status == "waiting" { + pollInterval = max(pollInterval, 2) + time.Sleep(time.Duration(pollInterval) * time.Second) + } else { + return nil, fmt.Errorf("crawl job failed or was stopped. Status: %s", status) + } + } +} + +// handleError handles errors returned by the Firecrawl API. +// +// Parameters: +// - resp: The HTTP response object. +// - body: The response body from the HTTP response. +// - action: A string describing the action being performed. +// +// Returns: +// - error: An error describing the failure reason. +func (app *FirecrawlApp) handleError(statusCode int, body []byte, action string) error { + var errorData map[string]any + err := json.Unmarshal(body, &errorData) + if err != nil { + return fmt.Errorf("failed to parse error response: %v", err) + } + + errorMessage, _ := errorData["error"].(string) + if errorMessage == "" { + errorMessage = "No additional error details provided." + } + + var message string + switch statusCode { + case 402: + message = fmt.Sprintf("Payment Required: Failed to %s. %s", action, errorMessage) + case 408: + message = fmt.Sprintf("Request Timeout: Failed to %s as the request timed out. %s", action, errorMessage) + case 409: + message = fmt.Sprintf("Conflict: Failed to %s due to a conflict. %s", action, errorMessage) + case 500: + message = fmt.Sprintf("Internal Server Error: Failed to %s. %s", action, errorMessage) + default: + message = fmt.Sprintf("Unexpected error during %s: Status code %d. 
%s", action, statusCode, errorMessage) + } + + return fmt.Errorf(message) +} diff --git a/apps/go-sdk/firecrawl/firecrawl_test.go b/apps/go-sdk/firecrawl/firecrawl_test.go new file mode 100644 index 00000000..9d56c7ac --- /dev/null +++ b/apps/go-sdk/firecrawl/firecrawl_test.go @@ -0,0 +1,292 @@ +package firecrawl + +import ( + "log" + "os" + "testing" + "time" + + "github.com/google/uuid" + "github.com/joho/godotenv" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +var API_URL string +var TEST_API_KEY string + +func init() { + err := godotenv.Load("../.env") + if err != nil { + log.Fatalf("Error loading .env file: %v", err) + } + API_URL = os.Getenv("API_URL") + TEST_API_KEY = os.Getenv("TEST_API_KEY") +} + +func TestNoAPIKey(t *testing.T) { + _, err := NewFirecrawlApp("", API_URL) + assert.Error(t, err) + assert.Contains(t, err.Error(), "no API key provided") +} + +func TestScrapeURLInvalidAPIKey(t *testing.T) { + app, err := NewFirecrawlApp("invalid_api_key", API_URL) + require.NoError(t, err) + + _, err = app.ScrapeURL("https://firecrawl.dev", nil) + assert.Error(t, err) + assert.Contains(t, err.Error(), "Unexpected error during scrape URL: Status code 401. Unauthorized: Invalid token") +} + +func TestBlocklistedURL(t *testing.T) { + app, err := NewFirecrawlApp(TEST_API_KEY, API_URL) + require.NoError(t, err) + + _, err = app.ScrapeURL("https://facebook.com/fake-test", nil) + assert.Error(t, err) + assert.Contains(t, err.Error(), "Unexpected error during scrape URL: Status code 403. Firecrawl currently does not support social media scraping due to policy restrictions.") +} + +func TestSuccessfulResponseWithValidPreviewToken(t *testing.T) { + app, err := NewFirecrawlApp("this_is_just_a_preview_token", API_URL) + require.NoError(t, err) + + response, err := app.ScrapeURL("https://roastmywebsite.ai", nil) + require.NoError(t, err) + assert.NotNil(t, response) + + assert.Contains(t, response.Content, "_Roast_") +} + +func TestScrapeURLE2E(t *testing.T) { + app, err := NewFirecrawlApp(TEST_API_KEY, API_URL) + require.NoError(t, err) + + response, err := app.ScrapeURL("https://roastmywebsite.ai", nil) + require.NoError(t, err) + assert.NotNil(t, response) + + assert.Contains(t, response.Content, "_Roast_") + assert.NotEqual(t, response.Markdown, "") + assert.NotNil(t, response.Metadata) + assert.Equal(t, response.HTML, "") +} + +func TestSuccessfulResponseWithValidAPIKeyAndIncludeHTML(t *testing.T) { + app, err := NewFirecrawlApp(TEST_API_KEY, API_URL) + require.NoError(t, err) + + params := map[string]any{ + "pageOptions": map[string]any{ + "includeHtml": true, + }, + } + response, err := app.ScrapeURL("https://roastmywebsite.ai", params) + require.NoError(t, err) + assert.NotNil(t, response) + + assert.Contains(t, response.Content, "_Roast_") + assert.Contains(t, response.Markdown, "_Roast_") + assert.Contains(t, response.HTML, " setTimeout(resolve, 1000)); // wait 1 second } -console.log(job.data[0].content); - -// Search for a query: -const query = 'what is mendable?' 
-const searchResult = await app.search(query) -console.log(searchResult) - -// LLM Extraction: -// Define schema to extract contents into using zod schema -const zodSchema = z.object({ - top: z - .array( - z.object({ - title: z.string(), - points: z.number(), - by: z.string(), - commentsURL: z.string(), - }) - ) - .length(5) - .describe("Top 5 stories on Hacker News"), -}); - -let llmExtractionResult = await app.scrapeUrl("https://news.ycombinator.com", { - extractorOptions: { extractionSchema: zodSchema }, -}); - -console.log(llmExtractionResult.data.llm_extraction); - -// Define schema to extract contents into using json schema -const jsonSchema = { - "type": "object", - "properties": { - "top": { - "type": "array", - "items": { - "type": "object", - "properties": { - "title": {"type": "string"}, - "points": {"type": "number"}, - "by": {"type": "string"}, - "commentsURL": {"type": "string"} - }, - "required": ["title", "points", "by", "commentsURL"] - }, - "minItems": 5, - "maxItems": 5, - "description": "Top 5 stories on Hacker News" - } - }, - "required": ["top"] +if (job.data) { + console.log(job.data[0].markdown); } -llmExtractionResult = await app.scrapeUrl("https://news.ycombinator.com", { - extractorOptions: { extractionSchema: jsonSchema }, -}); - -console.log(llmExtractionResult.data.llm_extraction); \ No newline at end of file +const mapResult = await app.map('https://firecrawl.dev'); +console.log(mapResult) diff --git a/apps/js-sdk/example.ts b/apps/js-sdk/example.ts index f314c080..056695b8 100644 --- a/apps/js-sdk/example.ts +++ b/apps/js-sdk/example.ts @@ -1,5 +1,5 @@ -import FirecrawlApp, { JobStatusResponse } from './firecrawl/src/index' //'@mendable/firecrawl-js'; -import { z } from "zod"; +import FirecrawlApp from './firecrawl/src/index' //'@mendable/firecrawl-js'; +import { CrawlStatusResponse } from './firecrawl/src/index'; const app = new FirecrawlApp({apiKey: "fc-YOUR_API_KEY"}); @@ -7,7 +7,7 @@ const app = new FirecrawlApp({apiKey: "fc-YOUR_API_KEY"}); const scrapeResult = await app.scrapeUrl('firecrawl.dev'); if (scrapeResult.data) { - console.log(scrapeResult.data.content) + console.log(scrapeResult.data.markdown) } // Crawl a website: @@ -17,9 +17,9 @@ console.log(crawlResult) const jobId: string = await crawlResult['jobId']; console.log(jobId); -let job: JobStatusResponse; +let job: CrawlStatusResponse; while (true) { - job = await app.checkCrawlStatus(jobId); + job = await app.checkCrawlStatus(jobId) as CrawlStatusResponse; if (job.status === 'completed') { break; } @@ -27,66 +27,8 @@ while (true) { } if (job.data) { - console.log(job.data[0].content); -} - -// Search for a query: -const query = 'what is mendable?' 
-const searchResult = await app.search(query) - -// LLM Extraction: -// Define schema to extract contents into using zod schema -const zodSchema = z.object({ - top: z - .array( - z.object({ - title: z.string(), - points: z.number(), - by: z.string(), - commentsURL: z.string(), - }) - ) - .length(5) - .describe("Top 5 stories on Hacker News"), -}); - -let llmExtractionResult = await app.scrapeUrl("https://news.ycombinator.com", { - extractorOptions: { extractionSchema: zodSchema }, -}); - -if (llmExtractionResult.data) { - console.log(llmExtractionResult.data.llm_extraction); -} - -// Define schema to extract contents into using json schema -const jsonSchema = { - "type": "object", - "properties": { - "top": { - "type": "array", - "items": { - "type": "object", - "properties": { - "title": {"type": "string"}, - "points": {"type": "number"}, - "by": {"type": "string"}, - "commentsURL": {"type": "string"} - }, - "required": ["title", "points", "by", "commentsURL"] - }, - "minItems": 5, - "maxItems": 5, - "description": "Top 5 stories on Hacker News" - } - }, - "required": ["top"] -} - -llmExtractionResult = await app.scrapeUrl("https://news.ycombinator.com", { - extractorOptions: { extractionSchema: jsonSchema }, -}); - -if (llmExtractionResult.data) { - console.log(llmExtractionResult.data.llm_extraction); + console.log(job.data[0].markdown); } +const mapResult = await app.map('https://firecrawl.dev'); +console.log(mapResult) diff --git a/apps/js-sdk/exampleV0.js b/apps/js-sdk/exampleV0.js new file mode 100644 index 00000000..7f198598 --- /dev/null +++ b/apps/js-sdk/exampleV0.js @@ -0,0 +1,85 @@ +import { v4 as uuidv4 } from 'uuid'; +import FirecrawlApp from '@mendable/firecrawl-js'; +import { z } from "zod"; + +const app = new FirecrawlApp({apiKey: "fc-YOUR_API_KEY"}); + +// Scrape a website: +const scrapeResult = await app.scrapeUrl('firecrawl.dev'); +console.log(scrapeResult.data.content) + +// Crawl a website: +const idempotencyKey = uuidv4(); // optional +const crawlResult = await app.crawlUrl('mendable.ai', {crawlerOptions: {excludes: ['blog/*'], limit: 5}}, false, 2, idempotencyKey); +console.log(crawlResult) + +const jobId = await crawlResult['jobId']; +console.log(jobId); + +let job; +while (true) { + job = await app.checkCrawlStatus(jobId); + if (job.status == 'completed') { + break; + } + await new Promise(resolve => setTimeout(resolve, 1000)); // wait 1 second +} + +console.log(job.data[0].content); + +// Search for a query: +const query = 'what is mendable?' 
+const searchResult = await app.search(query) +console.log(searchResult) + +// LLM Extraction: +// Define schema to extract contents into using zod schema +const zodSchema = z.object({ + top: z + .array( + z.object({ + title: z.string(), + points: z.number(), + by: z.string(), + commentsURL: z.string(), + }) + ) + .length(5) + .describe("Top 5 stories on Hacker News"), +}); + +let llmExtractionResult = await app.scrapeUrl("https://news.ycombinator.com", { + extractorOptions: { extractionSchema: zodSchema }, +}); + +console.log(llmExtractionResult.data.llm_extraction); + +// Define schema to extract contents into using json schema +const jsonSchema = { + "type": "object", + "properties": { + "top": { + "type": "array", + "items": { + "type": "object", + "properties": { + "title": {"type": "string"}, + "points": {"type": "number"}, + "by": {"type": "string"}, + "commentsURL": {"type": "string"} + }, + "required": ["title", "points", "by", "commentsURL"] + }, + "minItems": 5, + "maxItems": 5, + "description": "Top 5 stories on Hacker News" + } + }, + "required": ["top"] +} + +llmExtractionResult = await app.scrapeUrl("https://news.ycombinator.com", { + extractorOptions: { extractionSchema: jsonSchema }, +}); + +console.log(llmExtractionResult.data.llm_extraction); \ No newline at end of file diff --git a/apps/js-sdk/exampleV0.ts b/apps/js-sdk/exampleV0.ts new file mode 100644 index 00000000..58c46b6a --- /dev/null +++ b/apps/js-sdk/exampleV0.ts @@ -0,0 +1,95 @@ +import FirecrawlApp, { ScrapeResponseV0, CrawlStatusResponseV0, SearchResponseV0 } from './firecrawl/src/index' //'@mendable/firecrawl-js'; +import { z } from "zod"; + +const app = new FirecrawlApp({apiKey: "fc-YOUR_API_KEY", version: "v0"}); + +// Scrape a website: +const scrapeResult = await app.scrapeUrl('firecrawl.dev') as ScrapeResponseV0; + +if (scrapeResult.data) { + console.log(scrapeResult.data.content) +} + +// Crawl a website: +const crawlResult = await app.crawlUrl('mendable.ai', {crawlerOptions: {excludes: ['blog/*'], limit: 5}}, false); +console.log(crawlResult) + +const jobId: string = await crawlResult['jobId']; +console.log(jobId); + +let job: CrawlStatusResponseV0; +while (true) { + job = await app.checkCrawlStatus(jobId) as CrawlStatusResponseV0; + if (job.status === 'completed') { + break; + } + await new Promise(resolve => setTimeout(resolve, 1000)); // wait 1 second +} + +if (job.data) { + console.log(job.data[0].content); +} + +// Search for a query: +const query = 'what is mendable?' 
+const searchResult = await app.search(query) as SearchResponseV0; +if (searchResult.data) { + console.log(searchResult.data[0].content) +} + +// LLM Extraction: +// Define schema to extract contents into using zod schema +const zodSchema = z.object({ + top: z + .array( + z.object({ + title: z.string(), + points: z.number(), + by: z.string(), + commentsURL: z.string(), + }) + ) + .length(5) + .describe("Top 5 stories on Hacker News"), +}); + +let llmExtractionResult = await app.scrapeUrl("https://news.ycombinator.com", { + extractorOptions: { extractionSchema: zodSchema }, +}); + +if (llmExtractionResult.data) { + console.log(llmExtractionResult.data[0].llm_extraction); +} + +// Define schema to extract contents into using json schema +const jsonSchema = { + "type": "object", + "properties": { + "top": { + "type": "array", + "items": { + "type": "object", + "properties": { + "title": {"type": "string"}, + "points": {"type": "number"}, + "by": {"type": "string"}, + "commentsURL": {"type": "string"} + }, + "required": ["title", "points", "by", "commentsURL"] + }, + "minItems": 5, + "maxItems": 5, + "description": "Top 5 stories on Hacker News" + } + }, + "required": ["top"] +} + +llmExtractionResult = await app.scrapeUrl("https://news.ycombinator.com", { + extractorOptions: { extractionSchema: jsonSchema }, +}); + +if (llmExtractionResult.data) { + console.log(llmExtractionResult.data[0].llm_extraction); +} + diff --git a/apps/js-sdk/firecrawl/.gitignore b/apps/js-sdk/firecrawl/.gitignore index c6bba591..96e545b3 100644 --- a/apps/js-sdk/firecrawl/.gitignore +++ b/apps/js-sdk/firecrawl/.gitignore @@ -128,3 +128,5 @@ dist .yarn/build-state.yml .yarn/install-state.gz .pnp.* + +build diff --git a/apps/js-sdk/firecrawl/build/cjs/index.js b/apps/js-sdk/firecrawl/build/cjs/index.js new file mode 100644 index 00000000..dbc2d6b9 --- /dev/null +++ b/apps/js-sdk/firecrawl/build/cjs/index.js @@ -0,0 +1,271 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const axios_1 = __importDefault(require("axios")); +const zod_1 = require("zod"); +const zod_to_json_schema_1 = require("zod-to-json-schema"); +/** + * Main class for interacting with the Firecrawl API. + */ +class FirecrawlApp { + /** + * Initializes a new instance of the FirecrawlApp class. + * @param {FirecrawlAppConfig} config - Configuration options for the FirecrawlApp instance. + */ + constructor({ apiKey = null, apiUrl = null }) { + this.apiKey = apiKey || ""; + this.apiUrl = apiUrl || "https://api.firecrawl.dev"; + if (!this.apiKey) { + throw new Error("No API key provided"); + } + } + /** + * Scrapes a URL using the Firecrawl API. + * @param {string} url - The URL to scrape. 
+ * @param {Params | null} params - Additional parameters for the scrape request. + * @returns {Promise} The response from the scrape operation. + */ + scrapeUrl(url_1) { + return __awaiter(this, arguments, void 0, function* (url, params = null) { + var _a; + const headers = { + "Content-Type": "application/json", + Authorization: `Bearer ${this.apiKey}`, + }; + let jsonData = Object.assign({ url }, params); + if ((_a = params === null || params === void 0 ? void 0 : params.extractorOptions) === null || _a === void 0 ? void 0 : _a.extractionSchema) { + let schema = params.extractorOptions.extractionSchema; + // Check if schema is an instance of ZodSchema to correctly identify Zod schemas + if (schema instanceof zod_1.z.ZodSchema) { + schema = (0, zod_to_json_schema_1.zodToJsonSchema)(schema); + } + jsonData = Object.assign(Object.assign({}, jsonData), { extractorOptions: Object.assign(Object.assign({}, params.extractorOptions), { extractionSchema: schema, mode: params.extractorOptions.mode || "llm-extraction" }) }); + } + try { + const response = yield axios_1.default.post(this.apiUrl + "/v0/scrape", jsonData, { headers }); + if (response.status === 200) { + const responseData = response.data; + if (responseData.success) { + return responseData; + } + else { + throw new Error(`Failed to scrape URL. Error: ${responseData.error}`); + } + } + else { + this.handleError(response, "scrape URL"); + } + } + catch (error) { + throw new Error(error.message); + } + return { success: false, error: "Internal server error." }; + }); + } + /** + * Searches for a query using the Firecrawl API. + * @param {string} query - The query to search for. + * @param {Params | null} params - Additional parameters for the search request. + * @returns {Promise} The response from the search operation. + */ + search(query_1) { + return __awaiter(this, arguments, void 0, function* (query, params = null) { + const headers = { + "Content-Type": "application/json", + Authorization: `Bearer ${this.apiKey}`, + }; + let jsonData = { query }; + if (params) { + jsonData = Object.assign(Object.assign({}, jsonData), params); + } + try { + const response = yield axios_1.default.post(this.apiUrl + "/v0/search", jsonData, { headers }); + if (response.status === 200) { + const responseData = response.data; + if (responseData.success) { + return responseData; + } + else { + throw new Error(`Failed to search. Error: ${responseData.error}`); + } + } + else { + this.handleError(response, "search"); + } + } + catch (error) { + throw new Error(error.message); + } + return { success: false, error: "Internal server error." }; + }); + } + /** + * Initiates a crawl job for a URL using the Firecrawl API. + * @param {string} url - The URL to crawl. + * @param {Params | null} params - Additional parameters for the crawl request. + * @param {boolean} waitUntilDone - Whether to wait for the crawl job to complete. + * @param {number} pollInterval - Time in seconds for job status checks. + * @param {string} idempotencyKey - Optional idempotency key for the request. + * @returns {Promise} The response from the crawl operation. 
+ */ + crawlUrl(url_1) { + return __awaiter(this, arguments, void 0, function* (url, params = null, waitUntilDone = true, pollInterval = 2, idempotencyKey) { + const headers = this.prepareHeaders(idempotencyKey); + let jsonData = { url }; + if (params) { + jsonData = Object.assign(Object.assign({}, jsonData), params); + } + try { + const response = yield this.postRequest(this.apiUrl + "/v0/crawl", jsonData, headers); + if (response.status === 200) { + const jobId = response.data.jobId; + if (waitUntilDone) { + return this.monitorJobStatus(jobId, headers, pollInterval); + } + else { + return { success: true, jobId }; + } + } + else { + this.handleError(response, "start crawl job"); + } + } + catch (error) { + console.log(error); + throw new Error(error.message); + } + return { success: false, error: "Internal server error." }; + }); + } + /** + * Checks the status of a crawl job using the Firecrawl API. + * @param {string} jobId - The job ID of the crawl operation. + * @returns {Promise} The response containing the job status. + */ + checkCrawlStatus(jobId) { + return __awaiter(this, void 0, void 0, function* () { + const headers = this.prepareHeaders(); + try { + const response = yield this.getRequest(this.apiUrl + `/v0/crawl/status/${jobId}`, headers); + if (response.status === 200) { + return { + success: true, + status: response.data.status, + current: response.data.current, + current_url: response.data.current_url, + current_step: response.data.current_step, + total: response.data.total, + data: response.data.data, + partial_data: !response.data.data + ? response.data.partial_data + : undefined, + }; + } + else { + this.handleError(response, "check crawl status"); + } + } + catch (error) { + throw new Error(error.message); + } + return { + success: false, + status: "unknown", + current: 0, + current_url: "", + current_step: "", + total: 0, + error: "Internal server error.", + }; + }); + } + /** + * Prepares the headers for an API request. + * @returns {AxiosRequestHeaders} The prepared headers. + */ + prepareHeaders(idempotencyKey) { + return Object.assign({ "Content-Type": "application/json", Authorization: `Bearer ${this.apiKey}` }, (idempotencyKey ? { "x-idempotency-key": idempotencyKey } : {})); + } + /** + * Sends a POST request to the specified URL. + * @param {string} url - The URL to send the request to. + * @param {Params} data - The data to send in the request. + * @param {AxiosRequestHeaders} headers - The headers for the request. + * @returns {Promise} The response from the POST request. + */ + postRequest(url, data, headers) { + return axios_1.default.post(url, data, { headers }); + } + /** + * Sends a GET request to the specified URL. + * @param {string} url - The URL to send the request to. + * @param {AxiosRequestHeaders} headers - The headers for the request. + * @returns {Promise} The response from the GET request. + */ + getRequest(url, headers) { + return axios_1.default.get(url, { headers }); + } + /** + * Monitors the status of a crawl job until completion or failure. + * @param {string} jobId - The job ID of the crawl operation. + * @param {AxiosRequestHeaders} headers - The headers for the request. + * @param {number} timeout - Timeout in seconds for job status checks. + * @returns {Promise} The final job status or data. 
+ */ + monitorJobStatus(jobId, headers, checkInterval) { + return __awaiter(this, void 0, void 0, function* () { + while (true) { + const statusResponse = yield this.getRequest(this.apiUrl + `/v0/crawl/status/${jobId}`, headers); + if (statusResponse.status === 200) { + const statusData = statusResponse.data; + if (statusData.status === "completed") { + if ("data" in statusData) { + return statusData.data; + } + else { + throw new Error("Crawl job completed but no data was returned"); + } + } + else if (["active", "paused", "pending", "queued"].includes(statusData.status)) { + if (checkInterval < 2) { + checkInterval = 2; + } + yield new Promise((resolve) => setTimeout(resolve, checkInterval * 1000)); // Wait for the specified timeout before checking again + } + else { + throw new Error(`Crawl job failed or was stopped. Status: ${statusData.status}`); + } + } + else { + this.handleError(statusResponse, "check crawl status"); + } + } + }); + } + /** + * Handles errors from API responses. + * @param {AxiosResponse} response - The response from the API. + * @param {string} action - The action being performed when the error occurred. + */ + handleError(response, action) { + if ([402, 408, 409, 500].includes(response.status)) { + const errorMessage = response.data.error || "Unknown error occurred"; + throw new Error(`Failed to ${action}. Status code: ${response.status}. Error: ${errorMessage}`); + } + else { + throw new Error(`Unexpected error occurred while trying to ${action}. Status code: ${response.status}`); + } + } +} +exports.default = FirecrawlApp; diff --git a/apps/js-sdk/firecrawl/build/cjs/package.json b/apps/js-sdk/firecrawl/build/cjs/package.json new file mode 100644 index 00000000..b731bd61 --- /dev/null +++ b/apps/js-sdk/firecrawl/build/cjs/package.json @@ -0,0 +1 @@ +{"type": "commonjs"} diff --git a/apps/js-sdk/firecrawl/build/index.js b/apps/js-sdk/firecrawl/build/esm/index.js similarity index 100% rename from apps/js-sdk/firecrawl/build/index.js rename to apps/js-sdk/firecrawl/build/esm/index.js diff --git a/apps/js-sdk/firecrawl/build/esm/package.json b/apps/js-sdk/firecrawl/build/esm/package.json new file mode 100644 index 00000000..6990891f --- /dev/null +++ b/apps/js-sdk/firecrawl/build/esm/package.json @@ -0,0 +1 @@ +{"type": "module"} diff --git a/apps/js-sdk/firecrawl/package-lock.json b/apps/js-sdk/firecrawl/package-lock.json index 25b0e305..4d9254ac 100644 --- a/apps/js-sdk/firecrawl/package-lock.json +++ b/apps/js-sdk/firecrawl/package-lock.json @@ -1,12 +1,12 @@ { "name": "@mendable/firecrawl-js", - "version": "0.0.29", + "version": "0.0.36", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@mendable/firecrawl-js", - "version": "0.0.29", + "version": "0.0.36", "license": "MIT", "dependencies": { "axios": "^1.6.8", diff --git a/apps/js-sdk/firecrawl/package.json b/apps/js-sdk/firecrawl/package.json index 71d2362e..4b857b65 100644 --- a/apps/js-sdk/firecrawl/package.json +++ b/apps/js-sdk/firecrawl/package.json @@ -1,12 +1,22 @@ { "name": "@mendable/firecrawl-js", - "version": "0.0.29", + "version": "0.0.36", "description": "JavaScript SDK for Firecrawl API", - "main": "build/index.js", + "main": "build/cjs/index.js", "types": "types/index.d.ts", "type": "module", + "exports": { + "require": { + "types": "./types/index.d.ts", + "default": "./build/cjs/index.js" + }, + "import": { + "types": "./types/index.d.ts", + "default": "./build/esm/index.js" + } + }, "scripts": { - "build": "tsc", + "build": "tsc --module commonjs --moduleResolution node10 
--outDir build/cjs/ && echo '{\"type\": \"commonjs\"}' > build/cjs/package.json && npx tsc --module NodeNext --moduleResolution NodeNext --outDir build/esm/ && echo '{\"type\": \"module\"}' > build/esm/package.json", "build-and-publish": "npm run build && npm publish --access public", "publish-beta": "npm run build && npm publish --access public --tag beta", "test": "NODE_OPTIONS=--experimental-vm-modules jest --verbose src/__tests__/**/*.test.ts" diff --git a/apps/js-sdk/firecrawl/src/__tests__/e2e_withAuth/index.test.ts b/apps/js-sdk/firecrawl/src/__tests__/e2e_withAuth/index.test.ts index ad917de4..91dfb9e1 100644 --- a/apps/js-sdk/firecrawl/src/__tests__/e2e_withAuth/index.test.ts +++ b/apps/js-sdk/firecrawl/src/__tests__/e2e_withAuth/index.test.ts @@ -1,4 +1,4 @@ -import FirecrawlApp from '../../index'; +import FirecrawlApp, { CrawlResponseV0, FirecrawlDocumentV0, JobStatusResponseV0, ScrapeResponseV0, SearchResponseV0 } from '../../index'; import { v4 as uuidv4 } from 'uuid'; import dotenv from 'dotenv'; import { describe, test, expect } from '@jest/globals'; @@ -11,31 +11,31 @@ const API_URL = "http://127.0.0.1:3002"; describe('FirecrawlApp E2E Tests', () => { test.concurrent('should throw error for no API key', async () => { expect(() => { - new FirecrawlApp({ apiKey: null, apiUrl: API_URL }); + new FirecrawlApp({ apiKey: null, apiUrl: API_URL, version: "v0" }); }).toThrow("No API key provided"); }); test.concurrent('should throw error for invalid API key on scrape', async () => { - const invalidApp = new FirecrawlApp({ apiKey: "invalid_api_key", apiUrl: API_URL }); + const invalidApp = new FirecrawlApp({ apiKey: "invalid_api_key", apiUrl: API_URL, version: "v0" }); await expect(invalidApp.scrapeUrl('https://roastmywebsite.ai')).rejects.toThrow("Request failed with status code 401"); }); test.concurrent('should throw error for blocklisted URL on scrape', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL, version: "v0" }); const blocklistedUrl = "https://facebook.com/fake-test"; await expect(app.scrapeUrl(blocklistedUrl)).rejects.toThrow("Request failed with status code 403"); }); test.concurrent('should return successful response with valid preview token', async () => { - const app = new FirecrawlApp({ apiKey: "this_is_just_a_preview_token", apiUrl: API_URL }); - const response = await app.scrapeUrl('https://roastmywebsite.ai'); + const app = new FirecrawlApp({ apiKey: "this_is_just_a_preview_token", apiUrl: API_URL, version: "v0" }); + const response = await app.scrapeUrl('https://roastmywebsite.ai') as ScrapeResponseV0; expect(response).not.toBeNull(); expect(response.data?.content).toContain("_Roast_"); }, 30000); // 30 seconds timeout test.concurrent('should return successful response for valid scrape', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); - const response = await app.scrapeUrl('https://roastmywebsite.ai'); + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL, version: "v0" }); + const response = await app.scrapeUrl('https://roastmywebsite.ai') as ScrapeResponseV0; expect(response).not.toBeNull(); expect(response.data?.content).toContain("_Roast_"); expect(response.data).toHaveProperty('markdown'); @@ -44,8 +44,8 @@ describe('FirecrawlApp E2E Tests', () => { }, 30000); // 30 seconds timeout test.concurrent('should return successful response with valid API key and include HTML', async () => { - const 
app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); - const response = await app.scrapeUrl('https://roastmywebsite.ai', { pageOptions: { includeHtml: true } }); + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL, version: "v0" }); + const response = await app.scrapeUrl('https://roastmywebsite.ai', { pageOptions: { includeHtml: true } }) as ScrapeResponseV0; expect(response).not.toBeNull(); expect(response.data?.content).toContain("_Roast_"); expect(response.data?.markdown).toContain("_Roast_"); @@ -53,41 +53,41 @@ describe('FirecrawlApp E2E Tests', () => { }, 30000); // 30 seconds timeout test.concurrent('should return successful response for valid scrape with PDF file', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); - const response = await app.scrapeUrl('https://arxiv.org/pdf/astro-ph/9301001.pdf'); + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL, version: "v0" }); + const response = await app.scrapeUrl('https://arxiv.org/pdf/astro-ph/9301001.pdf') as ScrapeResponseV0; expect(response).not.toBeNull(); expect(response.data?.content).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy'); }, 30000); // 30 seconds timeout test.concurrent('should return successful response for valid scrape with PDF file without explicit extension', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); - const response = await app.scrapeUrl('https://arxiv.org/pdf/astro-ph/9301001'); + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL, version: "v0" }); + const response = await app.scrapeUrl('https://arxiv.org/pdf/astro-ph/9301001') as ScrapeResponseV0; expect(response).not.toBeNull(); expect(response.data?.content).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy'); }, 30000); // 30 seconds timeout test.concurrent('should throw error for invalid API key on crawl', async () => { - const invalidApp = new FirecrawlApp({ apiKey: "invalid_api_key", apiUrl: API_URL }); + const invalidApp = new FirecrawlApp({ apiKey: "invalid_api_key", apiUrl: API_URL, version: "v0" }); await expect(invalidApp.crawlUrl('https://roastmywebsite.ai')).rejects.toThrow("Request failed with status code 401"); }); test.concurrent('should throw error for blocklisted URL on crawl', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL, version: "v0" }); const blocklistedUrl = "https://twitter.com/fake-test"; await expect(app.crawlUrl(blocklistedUrl)).rejects.toThrow("Request failed with status code 403"); }); test.concurrent('should return successful response for crawl and wait for completion', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); - const response = await app.crawlUrl('https://roastmywebsite.ai', { crawlerOptions: { excludes: ['blog/*'] } }, true, 30); + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL, version: "v0" }); + const response = await app.crawlUrl('https://roastmywebsite.ai', { crawlerOptions: { excludes: ['blog/*'] } }, true, 30) as CrawlResponseV0; expect(response).not.toBeNull(); expect(response[0].content).toContain("_Roast_"); }, 60000); // 60 seconds timeout test.concurrent('should handle idempotency key for crawl', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const app = new 
FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL, version: "v0" }); const uniqueIdempotencyKey = uuidv4(); - const response = await app.crawlUrl('https://roastmywebsite.ai', { crawlerOptions: { excludes: ['blog/*'] } }, false, 2, uniqueIdempotencyKey); + const response = await app.crawlUrl('https://roastmywebsite.ai', { crawlerOptions: { excludes: ['blog/*'] } }, false, 2, uniqueIdempotencyKey) as CrawlResponseV0; expect(response).not.toBeNull(); expect(response.jobId).toBeDefined(); @@ -95,12 +95,12 @@ describe('FirecrawlApp E2E Tests', () => { }); test.concurrent('should check crawl status', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); - const response = await app.crawlUrl('https://roastmywebsite.ai', { crawlerOptions: { excludes: ['blog/*'] } }, false); + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL, version: "v0" }); + const response: any = await app.crawlUrl('https://roastmywebsite.ai', { crawlerOptions: { excludes: ['blog/*'] } }, false) as JobStatusResponseV0; expect(response).not.toBeNull(); expect(response.jobId).toBeDefined(); - let statusResponse = await app.checkCrawlStatus(response.jobId); + let statusResponse: any = await app.checkCrawlStatus(response.jobId); const maxChecks = 15; let checks = 0; @@ -108,7 +108,7 @@ describe('FirecrawlApp E2E Tests', () => { await new Promise(resolve => setTimeout(resolve, 1000)); expect(statusResponse.partial_data).not.toBeNull(); expect(statusResponse.current).toBeGreaterThanOrEqual(1); - statusResponse = await app.checkCrawlStatus(response.jobId); + statusResponse = await app.checkCrawlStatus(response.jobId) as CrawlResponseV0; checks++; } @@ -121,20 +121,20 @@ describe('FirecrawlApp E2E Tests', () => { }, 35000); // 35 seconds timeout test.concurrent('should return successful response for search', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); - const response = await app.search("test query"); + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL, version: "v0" }); + const response = await app.search("test query") as SearchResponseV0; expect(response).not.toBeNull(); expect(response?.data?.[0]?.content).toBeDefined(); expect(response?.data?.length).toBeGreaterThan(2); }, 30000); // 30 seconds timeout test.concurrent('should throw error for invalid API key on search', async () => { - const invalidApp = new FirecrawlApp({ apiKey: "invalid_api_key", apiUrl: API_URL }); + const invalidApp = new FirecrawlApp({ apiKey: "invalid_api_key", apiUrl: API_URL, version: "v0" }); await expect(invalidApp.search("test query")).rejects.toThrow("Request failed with status code 401"); }); test.concurrent('should perform LLM extraction', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL, version: "v0" }); const response = await app.scrapeUrl("https://mendable.ai", { extractorOptions: { mode: 'llm-extraction', @@ -149,7 +149,7 @@ describe('FirecrawlApp E2E Tests', () => { required: ['company_mission', 'supports_sso', 'is_open_source'] } } - }); + }) as ScrapeResponseV0; expect(response).not.toBeNull(); expect(response.data?.llm_extraction).toBeDefined(); const llmExtraction = response.data?.llm_extraction; diff --git a/apps/js-sdk/firecrawl/src/__tests__/v1/e2e_withAuth/index.test.ts b/apps/js-sdk/firecrawl/src/__tests__/v1/e2e_withAuth/index.test.ts new file mode 100644 index 00000000..724996bc --- /dev/null +++ 
b/apps/js-sdk/firecrawl/src/__tests__/v1/e2e_withAuth/index.test.ts @@ -0,0 +1,307 @@ +import FirecrawlApp, { CrawlParams, CrawlResponse, CrawlStatusResponse, MapResponse, ScrapeParams, ScrapeResponse } from '../../../index'; +import { v4 as uuidv4 } from 'uuid'; +import dotenv from 'dotenv'; +import { describe, test, expect } from '@jest/globals'; + +dotenv.config(); + +const TEST_API_KEY = process.env.TEST_API_KEY; +const API_URL = "http://127.0.0.1:3002"; + +describe('FirecrawlApp E2E Tests', () => { + test.concurrent('should throw error for no API key', async () => { + expect(() => { + new FirecrawlApp({ apiKey: null, apiUrl: API_URL }); + }).toThrow("No API key provided"); + }); + + test.concurrent('should throw error for invalid API key on scrape', async () => { + const invalidApp = new FirecrawlApp({ apiKey: "invalid_api_key", apiUrl: API_URL }); + await expect(invalidApp.scrapeUrl('https://roastmywebsite.ai')).rejects.toThrow("Request failed with status code 401"); + }); + + test.concurrent('should throw error for blocklisted URL on scrape', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const blocklistedUrl = "https://facebook.com/fake-test"; + await expect(app.scrapeUrl(blocklistedUrl)).rejects.toThrow("Request failed with status code 403"); + }); + + test.concurrent('should return successful response with valid preview token', async () => { + const app = new FirecrawlApp({ apiKey: "this_is_just_a_preview_token", apiUrl: API_URL }); + const response = await app.scrapeUrl('https://roastmywebsite.ai') as ScrapeResponse; + expect(response).not.toBeNull(); + expect(response.data?.markdown).toContain("_Roast_"); + }, 30000); // 30 seconds timeout + + test.concurrent('should return successful response for valid scrape', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const response = await app.scrapeUrl('https://roastmywebsite.ai') as ScrapeResponse; + expect(response).not.toBeNull(); + expect(response.data).not.toHaveProperty('content'); // v0 + expect(response.data).not.toHaveProperty('html'); + expect(response.data).not.toHaveProperty('rawHtml'); + expect(response.data).not.toHaveProperty('screenshot'); + expect(response.data).not.toHaveProperty('links'); + + expect(response.data).toHaveProperty('markdown'); + expect(response.data).toHaveProperty('metadata'); + }, 30000); // 30 seconds timeout + + test.concurrent('should return successful response with valid API key and include HTML', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const response = await app.scrapeUrl( + 'https://roastmywebsite.ai', { + formats: ['markdown', 'html', 'rawHtml', 'screenshot', 'links'], + headers: { "x-key": "test" }, + includeTags: ['h1'], + excludeTags: ['h2'], + onlyMainContent: true, + timeout: 30000, + waitFor: 1000 + }) as ScrapeResponse; + expect(response).not.toBeNull(); + expect(response.data).not.toHaveProperty('content'); // v0 + expect(response.data?.markdown).toContain("_Roast_"); + expect(response.data?.html).toContain(" { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const response = await app.scrapeUrl('https://arxiv.org/pdf/astro-ph/9301001.pdf') as ScrapeResponse; + expect(response).not.toBeNull(); + expect(response.data?.markdown).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy'); + }, 30000); // 30 seconds timeout + + test.concurrent('should return successful response for valid scrape 
with PDF file without explicit extension', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const response = await app.scrapeUrl('https://arxiv.org/pdf/astro-ph/9301001') as ScrapeResponse; + expect(response).not.toBeNull(); + expect(response.data?.markdown).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy'); + }, 30000); // 30 seconds timeout + + test.concurrent('should throw error for invalid API key on crawl', async () => { + const invalidApp = new FirecrawlApp({ apiKey: "invalid_api_key", apiUrl: API_URL }); + await expect(invalidApp.crawlUrl('https://roastmywebsite.ai')).rejects.toThrow("Request failed with status code 401"); + }); + + test.concurrent('should throw error for blocklisted URL on crawl', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const blocklistedUrl = "https://twitter.com/fake-test"; + await expect(app.crawlUrl(blocklistedUrl)).rejects.toThrow("URL is blocked. Firecrawl currently does not support social media scraping due to policy restrictions."); + }); + + test.concurrent('should return successful response for crawl and wait for completion', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const response = await app.crawlUrl('https://roastmywebsite.ai', {}, true, 30) as CrawlStatusResponse; + expect(response).not.toBeNull(); + expect(response).toHaveProperty("totalCount"); + expect(response.totalCount).toBeGreaterThan(0); + expect(response).toHaveProperty("creditsUsed"); + expect(response.creditsUsed).toBeGreaterThan(0); + expect(response).toHaveProperty("expiresAt"); + expect(new Date(response.expiresAt).getTime()).toBeGreaterThan(Date.now()); + expect(response).toHaveProperty("status"); + expect(response.status).toBe("completed"); + expect(response).not.toHaveProperty("next"); // wait until done + expect(response.data?.length).toBeGreaterThan(0); + expect(response.data?.[0]).toHaveProperty("markdown"); + expect(response.data?.[0].markdown).toContain("_Roast_"); + expect(response.data?.[0]).not.toHaveProperty('content'); // v0 + expect(response.data?.[0]).not.toHaveProperty("html"); + expect(response.data?.[0]).not.toHaveProperty("rawHtml"); + expect(response.data?.[0]).not.toHaveProperty("screenshot"); + expect(response.data?.[0]).not.toHaveProperty("links"); + expect(response.data?.[0]).toHaveProperty("metadata"); + expect(response.data?.[0].metadata).toHaveProperty("title"); + expect(response.data?.[0].metadata).toHaveProperty("description"); + expect(response.data?.[0].metadata).toHaveProperty("language"); + expect(response.data?.[0].metadata).toHaveProperty("sourceURL"); + expect(response.data?.[0].metadata).toHaveProperty("statusCode"); + expect(response.data?.[0].metadata).not.toHaveProperty("error"); + }, 60000); // 60 seconds timeout + + test.concurrent('should return successful response for crawl with options and wait for completion', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const response = await app.crawlUrl('https://roastmywebsite.ai', { + excludePaths: ['blog/*'], + includePaths: ['/'], + maxDepth: 2, + ignoreSitemap: true, + limit: 10, + allowBackwardLinks: true, + allowExternalLinks: true, + scrapeOptions: { + formats: ['markdown', 'html', 'rawHtml', 'screenshot', 'links'], + headers: { "x-key": "test" }, + includeTags: ['h1'], + excludeTags: ['h2'], + onlyMainContent: true, + waitFor: 1000 + } + } as CrawlParams, true, 30) as 
CrawlStatusResponse; + expect(response).not.toBeNull(); + expect(response).toHaveProperty("totalCount"); + expect(response.totalCount).toBeGreaterThan(0); + expect(response).toHaveProperty("creditsUsed"); + expect(response.creditsUsed).toBeGreaterThan(0); + expect(response).toHaveProperty("expiresAt"); + expect(new Date(response.expiresAt).getTime()).toBeGreaterThan(Date.now()); + expect(response).toHaveProperty("status"); + expect(response.status).toBe("completed"); + expect(response).not.toHaveProperty("next"); + expect(response.data?.length).toBeGreaterThan(0); + expect(response.data?.[0]).toHaveProperty("markdown"); + expect(response.data?.[0].markdown).toContain("_Roast_"); + expect(response.data?.[0]).not.toHaveProperty('content'); // v0 + expect(response.data?.[0]).toHaveProperty("html"); + expect(response.data?.[0].html).toContain("<h1"); + }, 60000); // 60 seconds timeout + + test.concurrent('should return successful response for crawl with idempotency key', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const uniqueIdempotencyKey = uuidv4(); + const response = await app.crawlUrl('https://roastmywebsite.ai', {}, false, 2, uniqueIdempotencyKey) as CrawlResponse; + expect(response).not.toBeNull(); + expect(response.id).toBeDefined(); + + await expect(app.crawlUrl('https://roastmywebsite.ai', {}, true, 2, uniqueIdempotencyKey)).rejects.toThrow("Request failed with status code 409"); + }); + + test.concurrent('should check crawl status', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const response = await app.crawlUrl('https://firecrawl.dev', { scrapeOptions: { formats: ['markdown', 'html', 'rawHtml', 'screenshot', 'links']}} as CrawlParams, false) as CrawlResponse; + expect(response).not.toBeNull(); + expect(response.id).toBeDefined(); + + let statusResponse: any = await app.checkCrawlStatus(response.id) as CrawlStatusResponse; + const maxChecks = 15; + let checks = 0; + + while (statusResponse.status === 'scraping' && checks < maxChecks) { + await new Promise(resolve => setTimeout(resolve, 5000)); + expect(statusResponse).not.toHaveProperty("partial_data"); // v0 + expect(statusResponse).not.toHaveProperty("current"); // v0 + expect(statusResponse).toHaveProperty("data"); + expect(statusResponse).toHaveProperty("totalCount"); + expect(statusResponse).toHaveProperty("creditsUsed"); + expect(statusResponse).toHaveProperty("expiresAt"); + expect(statusResponse).toHaveProperty("status"); + expect(statusResponse).toHaveProperty("next"); + expect(statusResponse.totalCount).toBeGreaterThan(0); + expect(statusResponse.creditsUsed).toBeGreaterThan(0); + expect(statusResponse.expiresAt.getTime()).toBeGreaterThan(Date.now()); + expect(statusResponse.status).toBe("scraping"); + expect(statusResponse.next).toContain("/v1/crawl/"); + statusResponse = await app.checkCrawlStatus(response.id) as CrawlStatusResponse; + checks++; + } + + expect(statusResponse).not.toBeNull(); + expect(statusResponse).toHaveProperty("totalCount"); + expect(statusResponse.totalCount).toBeGreaterThan(0); + expect(statusResponse).toHaveProperty("creditsUsed"); + expect(statusResponse.creditsUsed).toBeGreaterThan(0); + expect(statusResponse).toHaveProperty("expiresAt"); + expect(statusResponse.expiresAt.getTime()).toBeGreaterThan(Date.now()); + expect(statusResponse).toHaveProperty("status"); + expect(statusResponse.status).toBe("completed"); + expect(statusResponse.data?.length).toBeGreaterThan(0); + expect(statusResponse.data?.[0]).toHaveProperty("markdown"); + expect(statusResponse.data?.[0].markdown?.length).toBeGreaterThan(10); +
expect(statusResponse.data?.[0]).not.toHaveProperty('content'); // v0 + expect(statusResponse.data?.[0]).toHaveProperty("html"); + expect(statusResponse.data?.[0].html).toContain("<h1"); + }, 60000); // 60 seconds timeout + + test.concurrent('should throw error for invalid API key on map', async () => { + const invalidApp = new FirecrawlApp({ apiKey: "invalid_api_key", apiUrl: API_URL }); + await expect(invalidApp.mapUrl('https://roastmywebsite.ai')).rejects.toThrow("Request failed with status code 401"); + }); + + test.concurrent('should throw error for blocklisted URL on map', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const blocklistedUrl = "https://facebook.com/fake-test"; + await expect(app.mapUrl(blocklistedUrl)).rejects.toThrow("Request failed with status code 403"); + }); + + test.concurrent('should return successful response with valid preview token', async () => { + const app = new FirecrawlApp({ apiKey: "this_is_just_a_preview_token", apiUrl: API_URL }); + const response = await app.mapUrl('https://roastmywebsite.ai') as MapResponse; + expect(response).not.toBeNull(); + expect(response.links?.length).toBeGreaterThan(0); + }, 30000); // 30 seconds timeout + + test.concurrent('should return successful response for valid map', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const response = await app.mapUrl('https://roastmywebsite.ai') as MapResponse; + expect(response).not.toBeNull(); + + expect(response.links?.length).toBeGreaterThan(0); + expect(response.links?.[0]).toContain("https://"); + const filteredLinks = response.links?.filter((link: string) => link.includes("roastmywebsite.ai")); + expect(filteredLinks?.length).toBeGreaterThan(0); + }, 30000); // 30 seconds timeout +}); diff --git a/apps/js-sdk/firecrawl/src/index.ts b/apps/js-sdk/firecrawl/src/index.ts index a42d4618..90c86a2a 100644 --- a/apps/js-sdk/firecrawl/src/index.ts +++ b/apps/js-sdk/firecrawl/src/index.ts @@ -1,16 +1,22 @@ import axios, { AxiosResponse, AxiosRequestHeaders } from "axios"; import { z } from "zod"; import { zodToJsonSchema } from "zod-to-json-schema"; + /** * Configuration interface for FirecrawlApp. + * @param apiKey - Optional API key for authentication. + * @param apiUrl - Optional base URL of the API; defaults to 'https://api.firecrawl.dev'. + * @param version - API version, either 'v0' or 'v1'. */ export interface FirecrawlAppConfig { apiKey?: string | null; apiUrl?: string | null; + version?: "v0" | "v1"; } /** * Metadata for a Firecrawl document. + * Includes various optional properties for document metadata. */ export interface FirecrawlDocumentMetadata { title?: string; @@ -43,6 +49,17 @@ export interface FirecrawlDocumentMetadata { articleTag?: string; articleSection?: string; sourceURL?: string; + statusCode?: number; + error?: string; + [key: string]: any; // Allows for additional metadata properties not explicitly defined. +} + +/** + * Metadata for a Firecrawl document on v0. + * Similar to FirecrawlDocumentMetadata but includes properties specific to API version v0. + */ +export interface FirecrawlDocumentMetadataV0 { + // Similar properties as FirecrawlDocumentMetadata with additional v0 specific adjustments pageStatusCode?: number; pageError?: string; [key: string]: any; @@ -50,8 +67,23 @@ export interface FirecrawlDocumentMetadata { /** * Document interface for Firecrawl. + * Represents a document retrieved or processed by Firecrawl.
*/ export interface FirecrawlDocument { + url?: string; + markdown?: string; + html?: string; + rawHtml?: string; + links?: string[]; + screenshot?: string; + metadata: FirecrawlDocumentMetadata; +} + +/** + * Document interface for Firecrawl on v0. + * Represents a document specifically for API version v0 with additional properties. + */ +export interface FirecrawlDocumentV0 { id?: string; url?: string; content: string; @@ -61,79 +93,241 @@ export interface FirecrawlDocument { createdAt?: Date; updatedAt?: Date; type?: string; - metadata: FirecrawlDocumentMetadata; + metadata: FirecrawlDocumentMetadataV0; childrenLinks?: string[]; provider?: string; warning?: string; - index?: number; } +/** + * Parameters for scraping operations. + * Defines the options and configurations available for scraping web content. + */ +export interface ScrapeParams { + formats: ("markdown" | "html" | "rawHtml" | "content" | "links" | "screenshot")[]; + headers?: Record<string, string>; + includeTags?: string[]; + excludeTags?: string[]; + onlyMainContent?: boolean; + screenshotMode?: "desktop" | "full-desktop" | "mobile" | "full-mobile"; + waitFor?: number; + timeout?: number; +} + +/** + * Parameters for scraping operations on v0. + * Includes page and extractor options specific to API version v0. + */ +export interface ScrapeParamsV0 { + pageOptions?: { + headers?: Record<string, string>; + includeHtml?: boolean; + includeRawHtml?: boolean; + onlyIncludeTags?: string[]; + onlyMainContent?: boolean; + removeTags?: string[]; + replaceAllPathsWithAbsolutePaths?: boolean; + screenshot?: boolean; + fullPageScreenshot?: boolean; + waitFor?: number; + }; + extractorOptions?: { + mode?: "markdown" | "llm-extraction" | "llm-extraction-from-raw-html" | "llm-extraction-from-markdown"; + extractionPrompt?: string; + extractionSchema?: Record<string, any> | z.ZodSchema | any; + }; + timeout?: number; +} + /** * Response interface for scraping operations. + * Defines the structure of the response received after a scraping operation. */ export interface ScrapeResponse { success: boolean; + warning?: string; data?: FirecrawlDocument; error?: string; } + /** - * Response interface for searching operations. + * Response interface for scraping operations on v0. + * Similar to ScrapeResponse but tailored for responses from API version v0. */ -export interface SearchResponse { +export interface ScrapeResponseV0 { success: boolean; - data?: FirecrawlDocument[]; + data?: FirecrawlDocumentV0; error?: string; } + +/** + * Parameters for crawling operations. + * Includes options for both scraping and mapping during a crawl. + */ +export interface CrawlParams { + scrapeOptions?: ScrapeParams; + crawlerOptions?: { + includePaths?: string[] + excludePaths?: string[] + maxDepth?: number + limit?: number + allowBackwardLinks?: boolean + allowExternalLinks?: boolean + ignoreSitemap?: boolean + }; +} + +/** + * Parameters for crawling operations on v0. + * Tailored for API version v0, includes specific options for crawling.
+ */ +export interface CrawlParamsV0 { + crawlerOptions?: { + includes?: string[]; + excludes?: string[]; + generateImgAltText?: boolean; + returnOnlyUrls?: boolean; + maxDepth?: number; + mode?: "default" | "fast"; + ignoreSitemap?: boolean; + limit?: number; + allowBackwardCrawling?: boolean; + allowExternalContentLinks?: boolean; + }; + pageOptions?: { + headers?: Record<string, string>; + includeHtml?: boolean; + includeRawHtml?: boolean; + onlyIncludeTags?: string[]; + onlyMainContent?: boolean; + removeTags?: string[]; + replaceAllPathsWithAbsolutePaths?: boolean; + screenshot?: boolean; + fullPageScreenshot?: boolean; + waitFor?: number; + }; +} + /** * Response interface for crawling operations. + * Defines the structure of the response received after initiating a crawl. */ export interface CrawlResponse { + id?: string; + url?: string; success: boolean; + error?: string; +} + +/** + * Response interface for crawling operations on v0. + * Similar to CrawlResponse but tailored for responses from API version v0. + */ +export interface CrawlResponseV0 { jobId?: string; + success: boolean; + error?: string; +} + +/** + * Response interface for job status checks. + * Provides detailed status of a crawl job including progress and results. + */ +export interface CrawlStatusResponse { + success: boolean; + totalCount: number; + creditsUsed: number; + expiresAt: Date; + status: "scraping" | "completed" | "failed"; + next: string; data?: FirecrawlDocument[]; error?: string; } + /** - * Response interface for job status checks. + * Response interface for job status checks on v0. + * Tailored for API version v0, provides status and partial data of a crawl job. */ -export interface JobStatusResponse { +export interface CrawlStatusResponseV0 { success: boolean; status: string; current?: number; current_url?: string; current_step?: string; total?: number; - jobId?: string; - data?: FirecrawlDocument[]; - partial_data?: FirecrawlDocument[]; + data?: FirecrawlDocumentV0[]; + partial_data?: FirecrawlDocumentV0[]; error?: string; } + /** - * Generic parameter interface. + * Parameters for mapping operations. + * Defines options for mapping URLs during a crawl. */ -export interface Params { - [key: string]: any; - extractorOptions?: { - extractionSchema: z.ZodSchema | any; - mode?: "llm-extraction"; - extractionPrompt?: string; +export interface MapParams { + includePaths?: string[] + excludePaths?: string[] + maxDepth?: number + limit?: number + allowBackwardLinks?: boolean + allowExternalLinks?: boolean + ignoreSitemap?: boolean +} + +/** + * Response interface for mapping operations. + * Defines the structure of the response received after a mapping operation. + */ +export interface MapResponse { + success: boolean; + links?: string[]; + error?: string; +} + +/** + * Parameters for searching operations on v0. + * Tailored for API version v0, includes specific options for searching content. + */ +export interface SearchParamsV0 { + pageOptions?: { + onlyMainContent?: boolean; + fetchPageContent?: boolean; + includeHtml?: boolean; + includeRawHtml?: boolean; + }; + searchOptions?: { + limit?: number; }; } + +/** + * Response interface for searching operations on v0. + * Defines the structure of the response received after a search operation on v0. + */ +export interface SearchResponseV0 { + success: boolean; + data?: FirecrawlDocumentV0[]; + error?: string; +} + /** * Main class for interacting with the Firecrawl API. + * Provides methods for scraping, searching, crawling, and mapping web content.
*/ export default class FirecrawlApp { private apiKey: string; private apiUrl: string; + private version: "v0" | "v1"; /** * Initializes a new instance of the FirecrawlApp class. - * @param {FirecrawlAppConfig} config - Configuration options for the FirecrawlApp instance. + * @param config - Configuration options for the FirecrawlApp instance. */ - constructor({ apiKey = null, apiUrl = null }: FirecrawlAppConfig) { + constructor({ apiKey = null, apiUrl = null, version = "v1" }: FirecrawlAppConfig) { this.apiKey = apiKey || ""; this.apiUrl = apiUrl || "https://api.firecrawl.dev"; + this.version = version; if (!this.apiKey) { throw new Error("No API key provided"); } @@ -141,21 +335,21 @@ export default class FirecrawlApp { /** * Scrapes a URL using the Firecrawl API. - * @param {string} url - The URL to scrape. - * @param {Params | null} params - Additional parameters for the scrape request. - * @returns {Promise<ScrapeResponse>} The response from the scrape operation. + * @param url - The URL to scrape. + * @param params - Additional parameters for the scrape request. + * @returns The response from the scrape operation. */ async scrapeUrl( url: string, - params: Params | null = null - ): Promise<ScrapeResponse> { + params?: ScrapeParams | ScrapeParamsV0 + ): Promise<ScrapeResponse | ScrapeResponseV0> { const headers: AxiosRequestHeaders = { "Content-Type": "application/json", Authorization: `Bearer ${this.apiKey}`, } as AxiosRequestHeaders; - let jsonData: Params = { url, ...params }; - if (params?.extractorOptions?.extractionSchema) { - let schema = params.extractorOptions.extractionSchema; + let jsonData: any = { url, ...params }; + if (jsonData?.extractorOptions?.extractionSchema) { + let schema = jsonData.extractorOptions.extractionSchema; // Check if schema is an instance of ZodSchema to correctly identify Zod schemas if (schema instanceof z.ZodSchema) { schema = zodToJsonSchema(schema); @@ -163,22 +357,26 @@ export default class FirecrawlApp { jsonData = { ...jsonData, extractorOptions: { - ...params.extractorOptions, + ...jsonData.extractorOptions, extractionSchema: schema, - mode: params.extractorOptions.mode || "llm-extraction", + mode: jsonData.extractorOptions.mode || "llm-extraction", }, }; } try { const response: AxiosResponse = await axios.post( - this.apiUrl + "/v0/scrape", + this.apiUrl + `/${this.version}/scrape`, jsonData, { headers } ); if (response.status === 200) { const responseData = response.data; if (responseData.success) { - return responseData; + if (this.version == 'v0') { + return responseData as ScrapeResponseV0; + } else { + return responseData as ScrapeResponse; + } } else { throw new Error(`Failed to scrape URL. Error: ${responseData.error}`); } @@ -193,19 +391,23 @@ export default class FirecrawlApp { /** * Searches for a query using the Firecrawl API. - * @param {string} query - The query to search for. - * @param {Params | null} params - Additional parameters for the search request. - * @returns {Promise<SearchResponse>} The response from the search operation. + * @param query - The query to search for. + * @param params - Additional parameters for the search request. + * @returns The response from the search operation.
*/ async search( query: string, - params: Params | null = null - ): Promise<SearchResponse> { + params?: SearchParamsV0 + ): Promise<SearchResponseV0> { + if (this.version === "v1") { + throw new Error("Search is not supported in v1"); + } + const headers: AxiosRequestHeaders = { "Content-Type": "application/json", Authorization: `Bearer ${this.apiKey}`, } as AxiosRequestHeaders; - let jsonData: Params = { query }; + let jsonData: any = { query }; if (params) { jsonData = { ...jsonData, ...params }; } @@ -233,93 +435,157 @@ /** * Initiates a crawl job for a URL using the Firecrawl API. - * @param {string} url - The URL to crawl. - * @param {Params | null} params - Additional parameters for the crawl request. - * @param {boolean} waitUntilDone - Whether to wait for the crawl job to complete. - * @param {number} pollInterval - Time in seconds for job status checks. - * @param {string} idempotencyKey - Optional idempotency key for the request. - * @returns {Promise<CrawlResponse | any>} The response from the crawl operation. + * @param url - The URL to crawl. + * @param params - Additional parameters for the crawl request. + * @param waitUntilDone - Whether to wait for the crawl job to complete. + * @param pollInterval - Time in seconds for job status checks. + * @param idempotencyKey - Optional idempotency key for the request. + * @returns The response from the crawl operation. */ async crawlUrl( url: string, - params: Params | null = null, + params?: CrawlParams | CrawlParamsV0, waitUntilDone: boolean = true, pollInterval: number = 2, idempotencyKey?: string - ): Promise<CrawlResponse | any> { + ): Promise<CrawlResponse | CrawlResponseV0 | CrawlStatusResponse | CrawlStatusResponseV0> { const headers = this.prepareHeaders(idempotencyKey); - let jsonData: Params = { url }; - if (params) { - jsonData = { ...jsonData, ...params }; - } + let jsonData: any = { url, ...params }; try { const response: AxiosResponse = await this.postRequest( - this.apiUrl + "/v0/crawl", + this.apiUrl + `/${this.version}/crawl`, jsonData, headers ); if (response.status === 200) { - const jobId: string = response.data.jobId; + const id: string = this.version == 'v0' ? response.data.jobId : response.data.id; + let checkUrl: string | undefined = undefined; if (waitUntilDone) { - return this.monitorJobStatus(jobId, headers, pollInterval); + if (this.version == 'v1') { checkUrl = response.data.url } + return this.monitorJobStatus(id, headers, pollInterval, checkUrl); } else { - return { success: true, jobId }; + if (this.version == 'v0') { + return { + success: true, + jobId: id + } as CrawlResponseV0; + } else { + return { + success: true, + id: id + } as CrawlResponse; + } } } else { this.handleError(response, "start crawl job"); } } catch (error: any) { - console.log(error); - throw new Error(error.message); + if (error.response.data.error) { + throw new Error(`Request failed with status code ${error.response.status}. Error: ${error.response.data.error} ${error.response.data.details ? ` - ${JSON.stringify(error.response.data.details)}` : ''}`); + } else { + throw new Error(error.message); + } } return { success: false, error: "Internal server error." }; } /** * Checks the status of a crawl job using the Firecrawl API. - * @param {string} jobId - The job ID of the crawl operation. - * @returns {Promise<JobStatusResponse>} The response containing the job status. + * @param id - The ID of the crawl operation. + * @returns The response containing the job status.
*/ - async checkCrawlStatus(jobId: string): Promise<JobStatusResponse> { + async checkCrawlStatus(id?: string): Promise<CrawlStatusResponse | CrawlStatusResponseV0> { + if (!id) { + throw new Error("No crawl ID provided"); + } + const headers: AxiosRequestHeaders = this.prepareHeaders(); try { const response: AxiosResponse = await this.getRequest( - this.apiUrl + `/v0/crawl/status/${jobId}`, + this.version == 'v1' ? + this.apiUrl + `/${this.version}/crawl/${id}` : + this.apiUrl + `/${this.version}/crawl/status/${id}`, headers ); if (response.status === 200) { - return { - success: true, - status: response.data.status, - current: response.data.current, - current_url: response.data.current_url, - current_step: response.data.current_step, - total: response.data.total, - data: response.data.data, - partial_data: !response.data.data - ? response.data.partial_data - : undefined, - }; + if (this.version == 'v0') { + return { + success: true, + status: response.data.status, + current: response.data.current, + current_url: response.data.current_url, + current_step: response.data.current_step, + total: response.data.total, + data: response.data.data, + partial_data: !response.data.data + ? response.data.partial_data + : undefined, + } as CrawlStatusResponseV0; + } else if (this.version == 'v1') { + return { + success: true, + status: response.data.status, + totalCount: response.data.totalCount, + creditsUsed: response.data.creditsUsed, + expiresAt: new Date(response.data.expiresAt), + next: response.data.next, + data: response.data.data, + error: response.data.error + } as CrawlStatusResponse; + } } else { this.handleError(response, "check crawl status"); } } catch (error: any) { throw new Error(error.message); } - return { - success: false, - status: "unknown", - current: 0, - current_url: "", - current_step: "", - total: 0, - error: "Internal server error.", - }; + + if (this.version == 'v0') { + return { + success: false, + status: "unknown", + current: 0, + current_url: "", + current_step: "", + total: 0, + error: "Internal server error.", + } as CrawlStatusResponseV0; + } else { + return { + success: false, + error: "Internal server error.", + } as CrawlStatusResponse; + } + } + + async mapUrl(url: string, params?: MapParams): Promise<MapResponse> { + if (this.version == 'v0') { + throw new Error("Map is not supported in v0"); + } + const headers = this.prepareHeaders(); + let jsonData: { url: string } & MapParams = { url, ...params }; + + try { + const response: AxiosResponse = await this.postRequest( + this.apiUrl + `/${this.version}/map`, + jsonData, + headers + ); + if (response.status === 200) { + return response.data as MapResponse; + } else { + this.handleError(response, "map"); + } + } catch (error: any) { + throw new Error(error.message); + } + return { success: false, error: "Internal server error." } as MapResponse; } /** * Prepares the headers for an API request. - * @returns {AxiosRequestHeaders} The prepared headers. + * @param idempotencyKey - Optional key to ensure idempotency. + * @returns The prepared headers. */ prepareHeaders(idempotencyKey?: string): AxiosRequestHeaders { return { @@ -331,14 +597,14 @@ /** * Sends a POST request to the specified URL. - * @param {string} url - The URL to send the request to. - * @param {Params} data - The data to send in the request. - * @param {AxiosRequestHeaders} headers - The headers for the request. - * @returns {Promise<AxiosResponse>} The response from the POST request. + * @param url - The URL to send the request to. + * @param data - The data to send in the request.
+ * @param headers - The headers for the request. + * @returns The response from the POST request. */ postRequest( url: string, - data: Params, + data: any, headers: AxiosRequestHeaders ): Promise<AxiosResponse> { return axios.post(url, data, { headers }); @@ -346,9 +612,9 @@ /** * Sends a GET request to the specified URL. - * @param {string} url - The URL to send the request to. - * @param {AxiosRequestHeaders} headers - The headers for the request. - * @returns {Promise<AxiosResponse>} The response from the GET request. + * @param url - The URL to send the request to. + * @param headers - The headers for the request. + * @returns The response from the GET request. */ getRequest( url: string, @@ -359,31 +625,38 @@ /** * Monitors the status of a crawl job until completion or failure. - * @param {string} jobId - The job ID of the crawl operation. - * @param {AxiosRequestHeaders} headers - The headers for the request. - * @param {number} timeout - Timeout in seconds for job status checks. - * @returns {Promise<any>} The final job status or data. + * @param id - The ID of the crawl operation. + * @param headers - The headers for the request. + * @param checkInterval - Interval in seconds for job status checks. + * @returns The final job status or data. */ async monitorJobStatus( - jobId: string, + id: string, headers: AxiosRequestHeaders, - checkInterval: number - ): Promise<any> { + checkInterval: number, + checkUrl?: string + ): Promise<CrawlStatusResponse | any> { + let apiUrl: string = ''; while (true) { + if (this.version == 'v1') { + apiUrl = checkUrl ?? this.apiUrl + `/v1/crawl/${id}`; + } else if (this.version == 'v0') { + apiUrl = checkUrl ?? this.apiUrl + `/v0/crawl/status/${id}`; + } const statusResponse: AxiosResponse = await this.getRequest( - this.apiUrl + `/v0/crawl/status/${jobId}`, + apiUrl, headers ); if (statusResponse.status === 200) { const statusData = statusResponse.data; if (statusData.status === "completed") { if ("data" in statusData) { - return statusData.data; + return this.version == 'v0' ? statusData.data : statusData; } else { throw new Error("Crawl job completed but no data was returned"); } } else if ( - ["active", "paused", "pending", "queued"].includes(statusData.status) + ["active", "paused", "pending", "queued", "scraping"].includes(statusData.status) ) { if (checkInterval < 2) { checkInterval = 2; diff --git a/apps/js-sdk/firecrawl/tsconfig.json b/apps/js-sdk/firecrawl/tsconfig.json index d7764a46..56f13ced 100644 --- a/apps/js-sdk/firecrawl/tsconfig.json +++ b/apps/js-sdk/firecrawl/tsconfig.json @@ -11,7 +11,7 @@ // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ /* Language and Environment */ - "target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ + "target": "es2020", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ // "jsx": "preserve", /* Specify what JSX code is generated. */ // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ @@ -25,9 +25,9 @@ // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ /* Modules */ - "module": "NodeNext", /* Specify what module code is generated.
*/ + "module": "commonjs", /* Specify what module code is generated. */ "rootDir": "./src", /* Specify the root folder within your source files. */ - "moduleResolution": "nodenext", /* Specify how TypeScript looks up a file from a given module specifier. */ + "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */ // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ diff --git a/apps/js-sdk/firecrawl/types/index.d.ts b/apps/js-sdk/firecrawl/types/index.d.ts index 91a58043..bd6cfc20 100644 --- a/apps/js-sdk/firecrawl/types/index.d.ts +++ b/apps/js-sdk/firecrawl/types/index.d.ts @@ -73,16 +73,16 @@ export interface ScrapeResponse { error?: string; } /** -* Response interface for searching operations. -*/ + * Response interface for searching operations. + */ export interface SearchResponse { success: boolean; data?: FirecrawlDocument[]; error?: string; } /** -* Response interface for crawling operations. -*/ + * Response interface for crawling operations. + */ export interface CrawlResponse { success: boolean; jobId?: string; @@ -90,24 +90,28 @@ export interface CrawlResponse { error?: string; } /** -* Response interface for job status checks. -*/ + * Response interface for job status checks. + */ export interface JobStatusResponse { success: boolean; status: string; + current?: number; + current_url?: string; + current_step?: string; + total?: number; jobId?: string; data?: FirecrawlDocument[]; partial_data?: FirecrawlDocument[]; error?: string; } /** - * Generic parameter interface. - */ + * Generic parameter interface. 
+ */ export interface Params { [key: string]: any; extractorOptions?: { extractionSchema: z.ZodSchema | any; - mode?: "llm-extraction" | "llm-extraction-from-raw-html"; + mode?: "llm-extraction"; extractionPrompt?: string; }; } diff --git a/apps/js-sdk/package-lock.json b/apps/js-sdk/package-lock.json index 2bf3f001..ca337062 100644 --- a/apps/js-sdk/package-lock.json +++ b/apps/js-sdk/package-lock.json @@ -13,6 +13,7 @@ "axios": "^1.6.8", "ts-node": "^10.9.2", "typescript": "^5.4.5", + "uuid": "^10.0.0", "zod": "^3.23.8" }, "devDependencies": { @@ -450,6 +451,15 @@ "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==" }, + "node_modules/@types/node": { + "version": "20.14.11", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.11.tgz", + "integrity": "sha512-kprQpL8MMeszbz6ojB5/tU8PLN4kesnN8Gjzw349rDlNgsSzg90lAVj3llK99Dh7JON+t9AuscPPFW6mPbTnSA==", + "peer": true, + "dependencies": { + "undici-types": "~5.26.4" + } + }, "node_modules/acorn": { "version": "8.11.3", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz", @@ -728,6 +738,24 @@ "node": ">=14.17" } }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "peer": true + }, + "node_modules/uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/v8-compile-cache-lib": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", @@ -750,9 +778,9 @@ } }, "node_modules/zod-to-json-schema": { - "version": "3.23.0", - "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.23.0.tgz", - "integrity": "sha512-az0uJ243PxsRIa2x1WmNE/pnuA05gUq/JB8Lwe1EDCCL/Fz9MgjYQ0fPlyc2Tcv6aF2ZA7WM5TWaRZVEFaAIag==", + "version": "3.23.1", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.23.1.tgz", + "integrity": "sha512-oT9INvydob1XV0v1d2IadrR74rLtDInLvDFfAa1CG0Pmg/vxATk7I2gSelfj271mbzeM4Da0uuDQE/Nkj3DWNw==", "peerDependencies": { "zod": "^3.23.3" } diff --git a/apps/js-sdk/package.json b/apps/js-sdk/package.json index 0e93fe3c..2d2c36e8 100644 --- a/apps/js-sdk/package.json +++ b/apps/js-sdk/package.json @@ -15,6 +15,7 @@ "axios": "^1.6.8", "ts-node": "^10.9.2", "typescript": "^5.4.5", + "uuid": "^10.0.0", "zod": "^3.23.8" }, "devDependencies": { diff --git a/apps/test-suite/package.json b/apps/test-suite/package.json index 1728ab37..91c95801 100644 --- a/apps/test-suite/package.json +++ b/apps/test-suite/package.json @@ -24,6 +24,7 @@ "devDependencies": { "@types/jest": "^29.5.12", "@types/supertest": "^6.0.2", + "artillery": "^2.0.19", "typescript": "^5.4.5" } } diff --git a/apps/test-suite/pnpm-lock.yaml b/apps/test-suite/pnpm-lock.yaml index 58f193c9..bf714b55 100644 --- a/apps/test-suite/pnpm-lock.yaml +++ b/apps/test-suite/pnpm-lock.yaml @@ -10,7 +10,7 @@ importers: dependencies: '@anthropic-ai/sdk': specifier: ^0.24.3 - version: 0.24.3 + version: 
0.24.3(encoding@0.1.13) '@dqbd/tiktoken': specifier: ^1.0.14 version: 1.0.15 @@ -22,10 +22,10 @@ importers: version: 16.4.5 jest: specifier: ^29.7.0 - version: 29.7.0(@types/node@20.14.9) + version: 29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)) openai: specifier: ^4.40.2 - version: 4.52.2 + version: 4.52.2(encoding@0.1.13) playwright: specifier: ^1.43.1 version: 1.45.0 @@ -34,7 +34,7 @@ importers: version: 7.0.0 ts-jest: specifier: ^29.1.2 - version: 29.1.5(@babel/core@7.24.5)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.24.5))(jest@29.7.0(@types/node@20.14.9))(typescript@5.4.5) + version: 29.1.5(@babel/core@7.24.5)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.24.5))(jest@29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)))(typescript@5.4.5) devDependencies: '@types/jest': specifier: ^29.5.12 @@ -42,12 +42,19 @@ importers: '@types/supertest': specifier: ^6.0.2 version: 6.0.2 + artillery: + specifier: ^2.0.19 + version: 2.0.19(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.4.5) typescript: specifier: ^5.4.5 version: 5.4.5 packages: + '@alcalzone/ansi-tokenize@0.1.3': + resolution: {integrity: sha512-3yWxPTq3UQ/FY9p1ErPxIyfT64elWaMvM9lIHnaqpyft63tkxodF5aUElYHrdisWve5cETkh1+KBw1yJuW0aRw==} + engines: {node: '>=14.13.1'} + '@ampproject/remapping@2.3.0': resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==} engines: {node: '>=6.0.0'} @@ -55,6 +62,220 @@ packages: '@anthropic-ai/sdk@0.24.3': resolution: {integrity: sha512-916wJXO6T6k8R6BAAcLhLPv/pnLGy7YSEBZXZ1XTFbLcTZE8oTy3oDW9WJf9KKZwMvVcePIfoTSvzXHRcGxkQQ==} + '@artilleryio/int-commons@2.10.0': + resolution: {integrity: sha512-CukRix3yxcsbjPTPhIyXN7qZ6f/3W+LQtF96RxuZ7L3P0F7y7t4NswPSll2+zDkAMvvBgFojgPL+bFf2EDIiOA==} + + '@artilleryio/int-core@2.14.0': + resolution: {integrity: sha512-u0q5p5aWE7DRNRnfmj8JzXqNFitoBKQ4/N/Uur4PXcZCPzB7yQWppRPRJqFUh14zmC/UybDMui1EnbDhBqGGIg==} + + '@artilleryio/sketches-js@2.1.1': + resolution: {integrity: sha512-H3D50vDb37E3NGYXY0eUFAm5++moElaqoAu0MWYZhgzaA3IT2E67bRCL8U4LKHuVf/MgDZk14uawIjc4WVjOUQ==} + + '@aws-crypto/sha256-browser@5.2.0': + resolution: {integrity: sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==} + + '@aws-crypto/sha256-js@5.2.0': + resolution: {integrity: sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==} + engines: {node: '>=16.0.0'} + + '@aws-crypto/supports-web-crypto@5.2.0': + resolution: {integrity: sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==} + + '@aws-crypto/util@5.2.0': + resolution: {integrity: sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==} + + '@aws-sdk/client-cloudwatch@3.629.0': + resolution: {integrity: sha512-dMEyyA9EQCLCsZMAMyM2wL/gX99AVxRjhlgap53XkkGi9GgiCer4wLMK+2Nhpu+ncCcQEzEMlVV35YC843T8BQ==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/client-cognito-identity@3.629.0': + resolution: {integrity: sha512-bWwp3f8XLCQgXmSWqDMjIb4WHWaEgMEX5D/WLAq7FHMiikdeJgpZIhWcDV05nEW/f9wGS6cxT3ZyXc/PRXMWrQ==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/client-sso-oidc@3.629.0': + resolution: {integrity: sha512-3if0LauNJPqubGYf8vnlkp+B3yAeKRuRNxfNbHlE6l510xWGcKK/ZsEmiFmfePzKKSRrDh/cxMFMScgOrXptNg==} + engines: {node: 
'>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sts': ^3.629.0 + + '@aws-sdk/client-sso@3.629.0': + resolution: {integrity: sha512-2w8xU4O0Grca5HmT2dXZ5fF0g39RxODtmoqHJDsK5DSt750LqDG4w3ktmBvQs3+SrpkkJOjlX5v/hb2PCxVbww==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/client-sts@3.629.0': + resolution: {integrity: sha512-RjOs371YwnSVGxhPjuluJKaxl4gcPYTAky0nPjwBime0i9/iS9nI8R8l5j7k7ec9tpFWjBPvNnThCU07pvjdzw==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/core@3.629.0': + resolution: {integrity: sha512-+/ShPU/tyIBM3oY1cnjgNA/tFyHtlWq+wXF9xEKRv19NOpYbWQ+xzNwVjGq8vR07cCRqy/sDQLWPhxjtuV/FiQ==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-cognito-identity@3.629.0': + resolution: {integrity: sha512-Cdq7Q0OGdze0/GWClJMcyQRfQmZFryxDIHVFP1FVoQPeKucHU5ZFVPEs+U4UOfs7XquG/+NYCX/jTPdI4ATfgQ==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-env@3.620.1': + resolution: {integrity: sha512-ExuILJ2qLW5ZO+rgkNRj0xiAipKT16Rk77buvPP8csR7kkCflT/gXTyzRe/uzIiETTxM7tr8xuO9MP/DQXqkfg==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-http@3.622.0': + resolution: {integrity: sha512-VUHbr24Oll1RK3WR8XLUugLpgK9ZuxEm/NVeVqyFts1Ck9gsKpRg1x4eH7L7tW3SJ4TDEQNMbD7/7J+eoL2svg==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-ini@3.629.0': + resolution: {integrity: sha512-r9fI7BABARvVDp77DBUImQzYdvarAIdhbvpCEZib0rlpvfWu3zxE9KZcapCAAi0MPjxeDfb7RMehFQIkAP7mYw==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sts': ^3.629.0 + + '@aws-sdk/credential-provider-node@3.629.0': + resolution: {integrity: sha512-868hnVOLlXOBHk91Rl0jZIRgr/M4WJCa0nOrW9A9yidsQxuZp9P0vshDmm4hMvNZadmPIfo0Rra2MpA4RELoCw==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-process@3.620.1': + resolution: {integrity: sha512-hWqFMidqLAkaV9G460+1at6qa9vySbjQKKc04p59OT7lZ5cO5VH5S4aI05e+m4j364MBROjjk2ugNvfNf/8ILg==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-sso@3.629.0': + resolution: {integrity: sha512-Lf4XOuj6jamxgGZGrVojERh5S+NS2t2S4CUOnAu6tJ5U0GPlpjhINUKlcVxJBpsIXudMGW1nkumAd3+kazCPig==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-web-identity@3.621.0': + resolution: {integrity: sha512-w7ASSyfNvcx7+bYGep3VBgC3K6vEdLmlpjT7nSIHxxQf+WSdvy+HynwJosrpZax0sK5q0D1Jpn/5q+r5lwwW6w==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sts': ^3.621.0 + + '@aws-sdk/credential-providers@3.630.0': + resolution: {integrity: sha512-fuF/lAZ5pj0DooT8wA1lcrR8vtzBNdTHA5dAmmV930eDoT5XZ+/NeQnclWds40xGj2KDi5YEhuMwOqEZqEQ0Aw==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/middleware-host-header@3.620.0': + resolution: {integrity: sha512-VMtPEZwqYrII/oUkffYsNWY9PZ9xpNJpMgmyU0rlDQ25O1c0Hk3fJmZRe6pEkAJ0omD7kLrqGl1DUjQVxpd/Rg==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/middleware-logger@3.609.0': + resolution: {integrity: sha512-S62U2dy4jMDhDFDK5gZ4VxFdWzCtLzwbYyFZx2uvPYTECkepLUfzLic2BHg2Qvtu4QjX+oGE3P/7fwaGIsGNuQ==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/middleware-recursion-detection@3.620.0': + resolution: {integrity: sha512-nh91S7aGK3e/o1ck64sA/CyoFw+gAYj2BDOnoNa6ouyCrVJED96ZXWbhye/fz9SgmNUZR2g7GdVpiLpMKZoI5w==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/middleware-user-agent@3.620.0': + resolution: {integrity: sha512-bvS6etn+KsuL32ubY5D3xNof1qkenpbJXf/ugGXbg0n98DvDFQ/F+SMLxHgbnER5dsKYchNnhmtI6/FC3HFu/A==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/region-config-resolver@3.614.0': + resolution: {integrity: sha512-vDCeMXvic/LU0KFIUjpC3RiSTIkkvESsEfbVHiHH0YINfl8HnEqR5rj+L8+phsCeVg2+LmYwYxd5NRz4PHxt5g==} + engines: 
{node: '>=16.0.0'} + + '@aws-sdk/token-providers@3.614.0': + resolution: {integrity: sha512-okItqyY6L9IHdxqs+Z116y5/nda7rHxLvROxtAJdLavWTYDydxrZstImNgGWTeVdmc0xX2gJCI77UYUTQWnhRw==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sso-oidc': ^3.614.0 + + '@aws-sdk/types@3.609.0': + resolution: {integrity: sha512-+Tqnh9w0h2LcrUsdXyT1F8mNhXz+tVYBtP19LpeEGntmvHwa2XzvLUCWpoIAIVsHp5+HdB2X9Sn0KAtmbFXc2Q==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/util-endpoints@3.614.0': + resolution: {integrity: sha512-wK2cdrXHH4oz4IomV/yrGkftU9A+ITB6nFL+rxxyO78is2ifHJpFdV4aqk4LSkXYPi6CXWNru/Dqc7yiKXgJPw==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/util-locate-window@3.568.0': + resolution: {integrity: sha512-3nh4TINkXYr+H41QaPelCceEB2FXP3fxp93YZXB/kqJvX0U9j0N0Uk45gvsjmEPzG8XxkPEeLIfT2I1M7A6Lig==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/util-user-agent-browser@3.609.0': + resolution: {integrity: sha512-fojPU+mNahzQ0YHYBsx0ZIhmMA96H+ZIZ665ObU9tl+SGdbLneVZVikGve+NmHTQwHzwkFsZYYnVKAkreJLAtA==} + + '@aws-sdk/util-user-agent-node@3.614.0': + resolution: {integrity: sha512-15ElZT88peoHnq5TEoEtZwoXTXRxNrk60TZNdpl/TUBJ5oNJ9Dqb5Z4ryb8ofN6nm9aFf59GVAerFDz8iUoHBA==} + engines: {node: '>=16.0.0'} + peerDependencies: + aws-crt: '>=1.0.0' + peerDependenciesMeta: + aws-crt: + optional: true + + '@azure/abort-controller@1.1.0': + resolution: {integrity: sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==} + engines: {node: '>=12.0.0'} + + '@azure/abort-controller@2.1.2': + resolution: {integrity: sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==} + engines: {node: '>=18.0.0'} + + '@azure/arm-containerinstance@9.1.0': + resolution: {integrity: sha512-N9T3/HJwWXvJuz7tin+nO+DYYCTGHILJ5Die3TtdF8Wd1ITfXGqB0vY/wOnspUu/AGojhaIKGmawAfPdw2kX8w==} + engines: {node: '>=14.0.0'} + + '@azure/core-auth@1.7.2': + resolution: {integrity: sha512-Igm/S3fDYmnMq1uKS38Ae1/m37B3zigdlZw+kocwEhh5GjyKjPrXKO2J6rzpC1wAxrNil/jX9BJRqBshyjnF3g==} + engines: {node: '>=18.0.0'} + + '@azure/core-client@1.9.2': + resolution: {integrity: sha512-kRdry/rav3fUKHl/aDLd/pDLcB+4pOFwPPTVEExuMyaI5r+JBbMWqRbCY1pn5BniDaU3lRxO9eaQ1AmSMehl/w==} + engines: {node: '>=18.0.0'} + + '@azure/core-http-compat@2.1.2': + resolution: {integrity: sha512-5MnV1yqzZwgNLLjlizsU3QqOeQChkIXw781Fwh1xdAqJR5AA32IUaq6xv1BICJvfbHoa+JYcaij2HFkhLbNTJQ==} + engines: {node: '>=18.0.0'} + + '@azure/core-lro@2.7.2': + resolution: {integrity: sha512-0YIpccoX8m/k00O7mDDMdJpbr6mf1yWo2dfmxt5A8XVZVVMz2SSKaEbMCeJRvgQ0IaSlqhjT47p4hVIRRy90xw==} + engines: {node: '>=18.0.0'} + + '@azure/core-paging@1.6.2': + resolution: {integrity: sha512-YKWi9YuCU04B55h25cnOYZHxXYtEvQEbKST5vqRga7hWY9ydd3FZHdeQF8pyh+acWZvppw13M/LMGx0LABUVMA==} + engines: {node: '>=18.0.0'} + + '@azure/core-rest-pipeline@1.16.3': + resolution: {integrity: sha512-VxLk4AHLyqcHsfKe4MZ6IQ+D+ShuByy+RfStKfSjxJoL3WBWq17VNmrz8aT8etKzqc2nAeIyLxScjpzsS4fz8w==} + engines: {node: '>=18.0.0'} + + '@azure/core-tracing@1.1.2': + resolution: {integrity: sha512-dawW9ifvWAWmUm9/h+/UQ2jrdvjCJ7VJEuCJ6XVNudzcOwm53BFZH4Q845vjfgoUAM8ZxokvVNxNxAITc502YA==} + engines: {node: '>=18.0.0'} + + '@azure/core-util@1.9.2': + resolution: {integrity: sha512-l1Qrqhi4x1aekkV+OlcqsJa4AnAkj5p0JV8omgwjaV9OAbP41lvrMvs+CptfetKkeEaGRGSzby7sjPZEX7+kkQ==} + engines: {node: '>=18.0.0'} + + '@azure/core-xml@1.4.3': + resolution: {integrity: sha512-D6G7FEmDiTctPKuWegX2WTrS1enKZwqYwdKTO6ZN6JMigcCehlT0/CYl+zWpI9vQ9frwwp7GQT3/owaEXgnOsA==} + engines: {node: '>=18.0.0'} + + 
'@azure/identity@4.4.1': + resolution: {integrity: sha512-DwnG4cKFEM7S3T+9u05NstXU/HN0dk45kPOinUyNKsn5VWwpXd9sbPKEg6kgJzGbm1lMuhx9o31PVbCtM5sfBA==} + engines: {node: '>=18.0.0'} + + '@azure/logger@1.1.4': + resolution: {integrity: sha512-4IXXzcCdLdlXuCG+8UKEwLA1T1NHqUfanhXYHiQTn+6sfWCZXduqbtXDGceg3Ce5QxTGo7EqmbV6Bi+aqKuClQ==} + engines: {node: '>=18.0.0'} + + '@azure/msal-browser@3.21.0': + resolution: {integrity: sha512-BAwcFsVvOrYzKuUZHhFuvRykUmQGq6lDxst2qGnjxnpNZc3d/tnVPcmhgvUdeKl28VSE0ltgBzT3HkdpDtz9rg==} + engines: {node: '>=0.8.0'} + + '@azure/msal-common@14.14.1': + resolution: {integrity: sha512-2Q3tqNz/PZLfSr8BvcHZVpRRfSn4MjGSqjj9J+HlBsmbf1Uu4P0WeXnemjTJwwx9KrmplsrN3UkZ/LPOR720rw==} + engines: {node: '>=0.8.0'} + + '@azure/msal-node@2.13.0': + resolution: {integrity: sha512-DhP97ycs7qlCVzzzWGzJiwAFyFj5okno74E4FUZ61oCLfKh4IxA1kxirqzrWuYZWpBe9HVPL6GA4NvmlEOBN5Q==} + engines: {node: '>=16'} + + '@azure/storage-blob@12.24.0': + resolution: {integrity: sha512-l8cmWM4C7RoNCBOImoFMxhTXe1Lr+8uQ/IgnhRNMpfoA9bAFWoLG4XrWm6O5rKXortreVQuD+fc1hbzWklOZbw==} + engines: {node: '>=18.0.0'} + + '@azure/storage-queue@12.23.0': + resolution: {integrity: sha512-koVDpx/lXl3bx6GiyitIsLZ4rtywpTlfwKXiuTDif+dY6PhgSyN9mrq9AsHXaHQnx2CCpmoIzRSV5n4GoQGcmg==} + engines: {node: '>=18.0.0'} + '@babel/code-frame@7.24.2': resolution: {integrity: sha512-y5+tLQyV8pg3fsiln67BVLD1P13Eg4lh5RW9mF0zUuvLrv9uIQ4MCL+CRT+FTsBlBjcIan6PGsLcBN0m3ClUyQ==} engines: {node: '>=6.9.0'} @@ -219,12 +440,60 @@ packages: resolution: {integrity: sha512-6mQNsaLeXTw0nxYUYu+NSa4Hx4BlF1x1x8/PMFbiR+GBSr+2DkECc69b8hgy2frEodNcvPffeH8YfWd3LI6jhQ==} engines: {node: '>=6.9.0'} + '@base2/pretty-print-object@1.0.1': + resolution: {integrity: sha512-4iri8i1AqYHJE2DstZYkyEprg6Pq6sKx3xn5FpySk9sNhH7qN2LLlHJCfDTZRILNwQNPD7mATWM0TBui7uC1pA==} + '@bcoe/v8-coverage@0.2.3': resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} + '@colors/colors@1.5.0': + resolution: {integrity: sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==} + engines: {node: '>=0.1.90'} + + '@cspotcode/source-map-support@0.8.1': + resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} + engines: {node: '>=12'} + + '@dependents/detective-less@4.1.0': + resolution: {integrity: sha512-KrkT6qO5NxqNfy68sBl6CTSoJ4SNDIS5iQArkibhlbGU4LaDukZ3q2HIkh8aUKDio6o4itU4xDR7t82Y2eP1Bg==} + engines: {node: '>=14'} + '@dqbd/tiktoken@1.0.15': resolution: {integrity: sha512-a6I67K1xUkuqcuwulobIJiLikkoE7egMaviI1Jg5bxSn2V7QGqXsGE3jTKr8UIOU/o74mAAd5TkeXFNBtaKF4A==} + '@grpc/grpc-js@1.11.1': + resolution: {integrity: sha512-gyt/WayZrVPH2w/UTLansS7F9Nwld472JxxaETamrM8HNlsa+jSLNyKAZmhxI2Me4c3mQHFiS1wWHDY1g1Kthw==} + engines: {node: '>=12.10.0'} + + '@grpc/proto-loader@0.7.13': + resolution: {integrity: sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==} + engines: {node: '>=6'} + hasBin: true + + '@hapi/hoek@9.3.0': + resolution: {integrity: sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ==} + + '@hapi/topo@5.1.0': + resolution: {integrity: sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==} + + '@isaacs/cliui@8.0.2': + resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} + engines: {node: '>=12'} + + 
'@isaacs/ts-node-temp-fork-for-pr-2009@10.9.7': + resolution: {integrity: sha512-9f0bhUr9TnwwpgUhEpr3FjxSaH/OHaARkE2F9fM0lS4nIs2GNerrvGwQz493dk0JKlTaGYVrKbq36vA/whZ34g==} + hasBin: true + peerDependencies: + '@swc/core': '>=1.2.50' + '@swc/wasm': '>=1.2.50' + '@types/node': '*' + typescript: '>=4.2' + peerDependenciesMeta: + '@swc/core': + optional: true + '@swc/wasm': + optional: true + '@istanbuljs/load-nyc-config@1.1.0': resolution: {integrity: sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==} engines: {node: '>=8'} @@ -317,15 +586,540 @@ packages: '@jridgewell/trace-mapping@0.3.25': resolution: {integrity: sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==} + '@jridgewell/trace-mapping@0.3.9': + resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + + '@js-sdsl/ordered-map@4.4.2': + resolution: {integrity: sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==} + + '@ngneat/falso@7.2.0': + resolution: {integrity: sha512-283EXBFd05kCbGuGSXgmvhCsQYEYzvD/eJaE7lxd05qRB0tgREvZX7TRlJ1KSp8nHxoK6Ws029G1Y30mt4IVAA==} + + '@nodelib/fs.scandir@2.1.5': + resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} + engines: {node: '>= 8'} + + '@nodelib/fs.stat@2.0.5': + resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} + engines: {node: '>= 8'} + + '@nodelib/fs.walk@1.2.8': + resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} + engines: {node: '>= 8'} + + '@npmcli/agent@2.2.2': + resolution: {integrity: sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@npmcli/fs@3.1.1': + resolution: {integrity: sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + '@npmcli/git@5.0.8': + resolution: {integrity: sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@npmcli/installed-package-contents@2.1.0': + resolution: {integrity: sha512-c8UuGLeZpm69BryRykLuKRyKFZYJsZSCT4aVY5ds4omyZqJ172ApzgfKJ5eV/r3HgLdUYgFVe54KSFVjKoe27w==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + hasBin: true + + '@npmcli/node-gyp@3.0.0': + resolution: {integrity: sha512-gp8pRXC2oOxu0DUE1/M3bYtb1b3/DbJ5aM113+XJBgfXdussRAsX0YOrOhdd8WvnAR6auDBvJomGAkLKA5ydxA==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + '@npmcli/package-json@5.2.0': + resolution: {integrity: sha512-qe/kiqqkW0AGtvBjL8TJKZk/eBBSpnJkUWvHdQ9jM2lKHXRYYJuyNpJPlJw3c8QjC2ow6NZYiLExhUaeJelbxQ==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@npmcli/promise-spawn@7.0.2': + resolution: {integrity: sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@npmcli/redact@1.1.0': + resolution: {integrity: sha512-PfnWuOkQgu7gCbnSsAisaX7hKOdZ4wSAhAzH3/ph5dSGau52kCRrMMGbiSQLwyTZpgldkZ49b0brkOr1AzGBHQ==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@npmcli/run-script@7.0.4': + resolution: {integrity: sha512-9ApYM/3+rBt9V80aYg6tZfzj3UWdiYyCt7gJUD1VJKvWF5nwKDSICXbYIQbspFTq6TOpbsEtIC0LArB8d9PFmg==} + engines: {node: 
^16.14.0 || >=18.0.0} + + '@oclif/core@2.16.0': + resolution: {integrity: sha512-dL6atBH0zCZl1A1IXCKJgLPrM/wR7K+Wi401E/IvqsK8m2iCHW+0TEOGrans/cuN3oTW+uxIyJFHJ8Im0k4qBw==} + engines: {node: '>=14.0.0'} + + '@oclif/plugin-help@5.2.20': + resolution: {integrity: sha512-u+GXX/KAGL9S10LxAwNUaWdzbEBARJ92ogmM7g3gDVud2HioCmvWQCDohNRVZ9GYV9oKwZ/M8xwd6a1d95rEKQ==} + engines: {node: '>=12.0.0'} + + '@oclif/plugin-not-found@2.4.3': + resolution: {integrity: sha512-nIyaR4y692frwh7wIHZ3fb+2L6XEecQwRDIb4zbEam0TvaVmBQWZoColQyWA84ljFBPZ8XWiQyTz+ixSwdRkqg==} + engines: {node: '>=12.0.0'} + + '@opentelemetry/api-logs@0.41.2': + resolution: {integrity: sha512-JEV2RAqijAFdWeT6HddYymfnkiRu2ASxoTBr4WsnGJhOjWZkEy6vp+Sx9ozr1NaIODOa2HUyckExIqQjn6qywQ==} + engines: {node: '>=14'} + + '@opentelemetry/api-logs@0.43.0': + resolution: {integrity: sha512-0CXMOYPXgAdLM2OzVkiUfAL6QQwWVhnMfUXCqLsITY42FZ9TxAhZIHkoc4mfVxvPuXsBnRYGR8UQZX86p87z4A==} + engines: {node: '>=14'} + + '@opentelemetry/api@1.9.0': + resolution: {integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==} + engines: {node: '>=8.0.0'} + + '@opentelemetry/context-async-hooks@1.25.1': + resolution: {integrity: sha512-UW/ge9zjvAEmRWVapOP0qyCvPulWU6cQxGxDbWEFfGOj1VBBZAuOqTo3X6yWmDTD3Xe15ysCZChHncr2xFMIfQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/core@1.15.2': + resolution: {integrity: sha512-+gBv15ta96WqkHZaPpcDHiaz0utiiHZVfm2YOYSqFGrUaJpPkMoSuLBB58YFQGi6Rsb9EHos84X6X5+9JspmLw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.5.0' + + '@opentelemetry/core@1.17.0': + resolution: {integrity: sha512-tfnl3h+UefCgx1aeN2xtrmr6BmdWGKXypk0pflQR0urFS40aE88trnkOMc2HTJZbMrqEEl4HsaBeFhwLVXsrJg==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.7.0' + + '@opentelemetry/core@1.25.1': + resolution: {integrity: sha512-GeT/l6rBYWVQ4XArluLVB6WWQ8flHbdb6r2FCHC3smtdOAbrJBIv35tpV/yp9bmYUJf+xmZpu9DRTIeJVhFbEQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/exporter-metrics-otlp-grpc@0.41.2': + resolution: {integrity: sha512-gQuCcd5QSMkfi1XIriWAoak/vaRvFzpvtzh2hjziIvbnA3VtoGD3bDb2dzEzOA1iSWO0/tHwnBsSmmUZsETyOA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-metrics-otlp-http@0.41.2': + resolution: {integrity: sha512-+YeIcL4nuldWE89K8NBLImpXCvih04u1MBnn8EzvoywG2TKR5JC3CZEPepODIxlsfGSgP8W5khCEP1NHZzftYw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-metrics-otlp-proto@0.41.2': + resolution: {integrity: sha512-OLNs6wF84uhxn8TJ8Bv1q2ltdJqjKA9oUEtICcUDDzXIiztPxZ9ur/4xdMk9T3ZJeFMfrhj8eYDkpETBy+fjCg==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-trace-otlp-grpc@0.43.0': + resolution: {integrity: sha512-h/oofzwyONMcAeBXD6+E6+foFQg9CPadBFcKAGoMIyVSK7iZgtK5DLEwAF4jz5MhfxWNmwZjHXFRc0GqCRx/tA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/exporter-trace-otlp-http@0.41.2': + resolution: {integrity: sha512-Y0fGLipjZXLMelWtlS1/MDtrPxf25oM408KukRdkN31a1MEFo4h/ZkNwS7ZfmqHGUa+4rWRt2bi6JBiqy7Ytgw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/exporter-trace-otlp-proto@0.41.2': + resolution: {integrity: 
sha512-IGZga9IIckqYE3IpRE9FO9G5umabObIrChlXUHYpMJtDgx797dsb3qXCvLeuAwB+HoB8NsEZstlzmLnoa6/HmA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/exporter-zipkin@1.25.1': + resolution: {integrity: sha512-RmOwSvkimg7ETwJbUOPTMhJm9A9bG1U8s7Zo3ajDh4zM7eYcycQ0dM7FbLD6NXWbI2yj7UY4q8BKinKYBQksyw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/otlp-exporter-base@0.41.2': + resolution: {integrity: sha512-pfwa6d+Dax3itZcGWiA0AoXeVaCuZbbqUTsCtOysd2re8C2PWXNxDONUfBWsn+KgxAdi+ljwTjJGiaVLDaIEvQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/otlp-exporter-base@0.43.0': + resolution: {integrity: sha512-LXNtRFVuPRXB9q0qdvrLikQ3NtT9Jmv255Idryz3RJPhOh/Fa03sBASQoj3D55OH3xazmA90KFHfhJ/d8D8y4A==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/otlp-grpc-exporter-base@0.41.2': + resolution: {integrity: sha512-OErK8dYjXG01XIMIpmOV2SzL9ctkZ0Nyhf2UumICOAKtgLvR5dG1JMlsNVp8Jn0RzpsKc6Urv7JpP69wzRXN+A==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/otlp-grpc-exporter-base@0.43.0': + resolution: {integrity: sha512-oOpqtDJo9BBa1+nD6ID1qZ55ZdTwEwSSn2idMobw8jmByJKaanVLdr9SJKsn5T9OBqo/c5QY2brMf0TNZkobJQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/otlp-proto-exporter-base@0.41.2': + resolution: {integrity: sha512-BxmEMiP6tHiFroe5/dTt9BsxCci7BTLtF7A6d4DKHLiLweWWZxQ9l7hON7qt/IhpKrQcAFD1OzZ1Gq2ZkNzhCw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/otlp-transformer@0.41.2': + resolution: {integrity: sha512-jJbPwB0tNu2v+Xi0c/v/R3YBLJKLonw1p+v3RVjT2VfzeUyzSp/tBeVdY7RZtL6dzZpA9XSmp8UEfWIFQo33yA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.5.0' + + '@opentelemetry/otlp-transformer@0.43.0': + resolution: {integrity: sha512-KXYmgzWdVBOD5NvPmGW1nEMJjyQ8gK3N8r6pi4HvmEhTp0v4T13qDSax4q0HfsqmbPJR355oqQSJUnu1dHNutw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.7.0' + + '@opentelemetry/resources@1.15.2': + resolution: {integrity: sha512-xmMRLenT9CXmm5HMbzpZ1hWhaUowQf8UB4jMjFlAxx1QzQcsD3KFNAVX/CAWzFPtllTyTplrA4JrQ7sCH3qmYw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.5.0' + + '@opentelemetry/resources@1.17.0': + resolution: {integrity: sha512-+u0ciVnj8lhuL/qGRBPeVYvk7fL+H/vOddfvmOeJaA1KC+5/3UED1c9KoZQlRsNT5Kw1FaK8LkY2NVLYfOVZQw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.7.0' + + '@opentelemetry/resources@1.25.1': + resolution: {integrity: sha512-pkZT+iFYIZsVn6+GzM0kSX+u3MSLCY9md+lIJOoKl/P+gJFfxJte/60Usdp8Ce4rOs8GduUpSPNe1ddGyDT1sQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/sdk-logs@0.41.2': + resolution: {integrity: sha512-smqKIw0tTW15waj7BAPHFomii5c3aHnSE4LQYTszGoK5P9nZs8tEAIpu15UBxi3aG31ZfsLmm4EUQkjckdlFrw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.4.0 <1.5.0' + '@opentelemetry/api-logs': '>=0.39.1' + + '@opentelemetry/sdk-logs@0.43.0': + resolution: {integrity: sha512-JyJ2BBRKm37Mc4cSEhFmsMl5ASQn1dkGhEWzAAMSlhPtLRTv5PfvJwhR+Mboaic/eDLAlciwsgijq8IFlf6IgQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.4.0 <1.7.0' + '@opentelemetry/api-logs': '>=0.39.1' + + '@opentelemetry/sdk-metrics@1.15.2': + 
resolution: {integrity: sha512-9aIlcX8GnhcsAHW/Wl8bzk4ZnWTpNlLtud+fxUfBtFATu6OZ6TrGrF4JkT9EVrnoxwtPIDtjHdEsSjOqisY/iA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.5.0' + + '@opentelemetry/sdk-metrics@1.17.0': + resolution: {integrity: sha512-HlWM27yGmYuwCoVRe3yg2PqKnIsq0kEF0HQgvkeDWz2NYkq9fFaSspR6kvjxUTbghAlZrabiqbgyKoYpYaXS3w==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.7.0' + + '@opentelemetry/sdk-metrics@1.25.1': + resolution: {integrity: sha512-9Mb7q5ioFL4E4dDrc4wC/A3NTHDat44v4I3p2pLPSxRvqUbDIQyMVr9uK+EU69+HWhlET1VaSrRzwdckWqY15Q==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + + '@opentelemetry/sdk-trace-base@1.15.2': + resolution: {integrity: sha512-BEaxGZbWtvnSPchV98qqqqa96AOcb41pjgvhfzDij10tkBhIu9m0Jd6tZ1tJB5ZHfHbTffqYVYE0AOGobec/EQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.5.0' + + '@opentelemetry/sdk-trace-base@1.17.0': + resolution: {integrity: sha512-2T5HA1/1iE36Q9eg6D4zYlC4Y4GcycI1J6NsHPKZY9oWfAxWsoYnRlkPfUqyY5XVtocCo/xHpnJvGNHwzT70oQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.7.0' + + '@opentelemetry/sdk-trace-base@1.25.1': + resolution: {integrity: sha512-C8k4hnEbc5FamuZQ92nTOp8X/diCY56XUTnMiv9UTuJitCzaNNHAVsdm5+HLCdI8SLQsLWIrG38tddMxLVoftw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/semantic-conventions@1.15.2': + resolution: {integrity: sha512-CjbOKwk2s+3xPIMcd5UNYQzsf+v94RczbdNix9/kQh38WiQkM90sUOi3if8eyHFgiBjBjhwXrA7W3ydiSQP9mw==} + engines: {node: '>=14'} + + '@opentelemetry/semantic-conventions@1.17.0': + resolution: {integrity: sha512-+fguCd2d8d2qruk0H0DsCEy2CTK3t0Tugg7MhZ/UQMvmewbZLNnJ6heSYyzIZWG5IPfAXzoj4f4F/qpM7l4VBA==} + engines: {node: '>=14'} + + '@opentelemetry/semantic-conventions@1.25.1': + resolution: {integrity: sha512-ZDjMJJQRlyk8A1KZFCc+bCbsyrn1wTwdNt56F7twdfUfnHUZUq77/WfONCj8p72NZOyP7pNTdUWSTYC3GTbuuQ==} + engines: {node: '>=14'} + + '@pkgjs/parseargs@0.11.0': + resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} + engines: {node: '>=14'} + + '@playwright/browser-chromium@1.45.3': + resolution: {integrity: sha512-UVPW8HveE8SghaahoMy8CfG0QdJ2mO0BZLOcPT8nlQh7Z97Gkv4e3Ad69D1oCqM3m3zYkDPAiGB+hOASNS0d/g==} + engines: {node: '>=18'} + + '@playwright/test@1.45.3': + resolution: {integrity: sha512-UKF4XsBfy+u3MFWEH44hva1Q8Da28G6RFtR2+5saw+jgAFQV5yYnB1fu68Mz7fO+5GJF3wgwAIs0UelU8TxFrA==} + engines: {node: '>=18'} + hasBin: true + + '@protobufjs/aspromise@1.1.2': + resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} + + '@protobufjs/base64@1.1.2': + resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==} + + '@protobufjs/codegen@2.0.4': + resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==} + + '@protobufjs/eventemitter@1.1.0': + resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==} + + '@protobufjs/fetch@1.1.0': + resolution: {integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==} + + '@protobufjs/float@1.0.2': + resolution: {integrity: 
sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==} + + '@protobufjs/inquire@1.1.0': + resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==} + + '@protobufjs/path@1.1.2': + resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==} + + '@protobufjs/pool@1.1.0': + resolution: {integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==} + + '@protobufjs/utf8@1.1.0': + resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==} + + '@sideway/address@4.1.5': + resolution: {integrity: sha512-IqO/DUQHUkPeixNQ8n0JA6102hT9CmaljNTPmQ1u8MEhBo/R4Q8eKLN/vGZxuebwOroDB4cbpjheD4+/sKFK4Q==} + + '@sideway/formula@3.0.1': + resolution: {integrity: sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg==} + + '@sideway/pinpoint@2.0.0': + resolution: {integrity: sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==} + + '@sigstore/bundle@2.3.2': + resolution: {integrity: sha512-wueKWDk70QixNLB363yHc2D2ItTgYiMTdPwK8D9dKQMR3ZQ0c35IxP5xnwQ8cNLoCgCRcHf14kE+CLIvNX1zmA==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@sigstore/core@1.1.0': + resolution: {integrity: sha512-JzBqdVIyqm2FRQCulY6nbQzMpJJpSiJ8XXWMhtOX9eKgaXXpfNOF53lzQEjIydlStnd/eFtuC1dW4VYdD93oRg==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@sigstore/protobuf-specs@0.3.2': + resolution: {integrity: sha512-c6B0ehIWxMI8wiS/bj6rHMPqeFvngFV7cDU/MY+B16P9Z3Mp9k8L93eYZ7BYzSickzuqAQqAq0V956b3Ju6mLw==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@sigstore/sign@2.3.2': + resolution: {integrity: sha512-5Vz5dPVuunIIvC5vBb0APwo7qKA4G9yM48kPWJT+OEERs40md5GoUR1yedwpekWZ4m0Hhw44m6zU+ObsON+iDA==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@sigstore/tuf@2.3.4': + resolution: {integrity: sha512-44vtsveTPUpqhm9NCrbU8CWLe3Vck2HO1PNLw7RIajbB7xhtn5RBPm1VNSCMwqGYHhDsBJG8gDF0q4lgydsJvw==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@sigstore/verify@1.2.1': + resolution: {integrity: sha512-8iKx79/F73DKbGfRf7+t4dqrc0bRr0thdPrxAtCKWRm/F0tG71i6O1rvlnScncJLLBZHn3h8M3c1BSUAb9yu8g==} + engines: {node: ^16.14.0 || >=18.0.0} + '@sinclair/typebox@0.27.8': resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==} + '@sindresorhus/is@4.6.0': + resolution: {integrity: sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==} + engines: {node: '>=10'} + '@sinonjs/commons@3.0.1': resolution: {integrity: sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==} '@sinonjs/fake-timers@10.3.0': resolution: {integrity: sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==} + '@smithy/abort-controller@3.1.1': + resolution: {integrity: sha512-MBJBiidoe+0cTFhyxT8g+9g7CeVccLM0IOKKUMCNQ1CNMJ/eIfoo0RTfVrXOONEI1UCN1W+zkiHSbzUNE9dZtQ==} + engines: {node: '>=16.0.0'} + + '@smithy/config-resolver@3.0.5': + resolution: {integrity: sha512-SkW5LxfkSI1bUC74OtfBbdz+grQXYiPYolyu8VfpLIjEoN/sHVBlLeGXMQ1vX4ejkgfv6sxVbQJ32yF2cl1veA==} + engines: {node: '>=16.0.0'} + + '@smithy/core@2.3.2': + resolution: {integrity: sha512-in5wwt6chDBcUv1Lw1+QzZxN9fBffi+qOixfb65yK4sDuKG7zAUO9HAFqmVzsZM3N+3tTyvZjtnDXePpvp007Q==} + engines: {node: '>=16.0.0'} + + 
'@smithy/credential-provider-imds@3.2.0': + resolution: {integrity: sha512-0SCIzgd8LYZ9EJxUjLXBmEKSZR/P/w6l7Rz/pab9culE/RWuqelAKGJvn5qUOl8BgX8Yj5HWM50A5hiB/RzsgA==} + engines: {node: '>=16.0.0'} + + '@smithy/fetch-http-handler@3.2.4': + resolution: {integrity: sha512-kBprh5Gs5h7ug4nBWZi1FZthdqSM+T7zMmsZxx0IBvWUn7dK3diz2SHn7Bs4dQGFDk8plDv375gzenDoNwrXjg==} + + '@smithy/hash-node@3.0.3': + resolution: {integrity: sha512-2ctBXpPMG+B3BtWSGNnKELJ7SH9e4TNefJS0cd2eSkOOROeBnnVBnAy9LtJ8tY4vUEoe55N4CNPxzbWvR39iBw==} + engines: {node: '>=16.0.0'} + + '@smithy/invalid-dependency@3.0.3': + resolution: {integrity: sha512-ID1eL/zpDULmHJbflb864k72/SNOZCADRc9i7Exq3RUNJw6raWUSlFEQ+3PX3EYs++bTxZB2dE9mEHTQLv61tw==} + + '@smithy/is-array-buffer@2.2.0': + resolution: {integrity: sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==} + engines: {node: '>=14.0.0'} + + '@smithy/is-array-buffer@3.0.0': + resolution: {integrity: sha512-+Fsu6Q6C4RSJiy81Y8eApjEB5gVtM+oFKTffg+jSuwtvomJJrhUJBu2zS8wjXSgH/g1MKEWrzyChTBe6clb5FQ==} + engines: {node: '>=16.0.0'} + + '@smithy/middleware-compression@3.0.7': + resolution: {integrity: sha512-ide8RSj0HWHq8uGryx1PuhI/0p+xgrrG+atDBgmv1ScIVIBrH7hqk2cfXyZ3+zQYeD2z95iDn75U1BHwlSwhag==} + engines: {node: '>=16.0.0'} + + '@smithy/middleware-content-length@3.0.5': + resolution: {integrity: sha512-ILEzC2eyxx6ncej3zZSwMpB5RJ0zuqH7eMptxC4KN3f+v9bqT8ohssKbhNR78k/2tWW+KS5Spw+tbPF4Ejyqvw==} + engines: {node: '>=16.0.0'} + + '@smithy/middleware-endpoint@3.1.0': + resolution: {integrity: sha512-5y5aiKCEwg9TDPB4yFE7H6tYvGFf1OJHNczeY10/EFF8Ir8jZbNntQJxMWNfeQjC1mxPsaQ6mR9cvQbf+0YeMw==} + engines: {node: '>=16.0.0'} + + '@smithy/middleware-retry@3.0.14': + resolution: {integrity: sha512-7ZaWZJOjUxa5hgmuMspyt8v/zVsh0GXYuF7OvCmdcbVa/xbnKQoYC+uYKunAqRGTkxjOyuOCw9rmFUFOqqC0eQ==} + engines: {node: '>=16.0.0'} + + '@smithy/middleware-serde@3.0.3': + resolution: {integrity: sha512-puUbyJQBcg9eSErFXjKNiGILJGtiqmuuNKEYNYfUD57fUl4i9+mfmThtQhvFXU0hCVG0iEJhvQUipUf+/SsFdA==} + engines: {node: '>=16.0.0'} + + '@smithy/middleware-stack@3.0.3': + resolution: {integrity: sha512-r4klY9nFudB0r9UdSMaGSyjyQK5adUyPnQN/ZM6M75phTxOdnc/AhpvGD1fQUvgmqjQEBGCwpnPbDm8pH5PapA==} + engines: {node: '>=16.0.0'} + + '@smithy/node-config-provider@3.1.4': + resolution: {integrity: sha512-YvnElQy8HR4vDcAjoy7Xkx9YT8xZP4cBXcbJSgm/kxmiQu08DwUwj8rkGnyoJTpfl/3xYHH+d8zE+eHqoDCSdQ==} + engines: {node: '>=16.0.0'} + + '@smithy/node-http-handler@3.1.4': + resolution: {integrity: sha512-+UmxgixgOr/yLsUxcEKGH0fMNVteJFGkmRltYFHnBMlogyFdpzn2CwqWmxOrfJELhV34v0WSlaqG1UtE1uXlJg==} + engines: {node: '>=16.0.0'} + + '@smithy/property-provider@3.1.3': + resolution: {integrity: sha512-zahyOVR9Q4PEoguJ/NrFP4O7SMAfYO1HLhB18M+q+Z4KFd4V2obiMnlVoUFzFLSPeVt1POyNWneHHrZaTMoc/g==} + engines: {node: '>=16.0.0'} + + '@smithy/protocol-http@4.1.0': + resolution: {integrity: sha512-dPVoHYQ2wcHooGXg3LQisa1hH0e4y0pAddPMeeUPipI1tEOqL6A4N0/G7abeq+K8wrwSgjk4C0wnD1XZpJm5aA==} + engines: {node: '>=16.0.0'} + + '@smithy/querystring-builder@3.0.3': + resolution: {integrity: sha512-vyWckeUeesFKzCDaRwWLUA1Xym9McaA6XpFfAK5qI9DKJ4M33ooQGqvM4J+LalH4u/Dq9nFiC8U6Qn1qi0+9zw==} + engines: {node: '>=16.0.0'} + + '@smithy/querystring-parser@3.0.3': + resolution: {integrity: sha512-zahM1lQv2YjmznnfQsWbYojFe55l0SLG/988brlLv1i8z3dubloLF+75ATRsqPBboUXsW6I9CPGE5rQgLfY0vQ==} + engines: {node: '>=16.0.0'} + + '@smithy/service-error-classification@3.0.3': + resolution: {integrity: 
sha512-Jn39sSl8cim/VlkLsUhRFq/dKDnRUFlfRkvhOJaUbLBXUsLRLNf9WaxDv/z9BjuQ3A6k/qE8af1lsqcwm7+DaQ==} + engines: {node: '>=16.0.0'} + + '@smithy/shared-ini-file-loader@3.1.4': + resolution: {integrity: sha512-qMxS4hBGB8FY2GQqshcRUy1K6k8aBWP5vwm8qKkCT3A9K2dawUwOIJfqh9Yste/Bl0J2lzosVyrXDj68kLcHXQ==} + engines: {node: '>=16.0.0'} + + '@smithy/signature-v4@4.1.0': + resolution: {integrity: sha512-aRryp2XNZeRcOtuJoxjydO6QTaVhxx/vjaR+gx7ZjaFgrgPRyZ3HCTbfwqYj6ZWEBHkCSUfcaymKPURaByukag==} + engines: {node: '>=16.0.0'} + + '@smithy/smithy-client@3.1.12': + resolution: {integrity: sha512-wtm8JtsycthkHy1YA4zjIh2thJgIQ9vGkoR639DBx5lLlLNU0v4GARpQZkr2WjXue74nZ7MiTSWfVrLkyD8RkA==} + engines: {node: '>=16.0.0'} + + '@smithy/types@3.3.0': + resolution: {integrity: sha512-IxvBBCTFDHbVoK7zIxqA1ZOdc4QfM5HM7rGleCuHi7L1wnKv5Pn69xXJQ9hgxH60ZVygH9/JG0jRgtUncE3QUA==} + engines: {node: '>=16.0.0'} + + '@smithy/url-parser@3.0.3': + resolution: {integrity: sha512-pw3VtZtX2rg+s6HMs6/+u9+hu6oY6U7IohGhVNnjbgKy86wcIsSZwgHrFR+t67Uyxvp4Xz3p3kGXXIpTNisq8A==} + + '@smithy/util-base64@3.0.0': + resolution: {integrity: sha512-Kxvoh5Qtt0CDsfajiZOCpJxgtPHXOKwmM+Zy4waD43UoEMA+qPxxa98aE/7ZhdnBFZFXMOiBR5xbcaMhLtznQQ==} + engines: {node: '>=16.0.0'} + + '@smithy/util-body-length-browser@3.0.0': + resolution: {integrity: sha512-cbjJs2A1mLYmqmyVl80uoLTJhAcfzMOyPgjwAYusWKMdLeNtzmMz9YxNl3/jRLoxSS3wkqkf0jwNdtXWtyEBaQ==} + + '@smithy/util-body-length-node@3.0.0': + resolution: {integrity: sha512-Tj7pZ4bUloNUP6PzwhN7K386tmSmEET9QtQg0TgdNOnxhZvCssHji+oZTUIuzxECRfG8rdm2PMw2WCFs6eIYkA==} + engines: {node: '>=16.0.0'} + + '@smithy/util-buffer-from@2.2.0': + resolution: {integrity: sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==} + engines: {node: '>=14.0.0'} + + '@smithy/util-buffer-from@3.0.0': + resolution: {integrity: sha512-aEOHCgq5RWFbP+UDPvPot26EJHjOC+bRgse5A8V3FSShqd5E5UN4qc7zkwsvJPPAVsf73QwYcHN1/gt/rtLwQA==} + engines: {node: '>=16.0.0'} + + '@smithy/util-config-provider@3.0.0': + resolution: {integrity: sha512-pbjk4s0fwq3Di/ANL+rCvJMKM5bzAQdE5S/6RL5NXgMExFAi6UgQMPOm5yPaIWPpr+EOXKXRonJ3FoxKf4mCJQ==} + engines: {node: '>=16.0.0'} + + '@smithy/util-defaults-mode-browser@3.0.14': + resolution: {integrity: sha512-0iwTgKKmAIf+vFLV8fji21Jb2px11ktKVxbX6LIDPAUJyWQqGqBVfwba7xwa1f2FZUoolYQgLvxQEpJycXuQ5w==} + engines: {node: '>= 10.0.0'} + + '@smithy/util-defaults-mode-node@3.0.14': + resolution: {integrity: sha512-e9uQarJKfXApkTMMruIdxHprhcXivH1flYCe8JRDTzkkLx8dA3V5J8GZlST9yfDiRWkJpZJlUXGN9Rc9Ade3OQ==} + engines: {node: '>= 10.0.0'} + + '@smithy/util-endpoints@2.0.5': + resolution: {integrity: sha512-ReQP0BWihIE68OAblC/WQmDD40Gx+QY1Ez8mTdFMXpmjfxSyz2fVQu3A4zXRfQU9sZXtewk3GmhfOHswvX+eNg==} + engines: {node: '>=16.0.0'} + + '@smithy/util-hex-encoding@3.0.0': + resolution: {integrity: sha512-eFndh1WEK5YMUYvy3lPlVmYY/fZcQE1D8oSf41Id2vCeIkKJXPcYDCZD+4+xViI6b1XSd7tE+s5AmXzz5ilabQ==} + engines: {node: '>=16.0.0'} + + '@smithy/util-middleware@3.0.3': + resolution: {integrity: sha512-l+StyYYK/eO3DlVPbU+4Bi06Jjal+PFLSMmlWM1BEwyLxZ3aKkf1ROnoIakfaA7mC6uw3ny7JBkau4Yc+5zfWw==} + engines: {node: '>=16.0.0'} + + '@smithy/util-retry@3.0.3': + resolution: {integrity: sha512-AFw+hjpbtVApzpNDhbjNG5NA3kyoMs7vx0gsgmlJF4s+yz1Zlepde7J58zpIRIsdjc+emhpAITxA88qLkPF26w==} + engines: {node: '>=16.0.0'} + + '@smithy/util-stream@3.1.3': + resolution: {integrity: sha512-FIv/bRhIlAxC0U7xM1BCnF2aDRPq0UaelqBHkM2lsCp26mcBbgI0tCVTv+jGdsQLUmAMybua/bjDsSu8RQHbmw==} + engines: {node: '>=16.0.0'} + + '@smithy/util-uri-escape@3.0.0': + 
resolution: {integrity: sha512-LqR7qYLgZTD7nWLBecUi4aqolw8Mhza9ArpNEQ881MJJIU2sE5iHCK6TdyqqzcDLy0OPe10IY4T8ctVdtynubg==} + engines: {node: '>=16.0.0'} + + '@smithy/util-utf8@2.3.0': + resolution: {integrity: sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==} + engines: {node: '>=14.0.0'} + + '@smithy/util-utf8@3.0.0': + resolution: {integrity: sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==} + engines: {node: '>=16.0.0'} + + '@smithy/util-waiter@3.1.2': + resolution: {integrity: sha512-4pP0EV3iTsexDx+8PPGAKCQpd/6hsQBaQhqWzU4hqKPHN5epPsxKbvUTIiYIHTxaKt6/kEaqPBpu/ufvfbrRzw==} + engines: {node: '>=16.0.0'} + + '@socket.io/component-emitter@3.1.2': + resolution: {integrity: sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA==} + '@supabase/auth-js@2.64.2': resolution: {integrity: sha512-s+lkHEdGiczDrzXJ1YWt2y3bxRi+qIUnXcgkpLSrId7yjBeaXBFygNjTaoZLG02KNcYwbuZ9qkEIqmj2hF7svw==} @@ -348,6 +1142,181 @@ packages: '@supabase/supabase-js@2.44.2': resolution: {integrity: sha512-fouCwL1OxqftOwLNgdDUPlNnFuCnt30nS4kLcnTpe6NYKn1PmjxRRBFmKscgHs6FjWyU+32ZG4uBJ29+/BWiDw==} + '@szmarczak/http-timer@4.0.6': + resolution: {integrity: sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w==} + engines: {node: '>=10'} + + '@tapjs/after-each@2.0.8': + resolution: {integrity: sha512-btkpQ/BhmRyG50rezduxEZb3pMJblECvTQa41+U2ln2te1prDTlllHlpq4lOjceUksl8KFF1avDqcBqIqPzneQ==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/after@1.1.31': + resolution: {integrity: sha512-531NkYOls9PvqfnLsEDRzIWwjynoFRbUVq7pTYuA3PRIw4Ka7jA9uUjILeUurcWjaHrQNzUua0jj/Yu94f6YYw==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/asserts@2.0.8': + resolution: {integrity: sha512-57VrI0p2kAqfgHHUwowDvd31eTfDHw3HO4FSSVUCvngPGWa96R6eH9gXa9fNig4qIp4Dup+nI7gJlJfU0R80SA==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/before-each@2.0.8': + resolution: {integrity: sha512-Xjgk8/fuP7iFa5CYjFDl05p5PZGRe//VyHJNuYNzWpF1K9PNMtVdlmwplfpFmbrNrw/bIPq7R6LuiPmTBgzuOw==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/before@2.0.8': + resolution: {integrity: sha512-22ZdGSn/zOKf8J8cb3yfw5R4I/ozdHEDKL8lBWon/zsxxMMvaRTgOtFXEjb4RE+5SDrqQ4NM7ZRYPGhE7T97dw==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/chdir@1.1.4': + resolution: {integrity: sha512-axXkT5kWp2/X8l6inKyrqzUhqgvsgrWI8/0xLAdmirpFZ8H6gFxrl763Ozdm27EAmkLnnnWgFITPqUQCuB/tMA==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/config@3.1.6': + resolution: {integrity: sha512-5gkDMSLXL5798bbCdX4RdLpB4OUQeu9TXftzKmL1+1T2xbcd4q7zfDnCfOB9zTk50x2f04+4h6Q7Z1NcSKIspg==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + '@tapjs/test': 2.2.4 + + '@tapjs/core@2.1.6': + resolution: {integrity: sha512-NYMp0bl52DxXfcLmivMKvOIE14aaB9qJjdHeUbs6GZ9yxgD5w0yeiOT+gWEL+1PzZgGWRxSFEpghID1YfXAc4w==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + + '@tapjs/error-serdes@2.0.1': + resolution: {integrity: sha512-P+M4rtcfkDsUveKKmoRNF+07xpbPnRY5KrstIUOnyn483clQ7BJhsnWr162yYNCsyOj4zEfZmAJI1f8Bi7h/ZA==} + engines: {node: 16 >=16.17.0 
|| 18 >= 18.6.0 || >=20} + + '@tapjs/filter@2.0.8': + resolution: {integrity: sha512-/ps6nOS3CTh1WLfCjJnU7tS4PH4KFgEasFSVPCIFN+BasyoqDapzj4JKIlzQvppZOGTQadKH3wUakafZl7uz8w==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/fixture@2.0.8': + resolution: {integrity: sha512-LJnjeAMSozPFXzu+wQw2HJsjA9djHbTcyeMnsgiRL/Q8ffcLqAawV3SN6XKdDLdWYUg3e1fXhHspnbsouZj+xA==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/intercept@2.0.8': + resolution: {integrity: sha512-OF2Q35jtZ20bwV4hRNoca7vqIrzPFR3JR25G2rGru+fgPmq4heN0RLoh0d1O34AbrtXqra2lXkacMB/DPgb01A==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/mock@2.1.6': + resolution: {integrity: sha512-bNXKrjg/r+i/gfKij5Oo/5Md2DvGNHPSRCHQmjz3VQjpyxqK7S1FGcR0kyqJ8Nof6Wc8yIhpNOCuibj19200IQ==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/node-serialize@2.0.8': + resolution: {integrity: sha512-92oqhkmIz5wr0yRs1CPQfim5JSwHPSmoDWnQmJlYUZsY1OYgYouQm3ifnPkqK/9hJpVYzlZEQmefxehxbs2WNQ==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/processinfo@3.1.8': + resolution: {integrity: sha512-FIriEB+qqArPhmVYc1PZwRHD99myRdl7C9Oe/uts04Q2LOxQ5MEmqP9XOP8vVYzpDOYwmL8OmL6eOYt9eZlQKQ==} + engines: {node: '>=16.17'} + + '@tapjs/reporter@2.0.8': + resolution: {integrity: sha512-tZn5ZHIrFwjbi59djtdXHBwgSIZSBXdJpz2i9CZ9HEC1nFhWtIr2Jczvrz4ScfixUgA0GNFirz+q+9iA4IFMvw==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/run@2.1.7': + resolution: {integrity: sha512-Hk41E68f1x4eLBm6Rrxx4ARzZzrjwaLbKThb16+f3bGYiajmqAvBdeyNEoQpEWmW+Sv2HSlueOk2SS2P4fyetg==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + hasBin: true + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/snapshot@2.0.8': + resolution: {integrity: sha512-L0vtqWKkgnQt/XNQkvHOme9Np7ffteCNf1P0F9mz2YiJion4er1nv6pZuJoKVxXFQsbNd2k+LGyx0Iw+bIzwFg==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/spawn@2.0.8': + resolution: {integrity: sha512-vCYwynIYJNijY87uHFANe+gCu9rdGoe4GOBmghl6kwDy7eISmcN/FW5TlmrjePMNhTvrDMeYqOIAzqh3WRYmPA==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/stack@2.0.1': + resolution: {integrity: sha512-3rKbZkRkLeJl9ilV/6b80YfI4C4+OYf7iEz5/d0MIVhmVvxv0ttIy5JnZutAc4Gy9eRp5Ne5UTAIFOVY5k36cg==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + + '@tapjs/stdin@2.0.8': + resolution: {integrity: sha512-tW/exLXuDqjtH2wjptiPHXBahkdSyoppxDY56l9MG4tiz66dMN6NTCZFvQxp7+3t+lsQKqJp/74z8T/ayp+vZA==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/test@2.2.4': + resolution: {integrity: sha512-QIgq2BhMpwO9SN8I0qlwZYXAllO4xWCfJ0MgAGhc+J7p69B5p9dDNPmyOreHeXWMmk6VlNj3oWveoXb5Zn9xZQ==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + hasBin: true + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/typescript@1.4.13': + resolution: {integrity: sha512-MNs7zlhM6G3pNUIjkKXDxgNCwCGZt2bUCGtVunSTDVIrKiUlHAl4QSjQ1oTjumHlCi9gFIWiwFAvpHekzFti0w==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/worker@2.0.8': + resolution: {integrity: 
sha512-AySf2kV6OHvwgD3DrLdT2az2g4hRdoRtKsFCLdZo3jOoKte+ft/IQJEnOW7CPT0RYUskS3elv6eabYgSyTH4tg==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tsconfig/node10@1.0.11': + resolution: {integrity: sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==} + + '@tsconfig/node12@1.0.11': + resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==} + + '@tsconfig/node14@1.0.3': + resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==} + + '@tsconfig/node14@14.1.2': + resolution: {integrity: sha512-1vncsbfCZ3TBLPxesRYz02Rn7SNJfbLoDVkcZ7F/ixOV6nwxwgdhD1mdPcc5YQ413qBJ8CvMxXMFfJ7oawjo7Q==} + + '@tsconfig/node16@1.0.4': + resolution: {integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==} + + '@tsconfig/node16@16.1.3': + resolution: {integrity: sha512-9nTOUBn+EMKO6rtSZJk+DcqsfgtlERGT9XPJ5PRj/HNENPCBY1yu/JEj5wT6GLtbCLBO2k46SeXDaY0pjMqypw==} + + '@tsconfig/node18@18.2.4': + resolution: {integrity: sha512-5xxU8vVs9/FNcvm3gE07fPbn9tl6tqGGWA9tSlwsUEkBxtRnTsNmwrV8gasZ9F/EobaSv9+nu8AxUKccw77JpQ==} + + '@tsconfig/node20@20.1.4': + resolution: {integrity: sha512-sqgsT69YFeLWf5NtJ4Xq/xAF8p4ZQHlmGW74Nu2tD4+g5fAsposc4ZfaaPixVu4y01BEiDCWLRDCvDM5JOsRxg==} + + '@tufjs/canonical-json@2.0.0': + resolution: {integrity: sha512-yVtV8zsdo8qFHe+/3kw81dSLyF7D576A5cCFCi4X7B39tWT7SekaEFUnvnWJHz+9qO7qJTah1JbrDjWKqFtdWA==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@tufjs/models@2.0.1': + resolution: {integrity: sha512-92F7/SFyufn4DXsha9+QfKnN03JGqtMFMXgSHbZOo8JG59WkTni7UzAouNQDf7AuP9OAMxVOPQcqG3sB7w+kkg==} + engines: {node: ^16.14.0 || >=18.0.0} + '@types/babel__core@7.20.5': resolution: {integrity: sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==} @@ -360,12 +1329,21 @@ packages: '@types/babel__traverse@7.20.5': resolution: {integrity: sha512-WXCyOcRtH37HAUkpXhUduaxdm82b4GSlyTqajXviN4EfiuPgNYR109xMCKvpl6zPIpua0DGlMEDCq+g8EdoheQ==} + '@types/cacheable-request@6.0.3': + resolution: {integrity: sha512-IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw==} + + '@types/cli-progress@3.11.6': + resolution: {integrity: sha512-cE3+jb9WRlu+uOSAugewNpITJDt1VF8dHOopPO4IABFc3SXYL5WE/+PTz/FCdZRRfIujiWW3n3aMbv1eIGVRWA==} + '@types/cookiejar@2.1.5': resolution: {integrity: sha512-he+DHOWReW0nghN24E1WUqM0efK4kI9oTqDm6XmK8ZPe2djZ90BSNdGnIyCLzCPw7/pogPlGbzI2wHGGmi4O/Q==} '@types/graceful-fs@4.1.9': resolution: {integrity: sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==} + '@types/http-cache-semantics@4.0.4': + resolution: {integrity: sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==} + '@types/istanbul-lib-coverage@2.0.6': resolution: {integrity: sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==} @@ -378,6 +1356,9 @@ packages: '@types/jest@29.5.12': resolution: {integrity: sha512-eDC8bTvT/QhYdxJAulQikueigY5AsdBRH2yDKW3yveW7svY3+DzN84/2NUgkw10RTiJbWqZrTtoGVdYlvFJdLw==} + '@types/keyv@3.1.4': + resolution: {integrity: sha512-BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg==} + '@types/methods@1.1.4': resolution: {integrity: 
sha512-ymXWVrDiCxTBE3+RIrrP533E70eA+9qu7zdWoHuOmGujkYtzf4HQF96b8nwHLqhuf4ykX61IGRIB38CC6/sImQ==} @@ -396,6 +1377,9 @@ packages: '@types/phoenix@1.6.5': resolution: {integrity: sha512-xegpDuR+z0UqG9fwHqNoy3rI7JDlvaPh2TY47Fl80oq6g+hXT+c/LEuE43X48clZ6lOfANl5WrPur9fYO1RJ/w==} + '@types/responselike@1.0.3': + resolution: {integrity: sha512-H/+L+UkTV33uf49PH5pCAUBVPNj2nDBXTN+qS1dOwyyg24l3CcicicCA7ca+HMvJBZcFgl5r8e+RR6elsb4Lyw==} + '@types/stack-utils@2.0.3': resolution: {integrity: sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==} @@ -414,22 +1398,81 @@ packages: '@types/yargs@17.0.32': resolution: {integrity: sha512-xQ67Yc/laOG5uMfX/093MRlGGCIBzZMarVa+gfNKJxWAIgykYpVGkBdbqEzGDDfCrVUj6Hiff4mTZ5BA6TmAog==} + '@typescript-eslint/types@5.62.0': + resolution: {integrity: sha512-87NVngcbVXUahrRTqIK27gD2t5Cu1yuCXxbLcFtCzZGlfyVWWh8mLHkoxzjsB6DDNnvdL+fW8MiwPEJyGJQDgQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + '@typescript-eslint/typescript-estree@5.62.0': + resolution: {integrity: sha512-CmcQ6uY7b9y694lKdRB8FEel7JbU/40iSAPomu++SjLMntB+2Leay2LO6i8VnJk58MtE9/nQSFIH6jpyRWyYzA==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + + '@typescript-eslint/visitor-keys@5.62.0': + resolution: {integrity: sha512-07ny+LHRzQXepkGg6w0mFY41fVUNBrL2Roj/++7V1txKugfjm/Ci/qSND03r2RhlJhJYMcTn9AhhSSqQp0Ysyw==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + abbrev@2.0.0: + resolution: {integrity: sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + abort-controller@3.0.0: resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} engines: {node: '>=6.5'} + acorn-jsx@5.3.2: + resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + + acorn-walk@8.3.3: + resolution: {integrity: sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw==} + engines: {node: '>=0.4.0'} + + acorn@8.12.1: + resolution: {integrity: sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==} + engines: {node: '>=0.4.0'} + hasBin: true + + agent-base@6.0.2: + resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==} + engines: {node: '>= 6.0.0'} + + agent-base@7.1.1: + resolution: {integrity: sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==} + engines: {node: '>= 14'} + agentkeepalive@4.5.0: resolution: {integrity: sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==} engines: {node: '>= 8.0.0'} + aggregate-error@3.1.0: + resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==} + engines: {node: '>=8'} + + amdefine@1.0.1: + resolution: {integrity: sha512-S2Hw0TtNkMJhIabBwIojKL9YHO5T0n5eNqWJ7Lrlel/zDbftQpxpapi8tZs3X1HWa+u+QeydGmzzNU0m09+Rcg==} + engines: {node: '>=0.4.2'} + ansi-escapes@4.3.2: resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} engines: {node: '>=8'} + ansi-escapes@6.2.1: + resolution: {integrity: 
sha512-4nJ3yixlEthEJ9Rk4vPcdBRkZvQZlYyu8j4/Mqz5sgIkddmEnH2Yj2ZrnP9S3tQOvSNRUIgVNF/1yPpRAGNRig==} + engines: {node: '>=14.16'} + ansi-regex@5.0.1: resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} engines: {node: '>=8'} + ansi-regex@6.0.1: + resolution: {integrity: sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==} + engines: {node: '>=12'} + ansi-styles@3.2.1: resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==} engines: {node: '>=4'} @@ -442,19 +1485,120 @@ packages: resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} engines: {node: '>=10'} + ansi-styles@6.2.1: + resolution: {integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==} + engines: {node: '>=12'} + + ansicolors@0.3.2: + resolution: {integrity: sha512-QXu7BPrP29VllRxH8GwB7x5iX5qWKAAMLqKQGWTeLWVlNHNOpVMJ91dsxQAIWXpjuW5wqvxu3Jd/nRjrJ+0pqg==} + anymatch@3.1.3: resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} engines: {node: '>= 8'} + app-module-path@2.2.0: + resolution: {integrity: sha512-gkco+qxENJV+8vFcDiiFhuoSvRXb2a/QPqpSoWhVz829VNJfOTnELbBmPmNKFxf3xdNnw4DWCkzkDaavcX/1YQ==} + + archiver-utils@2.1.0: + resolution: {integrity: sha512-bEL/yUb/fNNiNTuUz979Z0Yg5L+LzLxGJz8x79lYmR54fmTIb6ob/hNQgkQnIUDWIFjZVQwl9Xs356I6BAMHfw==} + engines: {node: '>= 6'} + + archiver-utils@3.0.4: + resolution: {integrity: sha512-KVgf4XQVrTjhyWmx6cte4RxonPLR9onExufI1jhvw/MQ4BB6IsZD5gT8Lq+u/+pRkWna/6JoHpiQioaqFP5Rzw==} + engines: {node: '>= 10'} + + archiver@5.3.2: + resolution: {integrity: sha512-+25nxyyznAXF7Nef3y0EbBeqmGZgeN/BxHX29Rs39djAfaFalmQ89SE6CWyDCHzGL0yt/ycBtNOmGTW0FyGWNw==} + engines: {node: '>= 10'} + + arg@4.1.3: + resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} + argparse@1.0.10: resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} + array-union@2.1.0: + resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} + engines: {node: '>=8'} + + arrivals@2.1.2: + resolution: {integrity: sha512-g3+rxhxUen2H4+PPBOz6U6pkQ4esBuQPna1rPskgK1jamBdDZeoppyB2vPUM/l0ccunwRrq4r2rKgCvc2FnrFA==} + + artillery-engine-playwright@1.16.0: + resolution: {integrity: sha512-90Gka/neaKABygcWANL/wOrI3U75Xll4yAZmBywQiTONorGL3SIizEEujGXosHLUeOgzc+3OEldP5qXfCynMOg==} + + artillery-plugin-apdex@1.10.0: + resolution: {integrity: sha512-TabM/LXhp5n3AKiCXQHl3ivwCuh7QfdV5vjYpT8di32Rd42f9AahFiOIje4aInW9u5S8qNsB78UU3ov084GxwA==} + + artillery-plugin-ensure@1.13.0: + resolution: {integrity: sha512-/FwOj4a2npaUkNsB+dtHGa5euRqi1ly0mvcqz2UawNia+5SQXVJauL0ue84uQrU0O8ercH/gzsb7cG2/RKYkwg==} + + artillery-plugin-expect@2.13.0: + resolution: {integrity: sha512-j7beHovPaR9b4tKxn6gq6QXXK+XXtZ2B6DzX3PERqPcZPA07zSPrTJfzmtbwmnAGvQZYLK5jucQ6H+ZzyimiQg==} + engines: {node: '>= 14.17.6'} + + artillery-plugin-fake-data@1.10.0: + resolution: {integrity: sha512-EQeeiIGJfxpXszn1zH91EyNprblpkME/HuHYloILExTc6My9+tcY5fezd1SEBbQ+jJ4qKB5KJyqQ6RS6HE+oBQ==} + + artillery-plugin-metrics-by-endpoint@1.13.0: + resolution: {integrity: sha512-1zKp+kIZusPDLIcYE9Yheua5RYekAMNkJr/fQ2odQaeJdSkWyS/gURvroORhYAv41LKRfAvYazW668uUY6WkKA==} + + 
artillery-plugin-publish-metrics@2.24.0: + resolution: {integrity: sha512-7a6vykigjZ5zdk41ma8supGmownU31SdQRD9hxfpKv8gLIOAlTTD25OxnjGmddF4JArhztSAqrPb+J8s/7xXRw==} + + artillery-plugin-slack@1.8.0: + resolution: {integrity: sha512-BpZZonGQRBZo1oXw0XNx7itoGKlZDClE+SzNt3SDTTFcQuvdPD6FD05Y9hDfSfG3zdEuuc9joAtCuKMmZALaeg==} + + artillery@2.0.19: + resolution: {integrity: sha512-NeD5+D7U5l8hZ3lHtUseFTwqxILN2qfl4XlQt4cH0PukA/wsOri7cR0Qg2925usCa5EkD240Dfh9r9wYvuHxlw==} + engines: {node: '>= 18.16.1'} + hasBin: true + asap@2.0.6: resolution: {integrity: sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==} + ast-module-types@5.0.0: + resolution: {integrity: sha512-JvqziE0Wc0rXQfma0HZC/aY7URXHFuZV84fJRtP8u+lhp0JYCNd5wJzVXP45t0PH0Mej3ynlzvdyITYIu0G4LQ==} + engines: {node: '>=14'} + + astral-regex@2.0.0: + resolution: {integrity: sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==} + engines: {node: '>=8'} + + async-hook-domain@4.0.1: + resolution: {integrity: sha512-bSktexGodAjfHWIrSrrqxqWzf1hWBZBpmPNZv+TYUMyWa2eoefFc6q6H1+KtdHYSz35lrhWdmXt/XK9wNEZvww==} + engines: {node: '>=16'} + + async-limiter@1.0.1: + resolution: {integrity: sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==} + + async@1.5.0: + resolution: {integrity: sha512-m9nMwCtLtz29LszVaR0q/FqsJWkrxVoQL95p7JU0us7qUx4WEcySQgwvuneYSGVyvirl81gz7agflS3V1yW14g==} + + async@2.6.4: + resolution: {integrity: sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==} + + async@3.2.5: + resolution: {integrity: sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==} + asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + auto-bind@5.0.1: + resolution: {integrity: sha512-ooviqdwwgfIfNmDwo94wlshcdzfO64XV0Cg6oDsDYBJfITDz1EngD2z7DkbvCWn+XIMsIqW27sEVF6qcpJrRcg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + available-typed-arrays@1.0.7: + resolution: {integrity: sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==} + engines: {node: '>= 0.4'} + + aws-sdk@2.1674.0: + resolution: {integrity: sha512-VTijN8+pKrf4sfM2t+ISXjypJ+k3AiP6OMzyLoWJ7jfMBtBfWbQc1rN07OndNb0CZRBBukOHoBhYDPuyae+/1Q==} + engines: {node: '>= 10.0.0'} + + axios@0.27.2: + resolution: {integrity: sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==} + babel-jest@29.7.0: resolution: {integrity: sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} @@ -483,13 +1627,44 @@ packages: balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + base64-js@1.5.1: + resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + + bignumber.js@9.1.2: + resolution: {integrity: sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug==} + + binary-extensions@2.3.0: + resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} + engines: {node: '>=8'} + + bindings@1.5.0: + resolution: {integrity: sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==} + 
+ bintrees@1.0.2: + resolution: {integrity: sha512-VOMgTMwjAaUG580SXn3LacVgjurrbMme7ZZNYGSSV7mmtY6QQRh0Eg3pwIcntQ77DErK1L0NxkbetjcoXzVwKw==} + + bl@4.1.0: + resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} + + boolbase@1.0.0: + resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==} + + bowser@2.11.0: + resolution: {integrity: sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==} + brace-expansion@1.1.11: resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} + brace-expansion@2.0.1: + resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} + braces@3.0.2: resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} engines: {node: '>=8'} + browser-or-node@1.3.0: + resolution: {integrity: sha512-0F2z/VSnLbmEeBcUrSuDH5l0HxTXdQQzLjkmBR4cYfvg1zJrKSlmIZFqyFR8oX0NrwPhy3c3HQ6i3OxMbew4Tg==} + browserslist@4.23.0: resolution: {integrity: sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==} engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} @@ -502,9 +1677,38 @@ packages: bser@2.1.1: resolution: {integrity: sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==} + buffer-crc32@0.2.13: + resolution: {integrity: sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==} + + buffer-equal-constant-time@1.0.1: + resolution: {integrity: sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==} + buffer-from@1.1.2: resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} + buffer@4.9.2: + resolution: {integrity: sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==} + + buffer@5.7.1: + resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} + + c8@9.1.0: + resolution: {integrity: sha512-mBWcT5iqNir1zIkzSPyI3NCR9EZCVI3WUD+AVO17MVWTSFNyUueXE82qTeampNtTr+ilN/5Ua3j24LgbCKjDVg==} + engines: {node: '>=14.14.0'} + hasBin: true + + cacache@18.0.4: + resolution: {integrity: sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ==} + engines: {node: ^16.14.0 || >=18.0.0} + + cacheable-lookup@5.0.4: + resolution: {integrity: sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA==} + engines: {node: '>=10.6.0'} + + cacheable-request@7.0.4: + resolution: {integrity: sha512-v+p6ongsrp0yTGbJXjgxPow2+DL93DASP4kXCDKb8/bwRtt9OEF3whggkkDkGNzgcWy2XaF4a8nZglC7uElscg==} + engines: {node: '>=8'} + call-bind@1.0.7: resolution: {integrity: sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==} engines: {node: '>= 0.4'} @@ -524,18 +1728,45 @@ packages: caniuse-lite@1.0.30001615: resolution: {integrity: sha512-1IpazM5G3r38meiae0bHRnPhz+CBQ3ZLqbQMtrg+AsTPKAXgW38JNsXkyZ+v8waCsDmPq87lmfun5Q2AGysNEQ==} + cardinal@2.1.1: + resolution: {integrity: sha512-JSr5eOgoEymtYHBjNWyjrMqet9Am2miJhlfKNdqLp6zoeAh0KN5dRAcxlecj5mAJrmQomgiOBj35xHLrFjqBpw==} + hasBin: true + chalk@2.4.2: resolution: {integrity: 
sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} engines: {node: '>=4'} + chalk@3.0.0: + resolution: {integrity: sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==} + engines: {node: '>=8'} + chalk@4.1.2: resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} engines: {node: '>=10'} + chalk@5.3.0: + resolution: {integrity: sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==} + engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} + char-regex@1.0.2: resolution: {integrity: sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==} engines: {node: '>=10'} + cheerio-select@2.1.0: + resolution: {integrity: sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==} + + cheerio@1.0.0: + resolution: {integrity: sha512-quS9HgjQpdaXOvsZz82Oz7uxtXiy6UIsIQcpBj7HRw2M63Skasm9qlDocAM7jNuaxdhpPU7c4kJN+gA5MCu4ww==} + engines: {node: '>=18.17'} + + chokidar@3.6.0: + resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==} + engines: {node: '>= 8.10.0'} + + chownr@2.0.0: + resolution: {integrity: sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==} + engines: {node: '>=10'} + ci-info@3.9.0: resolution: {integrity: sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==} engines: {node: '>=8'} @@ -543,14 +1774,61 @@ packages: cjs-module-lexer@1.3.1: resolution: {integrity: sha512-a3KdPAANPbNE4ZUv9h6LckSl9zLsYOP4MBmhIPkRaeyybt+r4UghLvq+xw/YwUcC1gqylCkL4rdVs3Lwupjm4Q==} + clean-stack@2.2.0: + resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} + engines: {node: '>=6'} + + clean-stack@3.0.1: + resolution: {integrity: sha512-lR9wNiMRcVQjSB3a7xXGLuz4cr4wJuuXlaAEbRutGowQTmlp7R72/DOgN21e8jdwblMWl9UOJMJXarX94pzKdg==} + engines: {node: '>=10'} + + cli-boxes@3.0.0: + resolution: {integrity: sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==} + engines: {node: '>=10'} + + cli-cursor@3.1.0: + resolution: {integrity: sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==} + engines: {node: '>=8'} + + cli-cursor@4.0.0: + resolution: {integrity: sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + cli-progress@3.12.0: + resolution: {integrity: sha512-tRkV3HJ1ASwm19THiiLIXLO7Im7wlTuKnvkYaTkyoAPefqjNg7W7DHKUlGRxy9vxDvbyCYQkQozvptuMkGCg8A==} + engines: {node: '>=4'} + + cli-spinners@2.9.2: + resolution: {integrity: sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==} + engines: {node: '>=6'} + + cli-table3@0.6.5: + resolution: {integrity: sha512-+W/5efTR7y5HRD7gACw9yQjqMVvEMLBHmboM/kPWam+H+Hmyrgjh6YncVKK122YZkXrLudzTuAukUw9FnMf7IQ==} + engines: {node: 10.* || >= 12.*} + + cli-truncate@3.1.0: + resolution: {integrity: sha512-wfOBkjXteqSnI59oPcJkcPl/ZmwvMMOj340qUIY1SKZCv0B9Cf4D4fAucRkIKQmsIuYK3x1rrgU7MeGRruiuiA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + cliui@8.0.1: resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} engines: {node: 
'>=12'} + clone-response@1.0.3: + resolution: {integrity: sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA==} + + clone@1.0.4: + resolution: {integrity: sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==} + engines: {node: '>=0.8'} + co@4.6.0: resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==} engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'} + code-excerpt@4.0.0: + resolution: {integrity: sha512-xxodCmBen3iy2i0WtAK8FlFNrRzjUqjRsMfho58xT/wvZU1YTM3fCnRjcy1gJPMepaRlgm/0e6w8SpWHpn3/cA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + collect-v8-coverage@1.0.2: resolution: {integrity: sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q==} @@ -571,27 +1849,86 @@ packages: resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} engines: {node: '>= 0.8'} + commander@10.0.1: + resolution: {integrity: sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==} + engines: {node: '>=14'} + component-emitter@1.3.1: resolution: {integrity: sha512-T0+barUSQRTUQASh8bx02dl+DhF54GtIDY13Y3m9oWTklKbb3Wv974meRpeZ3lp1JpLVECWWNHC4vaG2XHXouQ==} + compress-commons@4.1.2: + resolution: {integrity: sha512-D3uMHtGc/fcO1Gt1/L7i1e33VOvD4A9hfQLP+6ewd+BvG/gQ84Yh4oftEhAdjSMgBgwGL+jsppT7JYNpo6MHHg==} + engines: {node: '>= 10'} + concat-map@0.0.1: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} convert-source-map@2.0.0: resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} + convert-to-spaces@2.0.1: + resolution: {integrity: sha512-rcQ1bsQO9799wq24uE5AM2tAILy4gXGIK/njFWcVQkGNZ96edlpY+A7bjwvzjYvLDyzmG1MmMLZhpcsb+klNMQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + cookie-parser@1.4.6: + resolution: {integrity: sha512-z3IzaNjdwUC2olLIB5/ITd0/setiaFMLYiZJle7xg5Fe9KWAceil7xszYfHHBtDFYLSgJduS2Ty0P1uJdPDJeA==} + engines: {node: '>= 0.8.0'} + + cookie-signature@1.0.6: + resolution: {integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==} + + cookie@0.4.1: + resolution: {integrity: sha512-ZwrFkGJxUR3EIoXtO+yVE69Eb7KlixbaeAWfBQB9vVsNn/o+Yw69gBWSSDK825hQNdN+wF8zELf3dFNl/kxkUA==} + engines: {node: '>= 0.6'} + cookiejar@2.1.4: resolution: {integrity: sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==} + core-util-is@1.0.3: + resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==} + + crc-32@1.2.2: + resolution: {integrity: sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==} + engines: {node: '>=0.8'} + hasBin: true + + crc32-stream@4.0.3: + resolution: {integrity: sha512-NT7w2JVU7DFroFdYkeq8cywxrgjPHWkdX1wjpRQXPX5Asews3tA+Ght6lddQO5Mkumffp3X7GEqku3epj2toIw==} + engines: {node: '>= 10'} + create-jest@29.7.0: resolution: {integrity: sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} hasBin: true + create-require@1.1.1: + resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} + cross-spawn@7.0.3: resolution: {integrity: 
sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} engines: {node: '>= 8'} + css-select@5.1.0: + resolution: {integrity: sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==} + + css-what@6.1.0: + resolution: {integrity: sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==} + engines: {node: '>= 6'} + + csv-parse@4.16.3: + resolution: {integrity: sha512-cO1I/zmz4w2dcKHVvpCr7JVRu8/FymG5OEpmvsZYlccYolPBLoVGKUHgNoc4ZGkFeFlWGEDmMyBM+TTqRdW/wg==} + + datadog-metrics@0.9.3: + resolution: {integrity: sha512-BVsBX2t+4yA3tHs7DnB5H01cHVNiGJ/bHA8y6JppJDyXG7s2DLm6JaozPGpgsgVGd42Is1CHRG/yMDQpt877Xg==} + + debug@3.1.0: + resolution: {integrity: sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + debug@4.3.4: resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} engines: {node: '>=6.0'} @@ -601,6 +1938,10 @@ packages: supports-color: optional: true + decompress-response@6.0.0: + resolution: {integrity: sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==} + engines: {node: '>=10'} + dedent@1.5.3: resolution: {integrity: sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ==} peerDependencies: @@ -609,22 +1950,78 @@ packages: babel-plugin-macros: optional: true + deep-extend@0.6.0: + resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==} + engines: {node: '>=4.0.0'} + + deep-for-each@3.0.0: + resolution: {integrity: sha512-pPN+0f8jlnNP+z90qqOdxGghJU5XM6oBDhvAR+qdQzjCg5pk/7VPPvKK1GqoXEFkHza6ZS+Otzzvmr0g3VUaKw==} + deepmerge@4.3.1: resolution: {integrity: sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==} engines: {node: '>=0.10.0'} + defaults@1.0.4: + resolution: {integrity: sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==} + + defer-to-connect@2.0.1: + resolution: {integrity: sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==} + engines: {node: '>=10'} + define-data-property@1.1.4: resolution: {integrity: sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==} engines: {node: '>= 0.4'} + define-lazy-prop@2.0.0: + resolution: {integrity: sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==} + engines: {node: '>=8'} + delayed-stream@1.0.0: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: {node: '>=0.4.0'} + dependency-tree@10.0.9: + resolution: {integrity: sha512-dwc59FRIsht+HfnTVM0BCjJaEWxdq2YAvEDy4/Hn6CwS3CBWMtFnL3aZGAkQn3XCYxk/YcTDE4jX2Q7bFTwCjA==} + engines: {node: '>=14'} + hasBin: true + detect-newline@3.1.0: resolution: {integrity: sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==} engines: {node: '>=8'} + detective-amd@5.0.2: + resolution: {integrity: sha512-XFd/VEQ76HSpym80zxM68ieB77unNuoMwopU2TFT/ErUk5n4KvUTwW4beafAVUugrjV48l4BmmR0rh2MglBaiA==} + engines: {node: '>=14'} + hasBin: true + + detective-cjs@5.0.1: + resolution: {integrity: 
sha512-6nTvAZtpomyz/2pmEmGX1sXNjaqgMplhQkskq2MLrar0ZAIkHMrDhLXkRiK2mvbu9wSWr0V5/IfiTrZqAQMrmQ==} + engines: {node: '>=14'} + + detective-es6@4.0.1: + resolution: {integrity: sha512-k3Z5tB4LQ8UVHkuMrFOlvb3GgFWdJ9NqAa2YLUU/jTaWJIm+JJnEh4PsMc+6dfT223Y8ACKOaC0qcj7diIhBKw==} + engines: {node: '>=14'} + + detective-postcss@6.1.3: + resolution: {integrity: sha512-7BRVvE5pPEvk2ukUWNQ+H2XOq43xENWbH0LcdCE14mwgTBEAMoAx+Fc1rdp76SmyZ4Sp48HlV7VedUnP6GA1Tw==} + engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} + + detective-sass@5.0.3: + resolution: {integrity: sha512-YsYT2WuA8YIafp2RVF5CEfGhhyIVdPzlwQgxSjK+TUm3JoHP+Tcorbk3SfG0cNZ7D7+cYWa0ZBcvOaR0O8+LlA==} + engines: {node: '>=14'} + + detective-scss@4.0.3: + resolution: {integrity: sha512-VYI6cHcD0fLokwqqPFFtDQhhSnlFWvU614J42eY6G0s8c+MBhi9QAWycLwIOGxlmD8I/XvGSOUV1kIDhJ70ZPg==} + engines: {node: '>=14'} + + detective-stylus@4.0.0: + resolution: {integrity: sha512-TfPotjhszKLgFBzBhTOxNHDsutIxx9GTWjrL5Wh7Qx/ydxKhwUrlSFeLIn+ZaHPF+h0siVBkAQSuy6CADyTxgQ==} + engines: {node: '>=14'} + + detective-typescript@11.2.0: + resolution: {integrity: sha512-ARFxjzizOhPqs1fYC/2NMC3N4jrQ6HvVflnXBTRqNEqJuXwyKLRr9CrJwkRcV/SnZt1sNXgsF6FPm0x57Tq0rw==} + engines: {node: ^14.14.0 || >=16.0.0} + dezalgo@1.0.4: resolution: {integrity: sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==} @@ -632,10 +2029,53 @@ packages: resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + diff@4.0.2: + resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} + engines: {node: '>=0.3.1'} + + diff@5.2.0: + resolution: {integrity: sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==} + engines: {node: '>=0.3.1'} + + dir-glob@3.0.1: + resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} + engines: {node: '>=8'} + + dogapi@2.8.4: + resolution: {integrity: sha512-065fsvu5dB0o4+ENtLjZILvXMClDNH/yA9H6L8nsdcNiz9l0Hzpn7aQaCOPYXxqyzq4CRPOdwkFXUjDOXfRGbg==} + hasBin: true + + dom-serializer@2.0.0: + resolution: {integrity: sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==} + + domelementtype@2.3.0: + resolution: {integrity: sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==} + + domhandler@5.0.3: + resolution: {integrity: sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==} + engines: {node: '>= 4'} + + domutils@3.1.0: + resolution: {integrity: sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==} + dotenv@16.4.5: resolution: {integrity: sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==} engines: {node: '>=12'} + driftless@2.0.3: + resolution: {integrity: sha512-hSDKsQphnL4O0XLAiyWQ8EiM9suXH0Qd4gMtwF86b5wygGV8r95w0JcA38FOmx9N3LjFCIHLG2winLPNken4Tg==} + + eastasianwidth@0.2.0: + resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + + ecdsa-sig-formatter@1.0.11: + resolution: {integrity: sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==} + + ejs@3.1.10: + resolution: {integrity: 
sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==} + engines: {node: '>=0.10.0'} + hasBin: true + electron-to-chromium@1.4.754: resolution: {integrity: sha512-7Kr5jUdns5rL/M9wFFmMZAgFDuL2YOnanFH4OI4iFzUqyh3XOL7nAGbSlSMZdzKMIyyTpNSbqZsWG9odwLeKvA==} @@ -646,6 +2086,43 @@ packages: emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + emoji-regex@9.2.2: + resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + + encoding-sniffer@0.2.0: + resolution: {integrity: sha512-ju7Wq1kg04I3HtiYIOrUrdfdDvkyO9s5XM8QAj/bN61Yo/Vb4vgJxy5vi4Yxk01gWHbrofpPtpxM8bKger9jhg==} + + encoding@0.1.13: + resolution: {integrity: sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==} + + end-of-stream@1.4.4: + resolution: {integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==} + + engine.io-client@6.5.4: + resolution: {integrity: sha512-GeZeeRjpD2qf49cZQ0Wvh/8NJNfeXkXXcoGh+F77oEAgo9gUHwT1fCRxSNU+YEEaysOJTnsFHmM5oAcPy4ntvQ==} + + engine.io-parser@5.2.3: + resolution: {integrity: sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q==} + engines: {node: '>=10.0.0'} + + enhanced-resolve@5.17.1: + resolution: {integrity: sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg==} + engines: {node: '>=10.13.0'} + + ensure-posix-path@1.1.1: + resolution: {integrity: sha512-VWU0/zXzVbeJNXvME/5EmLuEj2TauvoaTz6aFYK1Z92JCBlDlZ3Gu0tuGR42kpW1754ywTs+QB0g5TP0oj9Zaw==} + + entities@4.5.0: + resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==} + engines: {node: '>=0.12'} + + env-paths@2.2.1: + resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==} + engines: {node: '>=6'} + + err-code@2.0.3: + resolution: {integrity: sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==} + error-ex@1.3.2: resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} @@ -657,6 +2134,11 @@ packages: resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} engines: {node: '>= 0.4'} + esbuild-wasm@0.19.12: + resolution: {integrity: sha512-Zmc4hk6FibJZBcTx5/8K/4jT3/oG1vkGTEeKJUQFCUQKimD6Q7+adp/bdVQyYJFolMKaXkQnVZdV4O5ZaTYmyQ==} + engines: {node: '>=12'} + hasBin: true + escalade@3.1.2: resolution: {integrity: sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==} engines: {node: '>=6'} @@ -669,15 +2151,58 @@ packages: resolution: {integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==} engines: {node: '>=8'} + escape-string-regexp@4.0.0: + resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} + engines: {node: '>=10'} + + escodegen@2.1.0: + resolution: {integrity: sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==} + engines: {node: '>=6.0'} + hasBin: true + + eslint-visitor-keys@3.4.3: + resolution: {integrity: 
sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + espree@9.6.1: + resolution: {integrity: sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + esprima@4.0.1: resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} engines: {node: '>=4'} hasBin: true + estraverse@5.3.0: + resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} + engines: {node: '>=4.0'} + + esutils@2.0.3: + resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} + engines: {node: '>=0.10.0'} + event-target-shim@5.0.1: resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} engines: {node: '>=6'} + eventemitter3@1.1.1: + resolution: {integrity: sha512-idmH3G0vJjQv2a5N74b+oXcOUKYBqSGJGN1eVV6ELGdUnesAO8RZsU74eaS3VfldRet8N9pFupxppBUKztrBdQ==} + + eventemitter3@4.0.7: + resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==} + + events-to-array@2.0.3: + resolution: {integrity: sha512-f/qE2gImHRa4Cp2y1stEOSgw8wTFyUdVJX7G//bMwbaV9JqISFxg99NbmVQeP7YLnDUZ2un851jlaDrlpmGehQ==} + engines: {node: '>=12'} + + events@1.1.1: + resolution: {integrity: sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==} + engines: {node: '>=0.4.x'} + + events@3.3.0: + resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} + engines: {node: '>=0.8.x'} + execa@5.1.1: resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} engines: {node: '>=10'} @@ -690,26 +2215,97 @@ packages: resolution: {integrity: sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + exponential-backoff@3.1.1: + resolution: {integrity: sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw==} + + extend@3.0.2: + resolution: {integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} + + fast-deep-equal@3.1.3: + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + + fast-glob@3.3.2: + resolution: {integrity: sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==} + engines: {node: '>=8.6.0'} + fast-json-stable-stringify@2.1.0: resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} + fast-levenshtein@3.0.0: + resolution: {integrity: sha512-hKKNajm46uNmTlhHSyZkmToAc56uZJwYq7yrciZjqOxnlfQwERDQJmHPUp7m1m9wx8vgOe8IaCKZ5Kv2k1DdCQ==} + fast-safe-stringify@2.1.1: resolution: {integrity: sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==} + fast-xml-parser@4.4.1: + resolution: {integrity: sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==} + hasBin: true + + fastest-levenshtein@1.0.16: + resolution: {integrity: 
sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg==} + engines: {node: '>= 4.9.1'} + + fastq@1.17.1: + resolution: {integrity: sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==} + fb-watchman@2.0.2: resolution: {integrity: sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==} + fflate@0.8.1: + resolution: {integrity: sha512-/exOvEuc+/iaUm105QIiOt4LpBdMTWsXxqR0HDF35vx3fmaKzw7354gTilCh5rkzEt8WYyG//ku3h3nRmd7CHQ==} + + file-uri-to-path@1.0.0: + resolution: {integrity: sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==} + + filelist@1.0.4: + resolution: {integrity: sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==} + + filing-cabinet@4.2.0: + resolution: {integrity: sha512-YZ21ryzRcyqxpyKggdYSoXx//d3sCJzM3lsYoaeg/FyXdADGJrUl+BW1KIglaVLJN5BBcMtWylkygY8zBp2MrQ==} + engines: {node: '>=14'} + hasBin: true + fill-range@7.0.1: resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} engines: {node: '>=8'} + filtrex@0.5.4: + resolution: {integrity: sha512-2phGAjWOYRf96Al6s+w/hMjObP1cRyQ95hoZApjeFO75DXN4Flh9uuUAtL3LI4fkryLa2QWdA8MArvt0GMU0pA==} + + filtrex@2.2.3: + resolution: {integrity: sha512-TL12R6SckvJdZLibXqyp4D//wXZNyCalVYGqaWwQk9zucq9dRxmrJV4oyuRq4PHFHCeV5ZdzncIc/Ybqv1Lr6Q==} + find-up@4.1.0: resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} engines: {node: '>=8'} + find-up@5.0.0: + resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} + engines: {node: '>=10'} + + follow-redirects@1.15.6: + resolution: {integrity: sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==} + engines: {node: '>=4.0'} + peerDependencies: + debug: '*' + peerDependenciesMeta: + debug: + optional: true + + for-each@0.3.3: + resolution: {integrity: sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==} + + foreground-child@3.3.0: + resolution: {integrity: sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==} + engines: {node: '>=14'} + form-data-encoder@1.7.2: resolution: {integrity: sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==} + form-data@3.0.1: + resolution: {integrity: sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==} + engines: {node: '>= 6'} + form-data@4.0.0: resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} engines: {node: '>= 6'} @@ -721,6 +2317,24 @@ packages: formidable@3.5.1: resolution: {integrity: sha512-WJWKelbRHN41m5dumb0/k8TeAx7Id/y3a+Z7QfhxP/htI9Js5zYaEDtG8uMgG0vM0lOlqnmjE99/kfpOYi/0Og==} + fromentries@1.3.2: + resolution: {integrity: sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg==} + + fs-constants@1.0.0: + resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==} + + fs-extra@10.1.0: + resolution: {integrity: sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==} + engines: {node: '>=12'} + + fs-minipass@2.1.0: + resolution: {integrity: 
sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==} + engines: {node: '>= 8'} + + fs-minipass@3.0.3: + resolution: {integrity: sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + fs.realpath@1.0.0: resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} @@ -737,10 +2351,17 @@ packages: function-bind@1.1.2: resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + function-loop@4.0.0: + resolution: {integrity: sha512-f34iQBedYF3XcI93uewZZOnyscDragxgTK/eTvVB74k3fCD0ZorOi5BV9GS4M8rz/JoNi0Kl3qX5Y9MH3S/CLQ==} + gensync@1.0.0-beta.2: resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} engines: {node: '>=6.9.0'} + get-amd-module-type@5.0.1: + resolution: {integrity: sha512-jb65zDeHyDjFR1loOVk0HQGM5WNwoGB8aLWy3LKCieMKol0/ProHkhO2X1JxojuN10vbz1qNn09MJ7tNp7qMzw==} + engines: {node: '>=14'} + get-caller-file@2.0.5: resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} engines: {node: 6.* || 8.* || >= 10.*} @@ -749,14 +2370,29 @@ packages: resolution: {integrity: sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==} engines: {node: '>= 0.4'} + get-own-enumerable-property-symbols@3.0.2: + resolution: {integrity: sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==} + get-package-type@0.1.0: resolution: {integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==} engines: {node: '>=8.0.0'} + get-stream@5.2.0: + resolution: {integrity: sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==} + engines: {node: '>=8'} + get-stream@6.0.1: resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} engines: {node: '>=10'} + glob-parent@5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + + glob@10.4.5: + resolution: {integrity: sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==} + hasBin: true + glob@7.2.3: resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} deprecated: Glob versions prior to v9 are no longer supported @@ -765,9 +2401,25 @@ packages: resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} engines: {node: '>=4'} + globby@11.1.0: + resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} + engines: {node: '>=10'} + + gonzales-pe@4.3.0: + resolution: {integrity: sha512-otgSPpUmdWJ43VXyiNgEYE4luzHCL2pz4wQ0OnDluC6Eg4Ko3Vexy/SrSynglw/eR+OhkzmqFCZa/OFa/RgAOQ==} + engines: {node: '>=0.6.0'} + hasBin: true + + google-protobuf@3.6.1: + resolution: {integrity: sha512-SJYemeX5GjDLPnadcmCNQePQHCS4Hl5fOcI/JawqDIYFhCmrtYAjcx/oTQx/Wi8UuCuZQhfvftbmPePPAYHFtA==} + gopd@1.0.1: resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==} + got@11.8.6: + resolution: {integrity: 
sha512-6tfZ91bOr7bOXnK7PRDCGBLa1H4U080YHNaAQ2KsMGlLEzRbk44nsZF2E1IeRc3vtJHPVbKCYgdFbaGO2ljd8g==} + engines: {node: '>=10.19.0'} + graceful-fs@4.2.11: resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} @@ -790,17 +2442,61 @@ packages: resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} engines: {node: '>= 0.4'} + has-tostringtag@1.0.2: + resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==} + engines: {node: '>= 0.4'} + hasown@2.0.2: resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} engines: {node: '>= 0.4'} + hex2dec@1.0.1: + resolution: {integrity: sha512-F9QO0+ZI8r1VZudxw21bD/U5pb2Y9LZY3TsnVqCPaijvw5mIhH5jsH29acLPijl5fECfD8FetJtgX8GN5YPM9Q==} + hexoid@1.0.0: resolution: {integrity: sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==} engines: {node: '>=8'} + hosted-git-info@7.0.2: + resolution: {integrity: sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==} + engines: {node: ^16.14.0 || >=18.0.0} + + hot-shots@6.8.7: + resolution: {integrity: sha512-XH8iezBSZgVw2jegu96pUfF1Zv0VZ/iXjb7L5yE3F7mn7/bdhf4qeniXjO0wQWeefe433rhOsazNKLxM+XMI9w==} + engines: {node: '>=6.0.0'} + + hpagent@0.1.2: + resolution: {integrity: sha512-ePqFXHtSQWAFXYmj+JtOTHr84iNrII4/QRlAAPPE+zqnKy4xJo7Ie1Y4kC7AdB+LxLxSTTzBMASsEcy0q8YyvQ==} + html-escaper@2.0.2: resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} + htmlparser2@9.1.0: + resolution: {integrity: sha512-5zfg6mHUoaer/97TxnGpxmbR7zJtPwIYFMZ/H5ucTlPZhKvtum05yiPK3Mgai3a0DyVxv7qYqoweaEd2nrYQzQ==} + + http-cache-semantics@4.1.1: + resolution: {integrity: sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==} + + http-proxy-agent@7.0.2: + resolution: {integrity: sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==} + engines: {node: '>= 14'} + + http2-wrapper@1.0.3: + resolution: {integrity: sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg==} + engines: {node: '>=10.19.0'} + + https-proxy-agent@5.0.0: + resolution: {integrity: sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==} + engines: {node: '>= 6'} + + https-proxy-agent@5.0.1: + resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==} + engines: {node: '>= 6'} + + https-proxy-agent@7.0.5: + resolution: {integrity: sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==} + engines: {node: '>= 14'} + human-signals@2.1.0: resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} engines: {node: '>=10.17.0'} @@ -808,6 +2504,28 @@ packages: humanize-ms@1.2.1: resolution: {integrity: sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==} + hyperlinker@1.0.0: + resolution: {integrity: sha512-Ty8UblRWFEcfSuIaajM34LdPXIhbs1ajEX/BBPv24J+enSVaEVY63xQ6lTO9VRYS5LAoghIG0IDJ+p+IPzKUQQ==} + engines: {node: '>=4'} + + iconv-lite@0.6.3: + resolution: {integrity: 
sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} + engines: {node: '>=0.10.0'} + + ieee754@1.1.13: + resolution: {integrity: sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==} + + ieee754@1.2.1: + resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + + ignore-walk@6.0.5: + resolution: {integrity: sha512-VuuG0wCnjhnylG1ABXT3dAuIpTNDs/G8jlpmwXY03fXoXy/8ZK8/T+hMzt8L4WnrLCJgdybqgPagnF/f97cg3A==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + ignore@5.3.2: + resolution: {integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==} + engines: {node: '>= 4'} + import-local@3.1.0: resolution: {integrity: sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==} engines: {node: '>=8'} @@ -817,6 +2535,14 @@ packages: resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} engines: {node: '>=0.8.19'} + indent-string@4.0.0: + resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} + engines: {node: '>=8'} + + indent-string@5.0.0: + resolution: {integrity: sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg==} + engines: {node: '>=12'} + inflight@1.0.6: resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} deprecated: This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful. 
@@ -824,31 +2550,150 @@ packages: inherits@2.0.4: resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + ini@1.3.8: + resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} + + ini@4.1.3: + resolution: {integrity: sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + ink@4.4.1: + resolution: {integrity: sha512-rXckvqPBB0Krifk5rn/5LvQGmyXwCUpBfmTwbkQNBY9JY8RSl3b8OftBNEYxg4+SWUhEKcPifgope28uL9inlA==} + engines: {node: '>=14.16'} + peerDependencies: + '@types/react': '>=18.0.0' + react: '>=18.0.0' + react-devtools-core: ^4.19.1 + peerDependenciesMeta: + '@types/react': + optional: true + react-devtools-core: + optional: true + + ip-address@9.0.5: + resolution: {integrity: sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==} + engines: {node: '>= 12'} + + is-actual-promise@1.0.2: + resolution: {integrity: sha512-xsFiO1of0CLsQnPZ1iXHNTyR9YszOeWKYv+q6n8oSFW3ipooFJ1j1lbRMgiMCr+pp2gLruESI4zb5Ak6eK5OnQ==} + + is-arguments@1.1.1: + resolution: {integrity: sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==} + engines: {node: '>= 0.4'} + is-arrayish@0.2.1: resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} + is-binary-path@2.1.0: + resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} + engines: {node: '>=8'} + + is-callable@1.2.7: + resolution: {integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==} + engines: {node: '>= 0.4'} + + is-ci@3.0.1: + resolution: {integrity: sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==} + hasBin: true + is-core-module@2.13.1: resolution: {integrity: sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==} + is-docker@2.2.1: + resolution: {integrity: sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==} + engines: {node: '>=8'} + hasBin: true + + is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + is-fullwidth-code-point@3.0.0: resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} engines: {node: '>=8'} + is-fullwidth-code-point@4.0.0: + resolution: {integrity: sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==} + engines: {node: '>=12'} + is-generator-fn@2.1.0: resolution: {integrity: sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==} engines: {node: '>=6'} + is-generator-function@1.0.10: + resolution: {integrity: sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==} + engines: {node: '>= 0.4'} + + is-glob@4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + + is-interactive@1.0.0: + resolution: {integrity: sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==} + engines: 
{node: '>=8'} + + is-lambda@1.0.1: + resolution: {integrity: sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==} + + is-lower-case@2.0.2: + resolution: {integrity: sha512-bVcMJy4X5Og6VZfdOZstSexlEy20Sr0k/p/b2IlQJlfdKAQuMpiv5w2Ccxb8sKdRUNAG1PnHVHjFSdRDVS6NlQ==} + is-number@7.0.0: resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} engines: {node: '>=0.12.0'} + is-obj@1.0.1: + resolution: {integrity: sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg==} + engines: {node: '>=0.10.0'} + + is-plain-object@5.0.0: + resolution: {integrity: sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==} + engines: {node: '>=0.10.0'} + + is-regexp@1.0.0: + resolution: {integrity: sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA==} + engines: {node: '>=0.10.0'} + + is-relative-path@1.0.2: + resolution: {integrity: sha512-i1h+y50g+0hRbBD+dbnInl3JlJ702aar58snAeX+MxBAPvzXGej7sYoPMhlnykabt0ZzCJNBEyzMlekuQZN7fA==} + is-stream@2.0.1: resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} engines: {node: '>=8'} + is-typed-array@1.1.13: + resolution: {integrity: sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==} + engines: {node: '>= 0.4'} + + is-upper-case@2.0.2: + resolution: {integrity: sha512-44pxmxAvnnAOwBg4tHPnkfvgjPwbc5QIsSstNU+YcJ1ovxVzCWpSGosPJOZh/a1tdl81fbgnLc9LLv+x2ywbPQ==} + + is-url-superb@4.0.0: + resolution: {integrity: sha512-GI+WjezhPPcbM+tqE9LnmsY5qqjwHzTvjJ36wxYX5ujNXefSUJ/T17r5bqDV8yLhcgB59KTPNOc9O9cmHTPWsA==} + engines: {node: '>=10'} + + is-url@1.2.4: + resolution: {integrity: sha512-ITvGim8FhRiYe4IQ5uHSkj7pVaPDrCTkNd3yq3cV7iZAcJdHTUMPMEHcqSOy9xZ9qFenQCvi+2wjH9a1nXqHww==} + + is-wsl@2.2.0: + resolution: {integrity: sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==} + engines: {node: '>=8'} + + isarray@1.0.0: + resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==} + isexe@2.0.0: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + isexe@3.1.1: + resolution: {integrity: sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==} + engines: {node: '>=16'} + + isomorphic-ws@4.0.1: + resolution: {integrity: sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w==} + peerDependencies: + ws: '*' + istanbul-lib-coverage@3.2.2: resolution: {integrity: sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==} engines: {node: '>=8'} @@ -873,6 +2718,14 @@ packages: resolution: {integrity: sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==} engines: {node: '>=8'} + jackspeak@3.4.3: + resolution: {integrity: sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==} + + jake@10.9.2: + resolution: {integrity: sha512-2P4SQ0HrLQ+fw6llpLnOaGAvN2Zu6778SJMrCUwns4fOoG9ayrTiZk3VV8sCPkVZF8ab0zksVpS8FDY5pRCNBA==} + engines: {node: '>=10'} + hasBin: true + jest-changed-files@29.7.0: resolution: {integrity: 
sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} @@ -1002,6 +2855,13 @@ packages: node-notifier: optional: true + jmespath@0.16.0: + resolution: {integrity: sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==} + engines: {node: '>= 0.6.0'} + + joi@17.13.3: + resolution: {integrity: sha512-otDA4ldcIx+ZXsKHWmp0YizCweVRZG96J10b0FevjfuncLO1oX59THoAmHkNubYJ+9gWsYsp5k8v4ib6oDv1fA==} + js-tokens@4.0.0: resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} @@ -1009,27 +2869,78 @@ packages: resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==} hasBin: true + jsbn@1.1.0: + resolution: {integrity: sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==} + jsesc@2.5.2: resolution: {integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==} engines: {node: '>=4'} hasBin: true + json-bigint@1.0.0: + resolution: {integrity: sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==} + + json-buffer@3.0.1: + resolution: {integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==} + json-parse-even-better-errors@2.3.1: resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} + json-parse-even-better-errors@3.0.2: + resolution: {integrity: sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + json5@2.2.3: resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==} engines: {node: '>=6'} hasBin: true + jsonfile@6.1.0: + resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} + + jsonparse@1.3.1: + resolution: {integrity: sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==} + engines: {'0': node >= 0.2.0} + + jsonpath-plus@7.2.0: + resolution: {integrity: sha512-zBfiUPM5nD0YZSBT/o/fbCUlCcepMIdP0CJZxM1+KgA4f2T206f6VAg9e7mX35+KlMaIc5qXW34f3BnwJ3w+RA==} + engines: {node: '>=12.0.0'} + + jsonwebtoken@9.0.2: + resolution: {integrity: sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==} + engines: {node: '>=12', npm: '>=6'} + + jwa@1.4.1: + resolution: {integrity: sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==} + + jwa@2.0.0: + resolution: {integrity: sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==} + + jws@3.2.2: + resolution: {integrity: sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==} + + jws@4.0.0: + resolution: {integrity: sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==} + + keyv@4.5.4: + resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} + kleur@3.0.3: resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} engines: {node: '>=6'} + lazystream@1.0.1: + resolution: {integrity: 
sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==} + engines: {node: '>= 0.6.3'} + leven@3.1.0: resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} engines: {node: '>=6'} + lightstep-tracer@0.31.2: + resolution: {integrity: sha512-DRdyUrASPkr+hxyHQJ9ImPSIxpUCpqQvfgHwxoZ42G6iEJ2g0/2chCw39tuz60JUmLfTlVp1LFzLscII6YPRoA==} + engines: {node: '>=8.0.0'} + lines-and-columns@1.2.4: resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} @@ -1037,9 +2948,73 @@ packages: resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} engines: {node: '>=8'} + locate-path@6.0.0: + resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} + engines: {node: '>=10'} + + lodash.camelcase@4.3.0: + resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} + + lodash.defaults@4.2.0: + resolution: {integrity: sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==} + + lodash.difference@4.5.0: + resolution: {integrity: sha512-dS2j+W26TQ7taQBGN8Lbbq04ssV3emRw4NY58WErlTO29pIqS0HmoT5aJ9+TUQ1N3G+JOZSji4eugsWwGp9yPA==} + + lodash.flatten@4.4.0: + resolution: {integrity: sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g==} + + lodash.includes@4.3.0: + resolution: {integrity: sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==} + + lodash.isboolean@3.0.3: + resolution: {integrity: sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==} + + lodash.isinteger@4.0.4: + resolution: {integrity: sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==} + + lodash.isnumber@3.0.3: + resolution: {integrity: sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==} + + lodash.isplainobject@4.0.6: + resolution: {integrity: sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==} + + lodash.isstring@4.0.1: + resolution: {integrity: sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==} + lodash.memoize@4.1.2: resolution: {integrity: sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==} + lodash.merge@4.6.2: + resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} + + lodash.once@4.1.1: + resolution: {integrity: sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==} + + lodash.union@4.6.0: + resolution: {integrity: sha512-c4pB2CdGrGdjMKYLA+XiRDO7Y0PRQbm/Gzg8qMj+QH+pFVAoTp5sBpO0odL3FjoPCGjK96p6qsP+yQoiLoOBcw==} + + lodash@4.17.21: + resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} + + log-symbols@3.0.0: + resolution: {integrity: sha512-dSkNGuI7iG3mfvDzUuYZyvk5dD9ocYCYzNU6CYDE6+Xqd+gwme6Z00NS3dUh8mq/73HaEtT7m6W+yUPtU6BZnQ==} + engines: {node: '>=8'} + + long@5.2.3: + resolution: {integrity: sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==} + + loose-envify@1.4.0: + resolution: {integrity: 
sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} + hasBin: true + + lowercase-keys@2.0.0: + resolution: {integrity: sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==} + engines: {node: '>=8'} + + lru-cache@10.4.3: + resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==} + lru-cache@5.1.1: resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} @@ -1050,12 +3025,23 @@ packages: make-error@1.3.6: resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} + make-fetch-happen@13.0.1: + resolution: {integrity: sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==} + engines: {node: ^16.14.0 || >=18.0.0} + makeerror@1.0.12: resolution: {integrity: sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==} + matcher-collection@1.1.2: + resolution: {integrity: sha512-YQ/teqaOIIfUHedRam08PB3NK7Mjct6BvzRnJmpGDm8uFXpNr1sbY4yuflI5JcEs6COpYA0FpRQhSDBf1tT95g==} + merge-stream@2.0.0: resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} + merge2@1.4.1: + resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} + engines: {node: '>= 8'} + methods@1.1.2: resolution: {integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==} engines: {node: '>= 0.6'} @@ -1081,18 +3067,131 @@ packages: resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} engines: {node: '>=6'} + mimic-response@1.0.1: + resolution: {integrity: sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==} + engines: {node: '>=4'} + + mimic-response@3.1.0: + resolution: {integrity: sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==} + engines: {node: '>=10'} + minimatch@3.1.2: resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} + minimatch@5.1.6: + resolution: {integrity: sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==} + engines: {node: '>=10'} + + minimatch@9.0.5: + resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==} + engines: {node: '>=16 || 14 >=14.17'} + + minimist@1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} + + minipass-collect@2.0.1: + resolution: {integrity: sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw==} + engines: {node: '>=16 || 14 >=14.17'} + + minipass-fetch@3.0.5: + resolution: {integrity: sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + minipass-flush@1.0.5: + resolution: {integrity: sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==} + engines: {node: '>= 8'} + + minipass-json-stream@1.0.2: + resolution: {integrity: 
sha512-myxeeTm57lYs8pH2nxPzmEEg8DGIgW+9mv6D4JZD2pa81I/OBjeU7PtICXV6c9eRGTA5JMDsuIPUZRCyBMYNhg==} + + minipass-pipeline@1.2.4: + resolution: {integrity: sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==} + engines: {node: '>=8'} + + minipass-sized@1.0.3: + resolution: {integrity: sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==} + engines: {node: '>=8'} + + minipass@3.3.6: + resolution: {integrity: sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==} + engines: {node: '>=8'} + + minipass@5.0.0: + resolution: {integrity: sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==} + engines: {node: '>=8'} + + minipass@7.1.2: + resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} + engines: {node: '>=16 || 14 >=14.17'} + + minizlib@2.1.2: + resolution: {integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==} + engines: {node: '>= 8'} + + mixpanel@0.13.0: + resolution: {integrity: sha512-YOWmpr/o4+zJ8LPjuLUkWLc2ImFeIkX6hF1t62Wlvq6loC6e8EK8qieYO4gYPTPxxtjAryl7xmIvf/7qnPwjrQ==} + engines: {node: '>=10.0'} + + mkdirp@0.5.6: + resolution: {integrity: sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==} + hasBin: true + + mkdirp@1.0.4: + resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==} + engines: {node: '>=10'} + hasBin: true + + mkdirp@3.0.1: + resolution: {integrity: sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==} + engines: {node: '>=10'} + hasBin: true + + module-definition@5.0.1: + resolution: {integrity: sha512-kvw3B4G19IXk+BOXnYq/D/VeO9qfHaapMeuS7w7sNUqmGaA6hywdFHMi+VWeR9wUScXM7XjoryTffCZ5B0/8IA==} + engines: {node: '>=14'} + hasBin: true + + module-lookup-amd@8.0.5: + resolution: {integrity: sha512-vc3rYLjDo5Frjox8NZpiyLXsNWJ5BWshztc/5KSOMzpg9k5cHH652YsJ7VKKmtM4SvaxuE9RkrYGhiSjH3Ehow==} + engines: {node: '>=14'} + hasBin: true + + moment@2.30.1: + resolution: {integrity: sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==} + + ms@2.0.0: + resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==} + ms@2.1.2: resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + mute-stream@0.0.8: + resolution: {integrity: sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==} + + nan@2.20.0: + resolution: {integrity: sha512-bk3gXBZDGILuuo/6sKtr0DQmSThYHLtNCdSdXk9YkxD/jK6X2vmCyyXBBxyqZ4XcnzTyYEAThfX3DCEnLf6igw==} + + nanoid@3.3.7: + resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + + nanotimer@0.3.14: + resolution: {integrity: sha512-NpKXdP6ZLwZcODvDeyfoDBVoncbrgvC12txO3F4l9BxMycQjZD29AnasGAy7uSi3dcsTGnGn6/zzvQRwbjS4uw==} + natural-compare@1.4.0: resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} + 
natural-orderby@2.0.3: + resolution: {integrity: sha512-p7KTHxU0CUrcOXe62Zfrb5Z13nLvPhSWR/so3kFulUQU0sgUll2Z0LwpsLN351eOOD+hRGu/F1g+6xDfPeD++Q==} + + negotiator@0.6.3: + resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==} + engines: {node: '>= 0.6'} + node-domexception@1.0.0: resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} engines: {node: '>=10.5.0'} @@ -1106,23 +3205,80 @@ packages: encoding: optional: true + node-gyp@10.2.0: + resolution: {integrity: sha512-sp3FonBAaFe4aYTcFdZUn2NYkbP7xroPGYvQmP4Nl5PxamznItBnNCgjrVTKrEfQynInMsJvZrdmqUnysCJ8rw==} + engines: {node: ^16.14.0 || >=18.0.0} + hasBin: true + node-int64@0.4.0: resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==} node-releases@2.0.14: resolution: {integrity: sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==} + node-source-walk@6.0.2: + resolution: {integrity: sha512-jn9vOIK/nfqoFCcpK89/VCVaLg1IHE6UVfDOzvqmANaJ/rWCTEdH8RZ1V278nv2jr36BJdyQXIAavBLXpzdlag==} + engines: {node: '>=14'} + + nopt@7.2.1: + resolution: {integrity: sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + hasBin: true + + normalize-package-data@6.0.2: + resolution: {integrity: sha512-V6gygoYb/5EmNI+MEGrWkC+e6+Rr7mTmfHrxDbLzxQogBkgzo76rkok0Am6thgSF7Mv2nLOajAJj5vDJZEFn7g==} + engines: {node: ^16.14.0 || >=18.0.0} + normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} engines: {node: '>=0.10.0'} + normalize-url@6.1.0: + resolution: {integrity: sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==} + engines: {node: '>=10'} + + npm-bundled@3.0.1: + resolution: {integrity: sha512-+AvaheE/ww1JEwRHOrn4WHNzOxGtVp+adrg2AeZS/7KuxGUYFuBta98wYpfHBbJp6Tg6j1NKSEVHNcfZzJHQwQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + npm-install-checks@6.3.0: + resolution: {integrity: sha512-W29RiK/xtpCGqn6f3ixfRYGk+zRyr+Ew9F2E20BfXxT5/euLdA/Nm7fO7OeTGuAmTs30cpgInyJ0cYe708YTZw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + npm-normalize-package-bin@3.0.1: + resolution: {integrity: sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + npm-package-arg@11.0.3: + resolution: {integrity: sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==} + engines: {node: ^16.14.0 || >=18.0.0} + + npm-packlist@8.0.2: + resolution: {integrity: sha512-shYrPFIS/JLP4oQmAwDyk5HcyysKW8/JLTEA32S0Z5TzvpaeeX2yMFfoK1fjEBnCBvVyIB/Jj/GBFdm0wsgzbA==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + npm-pick-manifest@9.1.0: + resolution: {integrity: sha512-nkc+3pIIhqHVQr085X9d2JzPzLyjzQS96zbruppqC9aZRm/x8xx6xhI98gHtsfELP2bE+loHq8ZaHFHhe+NauA==} + engines: {node: ^16.14.0 || >=18.0.0} + + npm-registry-fetch@16.2.1: + resolution: {integrity: sha512-8l+7jxhim55S85fjiDGJ1rZXBWGtRLi1OSb4Z3BPLObPuIaeKRlPRiYMSHU4/81ck3t71Z+UwDDl47gcpmfQQA==} + engines: {node: ^16.14.0 || >=18.0.0} + npm-run-path@4.0.1: resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} engines: {node: '>=8'} + nth-check@2.1.1: + resolution: 
{integrity: sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==} + object-inspect@1.13.1: resolution: {integrity: sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==} + object-treeify@1.1.33: + resolution: {integrity: sha512-EFVjAYfzWqWsBMRHPMAXLCDIJnpMhdWAqR7xG6M6a2cs6PMFpl/+Z20w9zDW4vkxOFfddegBKq9Rehd0bxWE7A==} + engines: {node: '>= 10'} + once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} @@ -1130,10 +3286,30 @@ packages: resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} engines: {node: '>=6'} + open@8.4.2: + resolution: {integrity: sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==} + engines: {node: '>=12'} + openai@4.52.2: resolution: {integrity: sha512-mMc0XgFuVSkcm0lRIi8zaw++otC82ZlfkCur1qguXYWPETr/+ZwL9A/vvp3YahX+shpaT6j03dwsmUyLAfmEfg==} hasBin: true + opener@1.5.2: + resolution: {integrity: sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==} + hasBin: true + + opentracing@0.14.7: + resolution: {integrity: sha512-vz9iS7MJ5+Bp1URw8Khvdyw1H/hGvzHWlKQ7eRrQojSCDL1/SrWfrY9QebLw97n2deyRtzHRC3MkQfVNUCo91Q==} + engines: {node: '>=0.10'} + + ora@4.1.1: + resolution: {integrity: sha512-sjYP8QyVWBpBZWD6Vr1M/KwknSw6kJOz41tvGMlwWeClHBtYKTbHMki1PsLZnxKpXMPbTKv9b3pjQu3REib96A==} + engines: {node: '>=8'} + + p-cancelable@2.1.1: + resolution: {integrity: sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==} + engines: {node: '>=8'} + p-limit@2.3.0: resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} engines: {node: '>=6'} @@ -1146,14 +3322,46 @@ packages: resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==} engines: {node: '>=8'} + p-locate@5.0.0: + resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} + engines: {node: '>=10'} + + p-map@4.0.0: + resolution: {integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==} + engines: {node: '>=10'} + p-try@2.2.0: resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} engines: {node: '>=6'} + package-json-from-dist@1.0.0: + resolution: {integrity: sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw==} + + pacote@17.0.7: + resolution: {integrity: sha512-sgvnoUMlkv9xHwDUKjKQFXVyUi8dtJGKp3vg6sYy+TxbDic5RjZCHF3ygv0EJgNRZ2GfRONjlKPUfokJ9lDpwQ==} + engines: {node: ^16.14.0 || >=18.0.0} + hasBin: true + parse-json@5.2.0: resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} engines: {node: '>=8'} + parse5-htmlparser2-tree-adapter@7.0.0: + resolution: {integrity: sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==} + + parse5-parser-stream@7.1.2: + resolution: {integrity: sha512-JyeQc9iwFLn5TbvvqACIF/VXG6abODeB3Fwmv/TGdLk2LfbWkaySGY72at4+Ty7EkPZj854u4CrICqNk2qIbow==} + + parse5@7.1.2: + resolution: {integrity: sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==} + + password-prompt@1.1.3: 
+ resolution: {integrity: sha512-HkrjG2aJlvF0t2BMH0e2LB/EHf3Lcq3fNMzy4GYHcQblAvOl+QQji1Lx7WRBMqpVK8p+KR7bCg7oqAMXtdgqyw==} + + patch-console@2.0.0: + resolution: {integrity: sha512-0YNdUceMdaQwoKce1gatDScmMo5pu/tfABfnzEqeG0gtTmd7mh/WcwgUjtAeOU7N8nFFlbQBnFK2gXW5fGvmMA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + path-exists@4.0.0: resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} engines: {node: '>=8'} @@ -1169,9 +3377,20 @@ packages: path-parse@1.0.7: resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} + path-scurry@1.11.1: + resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} + engines: {node: '>=16 || 14 >=14.18'} + + path-type@4.0.0: + resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} + engines: {node: '>=8'} + picocolors@1.0.0: resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==} + picocolors@1.0.1: + resolution: {integrity: sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==} + picomatch@2.3.1: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} engines: {node: '>=8.6'} @@ -1189,41 +3408,227 @@ packages: engines: {node: '>=18'} hasBin: true + playwright-core@1.45.3: + resolution: {integrity: sha512-+ym0jNbcjikaOwwSZycFbwkWgfruWvYlJfThKYAlImbxUgdWFO2oW70ojPm4OpE4t6TAo2FY/smM+hpVTtkhDA==} + engines: {node: '>=18'} + hasBin: true + playwright@1.45.0: resolution: {integrity: sha512-4z3ac3plDfYzGB6r0Q3LF8POPR20Z8D0aXcxbJvmfMgSSq1hkcgvFRXJk9rUq5H/MJ0Ktal869hhOdI/zUTeLA==} engines: {node: '>=18'} hasBin: true + playwright@1.45.3: + resolution: {integrity: sha512-QhVaS+lpluxCaioejDZ95l4Y4jSFCsBvl2UZkpeXlzxmqS+aABr5c82YmfMHrL6x27nvrvykJAFpkzT2eWdJww==} + engines: {node: '>=18'} + hasBin: true + + polite-json@4.0.1: + resolution: {integrity: sha512-8LI5ZeCPBEb4uBbcYKNVwk4jgqNx1yHReWoW4H4uUihWlSqZsUDfSITrRhjliuPgxsNPFhNSudGO2Zu4cbWinQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + polite-json@5.0.0: + resolution: {integrity: sha512-OLS/0XeUAcE8a2fdwemNja+udKgXNnY6yKVIXqAD2zVRx1KvY6Ato/rZ2vdzbxqYwPW0u6SCNC/bAMPNzpzxbw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + possible-typed-array-names@1.0.0: + resolution: {integrity: sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==} + engines: {node: '>= 0.4'} + + postcss-values-parser@6.0.2: + resolution: {integrity: sha512-YLJpK0N1brcNJrs9WatuJFtHaV9q5aAOj+S4DI5S7jgHlRfm0PIbDCAFRYMQD5SHq7Fy6xsDhyutgS0QOAs0qw==} + engines: {node: '>=10'} + peerDependencies: + postcss: ^8.2.9 + + postcss@8.4.41: + resolution: {integrity: sha512-TesUflQ0WKZqAvg52PWL6kHgLKP6xB6heTOdoYM0Wt2UHyxNa4K25EZZMgKns3BH1RLVbZCREPpLY0rhnNoHVQ==} + engines: {node: ^10 || ^12 || >=14} + + posthog-node@2.6.0: + resolution: {integrity: sha512-/BiFw/jwdP0uJSRAIoYqLoBTjZ612xv74b1L/a3T/p1nJVL8e0OrHuxbJW56c6WVW/IKm9gBF/zhbqfaz0XgJQ==} + engines: {node: '>=15.0.0'} + + precinct@11.0.5: + resolution: {integrity: sha512-oHSWLC8cL/0znFhvln26D14KfCQFFn4KOLSw6hmLhd+LQ2SKt9Ljm89but76Pc7flM9Ty1TnXyrA2u16MfRV3w==} + engines: {node: ^14.14.0 || >=16.0.0} + hasBin: true + + present@0.0.3: + resolution: {integrity: 
sha512-d0QMXYTKHuAO0n0IfI/x2lbNwybdNWjRQ08hQySzqMQ2M0gwh/IetTv2glkPJihFn+cMDYjK/BiVgcLcjsASgg==} + pretty-format@29.7.0: resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + prismjs-terminal@1.2.3: + resolution: {integrity: sha512-xc0zuJ5FMqvW+DpiRkvxURlz98DdfDsZcFHdO699+oL+ykbFfgI7O4VDEgUyc07BSL2NHl3zdb8m/tZ/aaqUrw==} + engines: {node: '>=16'} + + prismjs@1.29.0: + resolution: {integrity: sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==} + engines: {node: '>=6'} + + proc-log@4.2.0: + resolution: {integrity: sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + process-nextick-args@2.0.1: + resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==} + + process-on-spawn@1.0.0: + resolution: {integrity: sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg==} + engines: {node: '>=8'} + + prom-client@14.2.0: + resolution: {integrity: sha512-sF308EhTenb/pDRPakm+WgiN+VdM/T1RaHj1x+MvAuT8UiQP8JmOEbxVqtkbfR4LrvOg5n7ic01kRBDGXjYikA==} + engines: {node: '>=10'} + + promise-inflight@1.0.1: + resolution: {integrity: sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==} + peerDependencies: + bluebird: '*' + peerDependenciesMeta: + bluebird: + optional: true + + promise-retry@2.0.1: + resolution: {integrity: sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==} + engines: {node: '>=10'} + prompts@2.4.2: resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==} engines: {node: '>= 6'} + protobufjs@7.3.2: + resolution: {integrity: sha512-RXyHaACeqXeqAKGLDl68rQKbmObRsTIn4TYVUUug1KfS47YWCo5MacGITEryugIgZqORCvJWEk4l449POg5Txg==} + engines: {node: '>=12.0.0'} + + pump@3.0.0: + resolution: {integrity: sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==} + + punycode@1.3.2: + resolution: {integrity: sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==} + pure-rand@6.1.0: resolution: {integrity: sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==} + q@1.5.1: + resolution: {integrity: sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw==} + engines: {node: '>=0.6.0', teleport: '>=0.2.0'} + deprecated: |- + You or someone you depend on is using Q, the JavaScript Promise library that gave JavaScript developers strong feelings about promises. They can almost certainly migrate to the native JavaScript promise now. Thank you literally everyone for joining me in this bet against the odds. Be excellent to each other. + + (For a CapTP with native promises, see @endo/eventual-send and @endo/captp) + qs@6.12.1: resolution: {integrity: sha512-zWmv4RSuB9r2mYQw3zxQuHWeU+42aKi1wWig/j4ele4ygELZ7PEO6MM7rim9oAQH2A5MWfsAVf/jPvTPgCbvUQ==} engines: {node: '>=0.6'} + querystring@0.2.0: + resolution: {integrity: sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==} + engines: {node: '>=0.4.x'} + deprecated: The querystring API is considered Legacy. new code should use the URLSearchParams API instead. 
+ + queue-microtask@1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + + quick-lru@5.1.1: + resolution: {integrity: sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==} + engines: {node: '>=10'} + + quote-unquote@1.0.0: + resolution: {integrity: sha512-twwRO/ilhlG/FIgYeKGFqyHhoEhqgnKVkcmqMKi2r524gz3ZbDTcyFt38E9xjJI2vT+KbRNHVbnJ/e0I25Azwg==} + + rc@1.2.8: + resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==} + hasBin: true + + react-dom@18.3.1: + resolution: {integrity: sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==} + peerDependencies: + react: ^18.3.1 + + react-element-to-jsx-string@15.0.0: + resolution: {integrity: sha512-UDg4lXB6BzlobN60P8fHWVPX3Kyw8ORrTeBtClmIlGdkOOE+GYQSFvmEU5iLLpwp/6v42DINwNcwOhOLfQ//FQ==} + peerDependencies: + react: ^0.14.8 || ^15.0.1 || ^16.0.0 || ^17.0.1 || ^18.0.0 + react-dom: ^0.14.8 || ^15.0.1 || ^16.0.0 || ^17.0.1 || ^18.0.0 + + react-is@18.1.0: + resolution: {integrity: sha512-Fl7FuabXsJnV5Q1qIOQwx/sagGF18kogb4gpfcG4gjLBWO0WDiiz1ko/ExayuxE7InyQkBLkxRFG5oxY6Uu3Kg==} + react-is@18.3.1: resolution: {integrity: sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==} + react-reconciler@0.29.2: + resolution: {integrity: sha512-zZQqIiYgDCTP/f1N/mAR10nJGrPD2ZR+jDSEsKWJHYC7Cm2wodlwbR3upZRdC3cjIjSlTLNVyO7Iu0Yy7t2AYg==} + engines: {node: '>=0.10.0'} + peerDependencies: + react: ^18.3.1 + + react@18.3.1: + resolution: {integrity: sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==} + engines: {node: '>=0.10.0'} + + read-package-json-fast@3.0.2: + resolution: {integrity: sha512-0J+Msgym3vrLOUB3hzQCuZHII0xkNGCtz/HJH9xZshwv9DbDwkw1KaE3gx/e2J5rpEY5rtOy6cyhKOPrkP7FZw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + read-package-json@7.0.1: + resolution: {integrity: sha512-8PcDiZ8DXUjLf687Ol4BR8Bpm2umR7vhoZOzNRt+uxD9GpBh/K+CAAALVIiYFknmvlmyg7hM7BSNUXPaCCqd0Q==} + engines: {node: ^16.14.0 || >=18.0.0} + deprecated: This package is no longer supported. Please use @npmcli/package-json instead. 
+ + readable-stream@2.3.8: + resolution: {integrity: sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==} + + readable-stream@3.6.2: + resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} + engines: {node: '>= 6'} + + readdir-glob@1.1.3: + resolution: {integrity: sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==} + + readdirp@3.6.0: + resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} + engines: {node: '>=8.10.0'} + + redeyed@2.1.1: + resolution: {integrity: sha512-FNpGGo1DycYAdnrKFxCMmKYgo/mILAqtRYbkdQD8Ep/Hk2PQ5+aEAEx+IU713RTDmuBaH0c8P5ZozurNu5ObRQ==} + require-directory@2.1.1: resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} engines: {node: '>=0.10.0'} + requirejs-config-file@4.0.0: + resolution: {integrity: sha512-jnIre8cbWOyvr8a5F2KuqBnY+SDA4NXr/hzEZJG79Mxm2WiFQz2dzhC8ibtPJS7zkmBEl1mxSwp5HhC1W4qpxw==} + engines: {node: '>=10.13.0'} + + requirejs@2.3.7: + resolution: {integrity: sha512-DouTG8T1WanGok6Qjg2SXuCMzszOo0eHeH9hDZ5Y4x8Je+9JB38HdTLT4/VA8OaUhBa0JPVHJ0pyBkM1z+pDsw==} + engines: {node: '>=0.4.0'} + hasBin: true + + resolve-alpn@1.2.1: + resolution: {integrity: sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==} + resolve-cwd@3.0.0: resolution: {integrity: sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==} engines: {node: '>=8'} + resolve-dependency-path@3.0.2: + resolution: {integrity: sha512-Tz7zfjhLfsvR39ADOSk9us4421J/1ztVBo4rWUkF38hgHK5m0OCZ3NxFVpqHRkjctnwVa15igEUHFJp8MCS7vA==} + engines: {node: '>=14'} + resolve-from@5.0.0: resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} engines: {node: '>=8'} + resolve-import@1.4.6: + resolution: {integrity: sha512-CIw9e64QcKcCFUj9+KxUCJPy8hYofv6eVfo3U9wdhCm2E4IjvFnZ6G4/yIC4yP3f11+h6uU5b3LdS7O64LgqrA==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + resolve.exports@2.0.2: resolution: {integrity: sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg==} engines: {node: '>=10'} @@ -1232,6 +3637,65 @@ packages: resolution: {integrity: sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==} hasBin: true + responselike@2.0.1: + resolution: {integrity: sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw==} + + restore-cursor@3.1.0: + resolution: {integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==} + engines: {node: '>=8'} + + restore-cursor@4.0.0: + resolution: {integrity: sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + retry@0.12.0: + resolution: {integrity: sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==} + engines: {node: '>= 4'} + + reusify@1.0.4: + resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + + rimraf@2.6.3: + resolution: {integrity: sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==} 
+ deprecated: Rimraf versions prior to v4 are no longer supported + hasBin: true + + rimraf@3.0.2: + resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} + deprecated: Rimraf versions prior to v4 are no longer supported + hasBin: true + + rimraf@5.0.10: + resolution: {integrity: sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==} + hasBin: true + + run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + + safe-buffer@5.1.2: + resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==} + + safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + + safer-buffer@2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + + sass-lookup@5.0.1: + resolution: {integrity: sha512-t0X5PaizPc2H4+rCwszAqHZRtr4bugo4pgiCvrBFvIX0XFxnr29g77LJcpyj9A0DcKf7gXMLcgvRjsonYI6x4g==} + engines: {node: '>=14'} + hasBin: true + + sax@1.2.1: + resolution: {integrity: sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==} + + scheduler@0.23.2: + resolution: {integrity: sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==} + + seedrandom@3.0.5: + resolution: {integrity: sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==} + semver@6.3.1: resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} hasBin: true @@ -1260,6 +3724,14 @@ packages: signal-exit@3.0.7: resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} + signal-exit@4.1.0: + resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} + engines: {node: '>=14'} + + sigstore@2.3.1: + resolution: {integrity: sha512-8G+/XDU8wNsJOQS5ysDVO0Etg9/2uA5gR9l4ZwijjlwxBcrU6RPfwi2+jJmbP+Ap1Hlp/nVAaEO4Fj22/SL2gQ==} + engines: {node: ^16.14.0 || >=18.0.0} + sisteransi@1.0.5: resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} @@ -1267,32 +3739,132 @@ packages: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} engines: {node: '>=8'} + slice-ansi@4.0.0: + resolution: {integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==} + engines: {node: '>=10'} + + slice-ansi@5.0.0: + resolution: {integrity: sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==} + engines: {node: '>=12'} + + slice-ansi@6.0.0: + resolution: {integrity: sha512-6bn4hRfkTvDfUoEQYkERg0BVF1D0vrX9HEkMl08uDiNWvVvjylLHvZFZWkDo6wjT8tUctbYl1nCOuE66ZTaUtA==} + engines: {node: '>=14.16'} + + smart-buffer@4.2.0: + resolution: {integrity: sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==} + engines: {node: '>= 6.0.0', npm: '>= 3.0.0'} + + socket.io-client@4.7.5: + resolution: {integrity: sha512-sJ/tqHOCe7Z50JCBCXrsY3I2k03iOiUe+tj1OmKeD2lXPiGH/RUCdTZFoqVyN7l1MnpIzPrGtLcijffmeouNlQ==} + engines: {node: '>=10.0.0'} + + 
socket.io-parser@4.2.4: + resolution: {integrity: sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew==} + engines: {node: '>=10.0.0'} + + socketio-wildcard@2.0.0: + resolution: {integrity: sha512-Bf3ioZq15Z2yhFLDasRvbYitg82rwm+5AuER5kQvEQHhNFf4R4K5o/h57nEpN7A59T9FyRtTj34HZfMWAruw/A==} + + socks-proxy-agent@8.0.4: + resolution: {integrity: sha512-GNAq/eg8Udq2x0eNiFkr9gRg5bA7PXEWagQdeRX4cPSG+X/8V38v637gim9bjFptMk1QWsCTr0ttrJEiXbNnRw==} + engines: {node: '>= 14'} + + socks@2.8.3: + resolution: {integrity: sha512-l5x7VUUWbjVFbafGLxPWkYsHIhEvmF85tbIeFZWc8ZPtoMyybuEhL7Jye/ooC4/d48FgOjSJXgsF/AJPYCW8Zw==} + engines: {node: '>= 10.0.0', npm: '>= 3.0.0'} + + source-map-js@1.2.0: + resolution: {integrity: sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==} + engines: {node: '>=0.10.0'} + + source-map-support@0.3.3: + resolution: {integrity: sha512-9O4+y9n64RewmFoKUZ/5Tx9IHIcXM6Q+RTSw6ehnqybUz4a7iwR3Eaw80uLtqqQ5D0C+5H03D4KKGo9PdP33Gg==} + source-map-support@0.5.13: resolution: {integrity: sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==} + source-map@0.1.32: + resolution: {integrity: sha512-htQyLrrRLkQ87Zfrir4/yN+vAUd6DNjVayEjTSHXu29AYQJw57I4/xEL/M6p6E/woPNJwvZt6rVlzc7gFEJccQ==} + engines: {node: '>=0.8.0'} + source-map@0.6.1: resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} engines: {node: '>=0.10.0'} + spdx-correct@3.2.0: + resolution: {integrity: sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==} + + spdx-exceptions@2.5.0: + resolution: {integrity: sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==} + + spdx-expression-parse@3.0.1: + resolution: {integrity: sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==} + + spdx-license-ids@3.0.18: + resolution: {integrity: sha512-xxRs31BqRYHwiMzudOrpSiHtZ8i/GeionCBDSilhYRj+9gIcI8wCZTlXZKu9vZIVqViP3dcp9qE5G6AlIaD+TQ==} + sprintf-js@1.0.3: resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} + sprintf-js@1.1.3: + resolution: {integrity: sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==} + + sqs-consumer@5.8.0: + resolution: {integrity: sha512-pJReMEtDM9/xzQTffb7dxMD5MKagBfOW65m+ITsbpNk0oZmJ38tTC4LPmj0/7ZcKSOqi2LrpA1b0qGYOwxlHJg==} + peerDependencies: + aws-sdk: ^2.1271.0 + + ssri@10.0.6: + resolution: {integrity: sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + stack-utils@2.0.6: resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==} engines: {node: '>=10'} + stoppable@1.1.0: + resolution: {integrity: sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==} + engines: {node: '>=4', npm: '>=6'} + string-length@4.0.2: resolution: {integrity: sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==} engines: {node: '>=10'} + string-length@6.0.0: + resolution: {integrity: sha512-1U361pxZHEQ+FeSjzqRpV+cu2vTzYeWeafXFLykiFlv4Vc0n3njgU8HrMbyik5uwm77naWMuVG8fhEF+Ovb1Kg==} + engines: {node: '>=16'} + string-width@4.2.3: resolution: {integrity: 
sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} engines: {node: '>=8'} + string-width@5.1.2: + resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} + engines: {node: '>=12'} + + string_decoder@1.1.1: + resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==} + + string_decoder@1.3.0: + resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + + stringify-object@3.3.0: + resolution: {integrity: sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw==} + engines: {node: '>=4'} + strip-ansi@6.0.1: resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} engines: {node: '>=8'} + strip-ansi@7.1.0: + resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==} + engines: {node: '>=12'} + + strip-bom@3.0.0: + resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} + engines: {node: '>=4'} + strip-bom@4.0.0: resolution: {integrity: sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==} engines: {node: '>=8'} @@ -1301,10 +3873,22 @@ packages: resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} engines: {node: '>=6'} + strip-json-comments@2.0.1: + resolution: {integrity: sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==} + engines: {node: '>=0.10.0'} + strip-json-comments@3.1.1: resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} engines: {node: '>=8'} + strnum@1.0.5: + resolution: {integrity: sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==} + + stylus-lookup@5.0.1: + resolution: {integrity: sha512-tLtJEd5AGvnVy4f9UHQMw4bkJJtaAcmo54N+ovQBjDY3DuWyK9Eltxzr5+KG0q4ew6v2EHyuWWNnHeiw/Eo7rQ==} + engines: {node: '>=14'} + hasBin: true + superagent@9.0.2: resolution: {integrity: sha512-xuW7dzkUpcJq7QnhOsnNUgtYp3xRwpt2F7abdRYIpCsAt0hhUqia0EdxyXZQQpNmGtsCzYHryaKSV3q3GJnq7w==} engines: {node: '>=14.18.0'} @@ -1325,14 +3909,75 @@ packages: resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==} engines: {node: '>=10'} + supports-hyperlinks@2.3.0: + resolution: {integrity: sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==} + engines: {node: '>=8'} + supports-preserve-symlinks-flag@1.0.0: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} + sync-content@1.0.2: + resolution: {integrity: sha512-znd3rYiiSxU3WteWyS9a6FXkTA/Wjk8WQsOyzHbineeL837dLn3DA4MRhsIX3qGcxDMH6+uuFV4axztssk7wEQ==} + engines: {node: '>=14'} + hasBin: true + + tap-parser@16.0.1: + resolution: {integrity: sha512-vKianJzSSzLkJ3bHBwzvZDDRi9yGMwkRANJxwPAjAue50owB8rlluYySmTN4tZVH0nsh6stvrQbg9kuCL5svdg==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + hasBin: true + + tap-yaml@2.2.2: + resolution: {integrity: sha512-MWG4OpAKtNoNVjCz/BqlDJiwTM99tiHRhHPS4iGOe1ZS0CgM4jSFH92lthSFvvy4EdDjQZDV7uYqUFlU9JuNhw==} + 
engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + + tap@19.2.5: + resolution: {integrity: sha512-Mz7MznUuKCqrN9dr0s8REt6zLg6WLNrvGXwDSaUyPO73dpXXjakYA7YVKRWu6TBnj7NsSYKuHXpQFROlqZ2KTg==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + hasBin: true + + tapable@2.2.1: + resolution: {integrity: sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==} + engines: {node: '>=6'} + + tar-stream@2.2.0: + resolution: {integrity: sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==} + engines: {node: '>=6'} + + tar@6.2.1: + resolution: {integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==} + engines: {node: '>=10'} + + tcompare@7.0.1: + resolution: {integrity: sha512-JN5s7hgmg/Ya5HxZqCnywT+XiOGRFcJRgYhtMyt/1m+h0yWpWwApO7HIM8Bpwyno9hI151ljjp5eAPCHhIGbpQ==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + + tdigest@0.1.2: + resolution: {integrity: sha512-+G0LLgjjo9BZX2MfdvPfH+MKLCrxlXSYec5DaPYP1fe6Iyhf0/fSmJ0bFiZ1F8BT6cGXl2LpltQptzjXKWEkKA==} + + temp@0.9.4: + resolution: {integrity: sha512-yYrrsWnrXMcdsnu/7YMYAofM1ktpL5By7vZhf15CrXijWWrEYZks5AXBudalfSWJLlnen/QUJUB5aoB0kqZUGA==} + engines: {node: '>=6.0.0'} + test-exclude@6.0.0: resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==} engines: {node: '>=8'} + thrift@0.14.2: + resolution: {integrity: sha512-bW8EaE6iw3hSt4HB2HpBdHW86Xpb9IUJfqufx4NwEu7OGuIpS0ISj+Yy1Z1Wvhfno6SPNhKRJ1qFXea84HcrOQ==} + engines: {node: '>= 10.18.0'} + + tldts-core@6.1.39: + resolution: {integrity: sha512-+Qib8VaRq6F56UjP4CJXd30PI4s3hFumDywUlsbiEWoA8+lfAaWNTLr3e6/zZOgHzVyon4snHaybeFHd8C0j/A==} + + tldts@6.1.39: + resolution: {integrity: sha512-UCGXcPhYIUELc+FifEeDXYkoTWNU6iOEdM/Q5LsvkTz2SnpQ3q5onA+DiiZlR5YDskMhfK1YBQDeWL7PH9/miQ==} + hasBin: true + + tmp@0.2.1: + resolution: {integrity: sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==} + engines: {node: '>=8.17.0'} + tmpl@1.0.5: resolution: {integrity: sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==} @@ -1344,9 +3989,20 @@ packages: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} + tough-cookie@5.0.0-rc.4: + resolution: {integrity: sha512-EN59UG6X/O6Nz2p21O6UK8R97zvLETOZ9+FGNdo56VuJZ8cftVCZ6tyxvedkQBfcX22avA1HY+4n04OVT2q6cw==} + engines: {node: '>=16'} + tr46@0.0.3: resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + trivial-deferred@2.0.0: + resolution: {integrity: sha512-iGbM7X2slv9ORDVj2y2FFUq3cP/ypbtu2nQ8S38ufjL0glBABvmR9pTdsib1XtS2LUhhLMbelaBUaf/s5J3dSw==} + engines: {node: '>= 8'} + + try-require@1.2.1: + resolution: {integrity: sha512-aMzrGUIA/R2LwUgvsOusx+GTy8ERyNjpBzbWgS1Qx4oTFlXCMxY3PyyXbPE1pvrvK/CXpO+BBREEqrTkNroC+A==} + ts-jest@29.1.5: resolution: {integrity: sha512-UuClSYxM7byvvYfyWdFI+/2UxMmwNyJb0NPkZPQE2hew3RurV7l7zURgOHAd/1I1ZdPpe3GUsXNXAcN8TFKSIg==} engines: {node: ^14.15.0 || ^16.10.0 || ^18.0.0 || >=20.0.0} @@ -1371,10 +4027,53 @@ packages: esbuild: optional: true + ts-node@10.9.2: + resolution: {integrity: sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==} + hasBin: true + peerDependencies: + '@swc/core': '>=1.2.50' + '@swc/wasm': '>=1.2.50' + '@types/node': '*' + typescript: 
'>=2.7' + peerDependenciesMeta: + '@swc/core': + optional: true + '@swc/wasm': + optional: true + + tsconfig-paths@4.2.0: + resolution: {integrity: sha512-NoZ4roiN7LnbKn9QqE1amc9DJfzvZXxF4xDavcOWt1BPkdx+m+0gJuPM+S0vCe7zTJMYUP0R8pO2XMr+Y8oLIg==} + engines: {node: '>=6'} + + tshy@1.18.0: + resolution: {integrity: sha512-FQudIujBazHRu7CVPHKQE9/Xq1Wc7lezxD/FCnTXx2PTcnoSN32DVpb/ZXvzV2NJBTDB3XKjqX8Cdm+2UK1DlQ==} + engines: {node: 16 >=16.17 || 18 >=18.15.0 || >=20.6.1} + hasBin: true + + tslib@1.14.1: + resolution: {integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==} + + tslib@2.6.3: + resolution: {integrity: sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==} + + tsutils@3.21.0: + resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==} + engines: {node: '>= 6'} + peerDependencies: + typescript: '>=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta' + + tuf-js@2.2.1: + resolution: {integrity: sha512-GwIJau9XaA8nLVbUXsN3IlFi7WmQ48gBUrl3FTkkL/XLu/POhBzfmX9hd33FNMX1qAsfl6ozO1iMmW9NC8YniA==} + engines: {node: ^16.14.0 || >=18.0.0} + type-detect@4.0.8: resolution: {integrity: sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==} engines: {node: '>=4'} + type-fest@0.12.0: + resolution: {integrity: sha512-53RyidyjvkGpnWPMF9bQgFtWp+Sl8O2Rp13VavmJgfAP9WWG6q6TkrKU8iyJdnwnfgHI6k2hTlgqH4aSdjoTbg==} + engines: {node: '>=10'} + type-fest@0.21.3: resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} engines: {node: '>=10'} @@ -1387,19 +4086,79 @@ packages: undici-types@5.26.5: resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} + undici@6.19.7: + resolution: {integrity: sha512-HR3W/bMGPSr90i8AAp2C4DM3wChFdJPLrWYpIS++LxS8K+W535qftjt+4MyjNYHeWabMj1nvtmLIi7l++iq91A==} + engines: {node: '>=18.17'} + + unique-filename@3.0.0: + resolution: {integrity: sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + unique-slug@4.0.0: + resolution: {integrity: sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + universalify@2.0.1: + resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==} + engines: {node: '>= 10.0.0'} + + unix-dgram@2.0.6: + resolution: {integrity: sha512-AURroAsb73BZ6CdAyMrTk/hYKNj3DuYYEuOaB8bYMOHGKupRNScw90Q5C71tWJc3uE7dIeXRyuwN0xLLq3vDTg==} + engines: {node: '>=0.10.48'} + update-browserslist-db@1.0.14: resolution: {integrity: sha512-JixKH8GR2pWYshIPUg/NujK3JO7JiqEEUiNArE86NQyrgUuZeTlZQN3xuS/yiV5Kb48ev9K6RqNkaJjXsdg7Jw==} hasBin: true peerDependencies: browserslist: '>= 4.21.0' + url@0.10.3: + resolution: {integrity: sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ==} + + util-deprecate@1.0.2: + resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + + util@0.12.5: + resolution: {integrity: sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==} + + uuid@8.0.0: + resolution: 
{integrity: sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw==} + hasBin: true + + uuid@8.3.2: + resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} + hasBin: true + + uuid@9.0.1: + resolution: {integrity: sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==} + hasBin: true + + v8-compile-cache-lib@3.0.1: + resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} + v8-to-istanbul@9.2.0: resolution: {integrity: sha512-/EH/sDgxU2eGxajKdwLCDmQ4FWq+kpi3uCmBGpw1xJtnAxEjlD8j8PEiGWpCIMIs3ciNAgH0d3TTJiUkYzyZjA==} engines: {node: '>=10.12.0'} + validate-npm-package-license@3.0.4: + resolution: {integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==} + + validate-npm-package-name@5.0.1: + resolution: {integrity: sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + walk-sync@0.2.7: + resolution: {integrity: sha512-OH8GdRMowEFr0XSHQeX5fGweO6zSVHo7bG/0yJQx6LAj9Oukz0C8heI3/FYectT66gY0IPGe89kOvU410/UNpg==} + + walk-up-path@3.0.1: + resolution: {integrity: sha512-9YlCL/ynK3CTlrSRrDxZvUauLzAswPCrsaCgilqFevUYpeEW0/3ScEjaa3kbW/T0ghhkEr7mv+fpjqn1Y1YuTA==} + walker@1.0.8: resolution: {integrity: sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==} + wcwidth@1.0.1: + resolution: {integrity: sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==} + web-streams-polyfill@3.3.3: resolution: {integrity: sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==} engines: {node: '>= 8'} @@ -1411,18 +4170,50 @@ packages: webidl-conversions@3.0.1: resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + whatwg-encoding@3.1.1: + resolution: {integrity: sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==} + engines: {node: '>=18'} + + whatwg-mimetype@4.0.0: + resolution: {integrity: sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==} + engines: {node: '>=18'} + whatwg-url@5.0.0: resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} + which-typed-array@1.1.15: + resolution: {integrity: sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==} + engines: {node: '>= 0.4'} + which@2.0.2: resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} engines: {node: '>= 8'} hasBin: true + which@4.0.0: + resolution: {integrity: sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==} + engines: {node: ^16.13.0 || >=18.0.0} + hasBin: true + + widest-line@3.1.0: + resolution: {integrity: sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==} + engines: {node: '>=8'} + + widest-line@4.0.1: + resolution: {integrity: sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig==} + engines: {node: '>=12'} + + wordwrap@1.0.0: + resolution: {integrity: 
sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==} + wrap-ansi@7.0.0: resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} engines: {node: '>=10'} + wrap-ansi@8.1.0: + resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} + engines: {node: '>=12'} + wrappy@1.0.2: resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} @@ -1430,6 +4221,29 @@ packages: resolution: {integrity: sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==} engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} + ws@5.2.4: + resolution: {integrity: sha512-fFCejsuC8f9kOSu9FYaOw8CdO68O3h5v0lg4p74o8JqWpwTf9tniOD+nOB78aWoVSS6WptVUmDrp/KPsMVBWFQ==} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ^5.0.2 + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + + ws@7.5.10: + resolution: {integrity: sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==} + engines: {node: '>=8.3.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ^5.0.2 + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + ws@8.17.1: resolution: {integrity: sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==} engines: {node: '>=10.0.0'} @@ -1442,6 +4256,18 @@ packages: utf-8-validate: optional: true + xml2js@0.6.2: + resolution: {integrity: sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==} + engines: {node: '>=4.0.0'} + + xmlbuilder@11.0.1: + resolution: {integrity: sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==} + engines: {node: '>=4.0'} + + xmlhttprequest-ssl@2.0.0: + resolution: {integrity: sha512-QKxVRxiRACQcVuQEYFsI1hhkrMlrXHPegbbd1yn9UHOmRxY+si12nQYzri3vbzt8VdTTRviqcKxcyllFas5z2A==} + engines: {node: '>=0.4.0'} + y18n@5.0.8: resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} engines: {node: '>=10'} @@ -1449,6 +4275,23 @@ packages: yallist@3.1.1: resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} + yallist@4.0.0: + resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==} + + yaml-js@0.2.3: + resolution: {integrity: sha512-6xUQtVKl1qcd0EXtTEzUDVJy9Ji1fYa47LtkDtYKlIjhibPE9knNPmoRyf6SGREFHlOAUyDe9OdYqRP4DuSi5Q==} + + yaml-types@0.3.0: + resolution: {integrity: sha512-i9RxAO/LZBiE0NJUy9pbN5jFz5EasYDImzRkj8Y81kkInTi1laia3P3K/wlMKzOxFQutZip8TejvQP/DwgbU7A==} + engines: {node: '>= 16', npm: '>= 7'} + peerDependencies: + yaml: ^2.3.0 + + yaml@2.5.0: + resolution: {integrity: sha512-2wWLbGbYDiSqqIKoPjar3MPgB94ErzCtrNE1FdqGuaO0pi2JGjmE8aW8TDZwzU7vuxcGRdL/4gPQwQ7hD5AMSw==} + engines: {node: '>= 14'} + hasBin: true + yargs-parser@21.1.1: resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} engines: {node: '>=12'} @@ -1457,18 +4300,34 @@ packages: resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} engines: {node: '>=12'} + yn@3.1.1: + resolution: {integrity: 
sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} + engines: {node: '>=6'} + yocto-queue@0.1.0: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} + yoga-wasm-web@0.3.3: + resolution: {integrity: sha512-N+d4UJSJbt/R3wqY7Coqs5pcV0aUj2j9IaQ3rNj9bVCLld8tTGKRa2USARjnvZJWVx1NDmQev8EknoczaOQDOA==} + + zip-stream@4.1.1: + resolution: {integrity: sha512-9qv4rlDiopXg4E69k+vMHjNN63YFMe9sZMrdlvKnCjlCRWeCBswPPMPUfx+ipsAWq1LXHe70RcbaHdJJpS6hyQ==} + engines: {node: '>= 10'} + snapshots: + '@alcalzone/ansi-tokenize@0.1.3': + dependencies: + ansi-styles: 6.2.1 + is-fullwidth-code-point: 4.0.0 + '@ampproject/remapping@2.3.0': dependencies: '@jridgewell/gen-mapping': 0.3.5 '@jridgewell/trace-mapping': 0.3.25 - '@anthropic-ai/sdk@0.24.3': + '@anthropic-ai/sdk@0.24.3(encoding@0.1.13)': dependencies: '@types/node': 18.19.39 '@types/node-fetch': 2.6.11 @@ -1476,11 +4335,675 @@ snapshots: agentkeepalive: 4.5.0 form-data-encoder: 1.7.2 formdata-node: 4.4.1 - node-fetch: 2.7.0 + node-fetch: 2.7.0(encoding@0.1.13) web-streams-polyfill: 3.3.3 transitivePeerDependencies: - encoding + '@artilleryio/int-commons@2.10.0': + dependencies: + async: 2.6.4 + cheerio: 1.0.0 + debug: 4.3.4(supports-color@8.1.1) + deep-for-each: 3.0.0 + espree: 9.6.1 + jsonpath-plus: 7.2.0 + lodash: 4.17.21 + ms: 2.1.3 + transitivePeerDependencies: + - supports-color + + '@artilleryio/int-core@2.14.0': + dependencies: + '@artilleryio/int-commons': 2.10.0 + '@artilleryio/sketches-js': 2.1.1 + agentkeepalive: 4.5.0 + arrivals: 2.1.2 + async: 2.6.4 + chalk: 2.4.2 + cheerio: 1.0.0 + cookie-parser: 1.4.6 + csv-parse: 4.16.3 + debug: 4.3.4(supports-color@8.1.1) + decompress-response: 6.0.0 + deep-for-each: 3.0.0 + driftless: 2.0.3 + esprima: 4.0.1 + eventemitter3: 4.0.7 + fast-deep-equal: 3.1.3 + filtrex: 0.5.4 + form-data: 3.0.1 + got: 11.8.6 + hpagent: 0.1.2 + https-proxy-agent: 5.0.1 + lodash: 4.17.21 + ms: 2.1.3 + protobufjs: 7.3.2 + socket.io-client: 4.7.5 + socketio-wildcard: 2.0.0 + tough-cookie: 5.0.0-rc.4 + try-require: 1.2.1 + uuid: 8.3.2 + ws: 7.5.10 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + + '@artilleryio/sketches-js@2.1.1': {} + + '@aws-crypto/sha256-browser@5.2.0': + dependencies: + '@aws-crypto/sha256-js': 5.2.0 + '@aws-crypto/supports-web-crypto': 5.2.0 + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.609.0 + '@aws-sdk/util-locate-window': 3.568.0 + '@smithy/util-utf8': 2.3.0 + tslib: 2.6.3 + + '@aws-crypto/sha256-js@5.2.0': + dependencies: + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.609.0 + tslib: 2.6.3 + + '@aws-crypto/supports-web-crypto@5.2.0': + dependencies: + tslib: 2.6.3 + + '@aws-crypto/util@5.2.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/util-utf8': 2.3.0 + tslib: 2.6.3 + + '@aws-sdk/client-cloudwatch@3.629.0': + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/client-sso-oidc': 3.629.0(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/client-sts': 3.629.0 + '@aws-sdk/core': 3.629.0 + '@aws-sdk/credential-provider-node': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/middleware-host-header': 3.620.0 + '@aws-sdk/middleware-logger': 3.609.0 + '@aws-sdk/middleware-recursion-detection': 3.620.0 + '@aws-sdk/middleware-user-agent': 3.620.0 + '@aws-sdk/region-config-resolver': 3.614.0 + '@aws-sdk/types': 3.609.0 + 
'@aws-sdk/util-endpoints': 3.614.0 + '@aws-sdk/util-user-agent-browser': 3.609.0 + '@aws-sdk/util-user-agent-node': 3.614.0 + '@smithy/config-resolver': 3.0.5 + '@smithy/core': 2.3.2 + '@smithy/fetch-http-handler': 3.2.4 + '@smithy/hash-node': 3.0.3 + '@smithy/invalid-dependency': 3.0.3 + '@smithy/middleware-compression': 3.0.7 + '@smithy/middleware-content-length': 3.0.5 + '@smithy/middleware-endpoint': 3.1.0 + '@smithy/middleware-retry': 3.0.14 + '@smithy/middleware-serde': 3.0.3 + '@smithy/middleware-stack': 3.0.3 + '@smithy/node-config-provider': 3.1.4 + '@smithy/node-http-handler': 3.1.4 + '@smithy/protocol-http': 4.1.0 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + '@smithy/url-parser': 3.0.3 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.14 + '@smithy/util-defaults-mode-node': 3.0.14 + '@smithy/util-endpoints': 2.0.5 + '@smithy/util-middleware': 3.0.3 + '@smithy/util-retry': 3.0.3 + '@smithy/util-utf8': 3.0.0 + '@smithy/util-waiter': 3.1.2 + tslib: 2.6.3 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/client-cognito-identity@3.629.0': + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/client-sso-oidc': 3.629.0(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/client-sts': 3.629.0 + '@aws-sdk/core': 3.629.0 + '@aws-sdk/credential-provider-node': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/middleware-host-header': 3.620.0 + '@aws-sdk/middleware-logger': 3.609.0 + '@aws-sdk/middleware-recursion-detection': 3.620.0 + '@aws-sdk/middleware-user-agent': 3.620.0 + '@aws-sdk/region-config-resolver': 3.614.0 + '@aws-sdk/types': 3.609.0 + '@aws-sdk/util-endpoints': 3.614.0 + '@aws-sdk/util-user-agent-browser': 3.609.0 + '@aws-sdk/util-user-agent-node': 3.614.0 + '@smithy/config-resolver': 3.0.5 + '@smithy/core': 2.3.2 + '@smithy/fetch-http-handler': 3.2.4 + '@smithy/hash-node': 3.0.3 + '@smithy/invalid-dependency': 3.0.3 + '@smithy/middleware-content-length': 3.0.5 + '@smithy/middleware-endpoint': 3.1.0 + '@smithy/middleware-retry': 3.0.14 + '@smithy/middleware-serde': 3.0.3 + '@smithy/middleware-stack': 3.0.3 + '@smithy/node-config-provider': 3.1.4 + '@smithy/node-http-handler': 3.1.4 + '@smithy/protocol-http': 4.1.0 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + '@smithy/url-parser': 3.0.3 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.14 + '@smithy/util-defaults-mode-node': 3.0.14 + '@smithy/util-endpoints': 2.0.5 + '@smithy/util-middleware': 3.0.3 + '@smithy/util-retry': 3.0.3 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.3 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0)': + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/client-sts': 3.629.0 + '@aws-sdk/core': 3.629.0 + '@aws-sdk/credential-provider-node': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/middleware-host-header': 3.620.0 + '@aws-sdk/middleware-logger': 3.609.0 + '@aws-sdk/middleware-recursion-detection': 3.620.0 + '@aws-sdk/middleware-user-agent': 3.620.0 + '@aws-sdk/region-config-resolver': 3.614.0 + '@aws-sdk/types': 3.609.0 + '@aws-sdk/util-endpoints': 3.614.0 + 
'@aws-sdk/util-user-agent-browser': 3.609.0 + '@aws-sdk/util-user-agent-node': 3.614.0 + '@smithy/config-resolver': 3.0.5 + '@smithy/core': 2.3.2 + '@smithy/fetch-http-handler': 3.2.4 + '@smithy/hash-node': 3.0.3 + '@smithy/invalid-dependency': 3.0.3 + '@smithy/middleware-content-length': 3.0.5 + '@smithy/middleware-endpoint': 3.1.0 + '@smithy/middleware-retry': 3.0.14 + '@smithy/middleware-serde': 3.0.3 + '@smithy/middleware-stack': 3.0.3 + '@smithy/node-config-provider': 3.1.4 + '@smithy/node-http-handler': 3.1.4 + '@smithy/protocol-http': 4.1.0 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + '@smithy/url-parser': 3.0.3 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.14 + '@smithy/util-defaults-mode-node': 3.0.14 + '@smithy/util-endpoints': 2.0.5 + '@smithy/util-middleware': 3.0.3 + '@smithy/util-retry': 3.0.3 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.3 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/client-sso@3.629.0': + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/core': 3.629.0 + '@aws-sdk/middleware-host-header': 3.620.0 + '@aws-sdk/middleware-logger': 3.609.0 + '@aws-sdk/middleware-recursion-detection': 3.620.0 + '@aws-sdk/middleware-user-agent': 3.620.0 + '@aws-sdk/region-config-resolver': 3.614.0 + '@aws-sdk/types': 3.609.0 + '@aws-sdk/util-endpoints': 3.614.0 + '@aws-sdk/util-user-agent-browser': 3.609.0 + '@aws-sdk/util-user-agent-node': 3.614.0 + '@smithy/config-resolver': 3.0.5 + '@smithy/core': 2.3.2 + '@smithy/fetch-http-handler': 3.2.4 + '@smithy/hash-node': 3.0.3 + '@smithy/invalid-dependency': 3.0.3 + '@smithy/middleware-content-length': 3.0.5 + '@smithy/middleware-endpoint': 3.1.0 + '@smithy/middleware-retry': 3.0.14 + '@smithy/middleware-serde': 3.0.3 + '@smithy/middleware-stack': 3.0.3 + '@smithy/node-config-provider': 3.1.4 + '@smithy/node-http-handler': 3.1.4 + '@smithy/protocol-http': 4.1.0 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + '@smithy/url-parser': 3.0.3 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.14 + '@smithy/util-defaults-mode-node': 3.0.14 + '@smithy/util-endpoints': 2.0.5 + '@smithy/util-middleware': 3.0.3 + '@smithy/util-retry': 3.0.3 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.3 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/client-sts@3.629.0': + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/client-sso-oidc': 3.629.0(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/core': 3.629.0 + '@aws-sdk/credential-provider-node': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/middleware-host-header': 3.620.0 + '@aws-sdk/middleware-logger': 3.609.0 + '@aws-sdk/middleware-recursion-detection': 3.620.0 + '@aws-sdk/middleware-user-agent': 3.620.0 + '@aws-sdk/region-config-resolver': 3.614.0 + '@aws-sdk/types': 3.609.0 + '@aws-sdk/util-endpoints': 3.614.0 + '@aws-sdk/util-user-agent-browser': 3.609.0 + '@aws-sdk/util-user-agent-node': 3.614.0 + '@smithy/config-resolver': 3.0.5 + '@smithy/core': 2.3.2 + '@smithy/fetch-http-handler': 3.2.4 + '@smithy/hash-node': 3.0.3 + '@smithy/invalid-dependency': 3.0.3 + '@smithy/middleware-content-length': 3.0.5 + '@smithy/middleware-endpoint': 3.1.0 + '@smithy/middleware-retry': 3.0.14 + 
'@smithy/middleware-serde': 3.0.3 + '@smithy/middleware-stack': 3.0.3 + '@smithy/node-config-provider': 3.1.4 + '@smithy/node-http-handler': 3.1.4 + '@smithy/protocol-http': 4.1.0 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + '@smithy/url-parser': 3.0.3 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.14 + '@smithy/util-defaults-mode-node': 3.0.14 + '@smithy/util-endpoints': 2.0.5 + '@smithy/util-middleware': 3.0.3 + '@smithy/util-retry': 3.0.3 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.3 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/core@3.629.0': + dependencies: + '@smithy/core': 2.3.2 + '@smithy/node-config-provider': 3.1.4 + '@smithy/property-provider': 3.1.3 + '@smithy/protocol-http': 4.1.0 + '@smithy/signature-v4': 4.1.0 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + '@smithy/util-middleware': 3.0.3 + fast-xml-parser: 4.4.1 + tslib: 2.6.3 + + '@aws-sdk/credential-provider-cognito-identity@3.629.0': + dependencies: + '@aws-sdk/client-cognito-identity': 3.629.0 + '@aws-sdk/types': 3.609.0 + '@smithy/property-provider': 3.1.3 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/credential-provider-env@3.620.1': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/property-provider': 3.1.3 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@aws-sdk/credential-provider-http@3.622.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/fetch-http-handler': 3.2.4 + '@smithy/node-http-handler': 3.1.4 + '@smithy/property-provider': 3.1.3 + '@smithy/protocol-http': 4.1.0 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + '@smithy/util-stream': 3.1.3 + tslib: 2.6.3 + + '@aws-sdk/credential-provider-ini@3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@aws-sdk/client-sts@3.629.0)': + dependencies: + '@aws-sdk/client-sts': 3.629.0 + '@aws-sdk/credential-provider-env': 3.620.1 + '@aws-sdk/credential-provider-http': 3.622.0 + '@aws-sdk/credential-provider-process': 3.620.1 + '@aws-sdk/credential-provider-sso': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0)) + '@aws-sdk/credential-provider-web-identity': 3.621.0(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/types': 3.609.0 + '@smithy/credential-provider-imds': 3.2.0 + '@smithy/property-provider': 3.1.3 + '@smithy/shared-ini-file-loader': 3.1.4 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + + '@aws-sdk/credential-provider-node@3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@aws-sdk/client-sts@3.629.0)': + dependencies: + '@aws-sdk/credential-provider-env': 3.620.1 + '@aws-sdk/credential-provider-http': 3.622.0 + '@aws-sdk/credential-provider-ini': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/credential-provider-process': 3.620.1 + '@aws-sdk/credential-provider-sso': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0)) + '@aws-sdk/credential-provider-web-identity': 3.621.0(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/types': 3.609.0 + '@smithy/credential-provider-imds': 3.2.0 + '@smithy/property-provider': 3.1.3 + '@smithy/shared-ini-file-loader': 3.1.4 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - '@aws-sdk/client-sts' + - aws-crt + + '@aws-sdk/credential-provider-process@3.620.1': + 
dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/property-provider': 3.1.3 + '@smithy/shared-ini-file-loader': 3.1.4 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@aws-sdk/credential-provider-sso@3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))': + dependencies: + '@aws-sdk/client-sso': 3.629.0 + '@aws-sdk/token-providers': 3.614.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0)) + '@aws-sdk/types': 3.609.0 + '@smithy/property-provider': 3.1.3 + '@smithy/shared-ini-file-loader': 3.1.4 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + + '@aws-sdk/credential-provider-web-identity@3.621.0(@aws-sdk/client-sts@3.629.0)': + dependencies: + '@aws-sdk/client-sts': 3.629.0 + '@aws-sdk/types': 3.609.0 + '@smithy/property-provider': 3.1.3 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@aws-sdk/credential-providers@3.630.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))': + dependencies: + '@aws-sdk/client-cognito-identity': 3.629.0 + '@aws-sdk/client-sso': 3.629.0 + '@aws-sdk/client-sts': 3.629.0 + '@aws-sdk/credential-provider-cognito-identity': 3.629.0 + '@aws-sdk/credential-provider-env': 3.620.1 + '@aws-sdk/credential-provider-http': 3.622.0 + '@aws-sdk/credential-provider-ini': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/credential-provider-node': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/credential-provider-process': 3.620.1 + '@aws-sdk/credential-provider-sso': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0)) + '@aws-sdk/credential-provider-web-identity': 3.621.0(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/types': 3.609.0 + '@smithy/credential-provider-imds': 3.2.0 + '@smithy/property-provider': 3.1.3 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + + '@aws-sdk/middleware-host-header@3.620.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/protocol-http': 4.1.0 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@aws-sdk/middleware-logger@3.609.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@aws-sdk/middleware-recursion-detection@3.620.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/protocol-http': 4.1.0 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@aws-sdk/middleware-user-agent@3.620.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@aws-sdk/util-endpoints': 3.614.0 + '@smithy/protocol-http': 4.1.0 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@aws-sdk/region-config-resolver@3.614.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/node-config-provider': 3.1.4 + '@smithy/types': 3.3.0 + '@smithy/util-config-provider': 3.0.0 + '@smithy/util-middleware': 3.0.3 + tslib: 2.6.3 + + '@aws-sdk/token-providers@3.614.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))': + dependencies: + '@aws-sdk/client-sso-oidc': 3.629.0(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/types': 3.609.0 + '@smithy/property-provider': 3.1.3 + '@smithy/shared-ini-file-loader': 3.1.4 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@aws-sdk/types@3.609.0': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@aws-sdk/util-endpoints@3.614.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/types': 3.3.0 + '@smithy/util-endpoints': 2.0.5 + tslib: 2.6.3 + + '@aws-sdk/util-locate-window@3.568.0': + dependencies: 
+ tslib: 2.6.3 + + '@aws-sdk/util-user-agent-browser@3.609.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/types': 3.3.0 + bowser: 2.11.0 + tslib: 2.6.3 + + '@aws-sdk/util-user-agent-node@3.614.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/node-config-provider': 3.1.4 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@azure/abort-controller@1.1.0': + dependencies: + tslib: 2.6.3 + + '@azure/abort-controller@2.1.2': + dependencies: + tslib: 2.6.3 + + '@azure/arm-containerinstance@9.1.0': + dependencies: + '@azure/abort-controller': 1.1.0 + '@azure/core-auth': 1.7.2 + '@azure/core-client': 1.9.2 + '@azure/core-lro': 2.7.2 + '@azure/core-paging': 1.6.2 + '@azure/core-rest-pipeline': 1.16.3 + tslib: 2.6.3 + transitivePeerDependencies: + - supports-color + + '@azure/core-auth@1.7.2': + dependencies: + '@azure/abort-controller': 2.1.2 + '@azure/core-util': 1.9.2 + tslib: 2.6.3 + + '@azure/core-client@1.9.2': + dependencies: + '@azure/abort-controller': 2.1.2 + '@azure/core-auth': 1.7.2 + '@azure/core-rest-pipeline': 1.16.3 + '@azure/core-tracing': 1.1.2 + '@azure/core-util': 1.9.2 + '@azure/logger': 1.1.4 + tslib: 2.6.3 + transitivePeerDependencies: + - supports-color + + '@azure/core-http-compat@2.1.2': + dependencies: + '@azure/abort-controller': 2.1.2 + '@azure/core-client': 1.9.2 + '@azure/core-rest-pipeline': 1.16.3 + transitivePeerDependencies: + - supports-color + + '@azure/core-lro@2.7.2': + dependencies: + '@azure/abort-controller': 2.1.2 + '@azure/core-util': 1.9.2 + '@azure/logger': 1.1.4 + tslib: 2.6.3 + + '@azure/core-paging@1.6.2': + dependencies: + tslib: 2.6.3 + + '@azure/core-rest-pipeline@1.16.3': + dependencies: + '@azure/abort-controller': 2.1.2 + '@azure/core-auth': 1.7.2 + '@azure/core-tracing': 1.1.2 + '@azure/core-util': 1.9.2 + '@azure/logger': 1.1.4 + http-proxy-agent: 7.0.2 + https-proxy-agent: 7.0.5 + tslib: 2.6.3 + transitivePeerDependencies: + - supports-color + + '@azure/core-tracing@1.1.2': + dependencies: + tslib: 2.6.3 + + '@azure/core-util@1.9.2': + dependencies: + '@azure/abort-controller': 2.1.2 + tslib: 2.6.3 + + '@azure/core-xml@1.4.3': + dependencies: + fast-xml-parser: 4.4.1 + tslib: 2.6.3 + + '@azure/identity@4.4.1': + dependencies: + '@azure/abort-controller': 1.1.0 + '@azure/core-auth': 1.7.2 + '@azure/core-client': 1.9.2 + '@azure/core-rest-pipeline': 1.16.3 + '@azure/core-tracing': 1.1.2 + '@azure/core-util': 1.9.2 + '@azure/logger': 1.1.4 + '@azure/msal-browser': 3.21.0 + '@azure/msal-node': 2.13.0 + events: 3.3.0 + jws: 4.0.0 + open: 8.4.2 + stoppable: 1.1.0 + tslib: 2.6.3 + transitivePeerDependencies: + - supports-color + + '@azure/logger@1.1.4': + dependencies: + tslib: 2.6.3 + + '@azure/msal-browser@3.21.0': + dependencies: + '@azure/msal-common': 14.14.1 + + '@azure/msal-common@14.14.1': {} + + '@azure/msal-node@2.13.0': + dependencies: + '@azure/msal-common': 14.14.1 + jsonwebtoken: 9.0.2 + uuid: 8.3.2 + + '@azure/storage-blob@12.24.0': + dependencies: + '@azure/abort-controller': 1.1.0 + '@azure/core-auth': 1.7.2 + '@azure/core-client': 1.9.2 + '@azure/core-http-compat': 2.1.2 + '@azure/core-lro': 2.7.2 + '@azure/core-paging': 1.6.2 + '@azure/core-rest-pipeline': 1.16.3 + '@azure/core-tracing': 1.1.2 + '@azure/core-util': 1.9.2 + '@azure/core-xml': 1.4.3 + '@azure/logger': 1.1.4 + events: 3.3.0 + tslib: 2.6.3 + transitivePeerDependencies: + - supports-color + + '@azure/storage-queue@12.23.0': + dependencies: + '@azure/abort-controller': 1.1.0 + '@azure/core-auth': 1.7.2 + '@azure/core-client': 1.9.2 + 
'@azure/core-http-compat': 2.1.2 + '@azure/core-paging': 1.6.2 + '@azure/core-rest-pipeline': 1.16.3 + '@azure/core-tracing': 1.1.2 + '@azure/core-util': 1.9.2 + '@azure/core-xml': 1.4.3 + '@azure/logger': 1.1.4 + tslib: 2.6.3 + transitivePeerDependencies: + - supports-color + '@babel/code-frame@7.24.2': dependencies: '@babel/highlight': 7.24.5 @@ -1501,7 +5024,7 @@ snapshots: '@babel/traverse': 7.24.5 '@babel/types': 7.24.5 convert-source-map: 2.0.0 - debug: 4.3.4 + debug: 4.3.4(supports-color@8.1.1) gensync: 1.0.0-beta.2 json5: 2.2.3 semver: 6.3.1 @@ -1668,7 +5191,7 @@ snapshots: '@babel/helper-split-export-declaration': 7.24.5 '@babel/parser': 7.24.5 '@babel/types': 7.24.5 - debug: 4.3.4 + debug: 4.3.4(supports-color@8.1.1) globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -1679,10 +5202,67 @@ snapshots: '@babel/helper-validator-identifier': 7.24.5 to-fast-properties: 2.0.0 + '@base2/pretty-print-object@1.0.1': {} + '@bcoe/v8-coverage@0.2.3': {} + '@colors/colors@1.5.0': + optional: true + + '@cspotcode/source-map-support@0.8.1': + dependencies: + '@jridgewell/trace-mapping': 0.3.9 + + '@dependents/detective-less@4.1.0': + dependencies: + gonzales-pe: 4.3.0 + node-source-walk: 6.0.2 + '@dqbd/tiktoken@1.0.15': {} + '@grpc/grpc-js@1.11.1': + dependencies: + '@grpc/proto-loader': 0.7.13 + '@js-sdsl/ordered-map': 4.4.2 + + '@grpc/proto-loader@0.7.13': + dependencies: + lodash.camelcase: 4.3.0 + long: 5.2.3 + protobufjs: 7.3.2 + yargs: 17.7.2 + + '@hapi/hoek@9.3.0': {} + + '@hapi/topo@5.1.0': + dependencies: + '@hapi/hoek': 9.3.0 + + '@isaacs/cliui@8.0.2': + dependencies: + string-width: 5.1.2 + string-width-cjs: string-width@4.2.3 + strip-ansi: 7.1.0 + strip-ansi-cjs: strip-ansi@6.0.1 + wrap-ansi: 8.1.0 + wrap-ansi-cjs: wrap-ansi@7.0.0 + + '@isaacs/ts-node-temp-fork-for-pr-2009@10.9.7(@types/node@20.14.9)(typescript@5.4.5)': + dependencies: + '@cspotcode/source-map-support': 0.8.1 + '@tsconfig/node14': 14.1.2 + '@tsconfig/node16': 16.1.3 + '@tsconfig/node18': 18.2.4 + '@tsconfig/node20': 20.1.4 + '@types/node': 20.14.9 + acorn: 8.12.1 + acorn-walk: 8.3.3 + arg: 4.1.3 + diff: 4.0.2 + make-error: 1.3.6 + typescript: 5.4.5 + v8-compile-cache-lib: 3.0.1 + '@istanbuljs/load-nyc-config@1.1.0': dependencies: camelcase: 5.3.1 @@ -1702,7 +5282,7 @@ snapshots: jest-util: 29.7.0 slash: 3.0.0 - '@jest/core@29.7.0': + '@jest/core@29.7.0(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5))': dependencies: '@jest/console': 29.7.0 '@jest/reporters': 29.7.0 @@ -1716,7 +5296,7 @@ snapshots: exit: 0.1.2 graceful-fs: 4.2.11 jest-changed-files: 29.7.0 - jest-config: 29.7.0(@types/node@20.14.9) + jest-config: 29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)) jest-haste-map: 29.7.0 jest-message-util: 29.7.0 jest-regex-util: 29.6.3 @@ -1872,8 +5452,458 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.4.15 + '@jridgewell/trace-mapping@0.3.9': + dependencies: + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.4.15 + + '@js-sdsl/ordered-map@4.4.2': {} + + '@ngneat/falso@7.2.0': + dependencies: + seedrandom: 3.0.5 + uuid: 8.3.2 + + '@nodelib/fs.scandir@2.1.5': + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + + '@nodelib/fs.stat@2.0.5': {} + + '@nodelib/fs.walk@1.2.8': + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.17.1 + + '@npmcli/agent@2.2.2': + dependencies: + agent-base: 7.1.1 + http-proxy-agent: 7.0.2 + https-proxy-agent: 7.0.5 + lru-cache: 10.4.3 + socks-proxy-agent: 8.0.4 + 
transitivePeerDependencies: + - supports-color + + '@npmcli/fs@3.1.1': + dependencies: + semver: 7.6.2 + + '@npmcli/git@5.0.8': + dependencies: + '@npmcli/promise-spawn': 7.0.2 + ini: 4.1.3 + lru-cache: 10.4.3 + npm-pick-manifest: 9.1.0 + proc-log: 4.2.0 + promise-inflight: 1.0.1 + promise-retry: 2.0.1 + semver: 7.6.2 + which: 4.0.0 + transitivePeerDependencies: + - bluebird + + '@npmcli/installed-package-contents@2.1.0': + dependencies: + npm-bundled: 3.0.1 + npm-normalize-package-bin: 3.0.1 + + '@npmcli/node-gyp@3.0.0': {} + + '@npmcli/package-json@5.2.0': + dependencies: + '@npmcli/git': 5.0.8 + glob: 10.4.5 + hosted-git-info: 7.0.2 + json-parse-even-better-errors: 3.0.2 + normalize-package-data: 6.0.2 + proc-log: 4.2.0 + semver: 7.6.2 + transitivePeerDependencies: + - bluebird + + '@npmcli/promise-spawn@7.0.2': + dependencies: + which: 4.0.0 + + '@npmcli/redact@1.1.0': {} + + '@npmcli/run-script@7.0.4': + dependencies: + '@npmcli/node-gyp': 3.0.0 + '@npmcli/package-json': 5.2.0 + '@npmcli/promise-spawn': 7.0.2 + node-gyp: 10.2.0 + which: 4.0.0 + transitivePeerDependencies: + - bluebird + - supports-color + + '@oclif/core@2.16.0(@types/node@20.14.9)(typescript@5.4.5)': + dependencies: + '@types/cli-progress': 3.11.6 + ansi-escapes: 4.3.2 + ansi-styles: 4.3.0 + cardinal: 2.1.1 + chalk: 4.1.2 + clean-stack: 3.0.1 + cli-progress: 3.12.0 + debug: 4.3.4(supports-color@8.1.1) + ejs: 3.1.10 + get-package-type: 0.1.0 + globby: 11.1.0 + hyperlinker: 1.0.0 + indent-string: 4.0.0 + is-wsl: 2.2.0 + js-yaml: 3.14.1 + natural-orderby: 2.0.3 + object-treeify: 1.1.33 + password-prompt: 1.1.3 + slice-ansi: 4.0.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + supports-color: 8.1.1 + supports-hyperlinks: 2.3.0 + ts-node: 10.9.2(@types/node@20.14.9)(typescript@5.4.5) + tslib: 2.6.3 + widest-line: 3.1.0 + wordwrap: 1.0.0 + wrap-ansi: 7.0.0 + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - typescript + + '@oclif/plugin-help@5.2.20(@types/node@20.14.9)(typescript@5.4.5)': + dependencies: + '@oclif/core': 2.16.0(@types/node@20.14.9)(typescript@5.4.5) + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - typescript + + '@oclif/plugin-not-found@2.4.3(@types/node@20.14.9)(typescript@5.4.5)': + dependencies: + '@oclif/core': 2.16.0(@types/node@20.14.9)(typescript@5.4.5) + chalk: 4.1.2 + fast-levenshtein: 3.0.0 + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - typescript + + '@opentelemetry/api-logs@0.41.2': + dependencies: + '@opentelemetry/api': 1.9.0 + + '@opentelemetry/api-logs@0.43.0': + dependencies: + '@opentelemetry/api': 1.9.0 + + '@opentelemetry/api@1.9.0': {} + + '@opentelemetry/context-async-hooks@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + + '@opentelemetry/core@1.15.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/semantic-conventions': 1.15.2 + + '@opentelemetry/core@1.17.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/semantic-conventions': 1.17.0 + + '@opentelemetry/core@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/semantic-conventions': 1.25.1 + + '@opentelemetry/exporter-metrics-otlp-grpc@0.41.2(@opentelemetry/api@1.9.0)': + dependencies: + '@grpc/grpc-js': 1.11.1 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-metrics-otlp-http': 
0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-grpc-exporter-base': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.15.2(@opentelemetry/api@1.9.0) + + '@opentelemetry/exporter-metrics-otlp-http@0.41.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.15.2(@opentelemetry/api@1.9.0) + + '@opentelemetry/exporter-metrics-otlp-proto@0.41.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-metrics-otlp-http': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-proto-exporter-base': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.15.2(@opentelemetry/api@1.9.0) + + '@opentelemetry/exporter-trace-otlp-grpc@0.43.0(@opentelemetry/api@1.9.0)': + dependencies: + '@grpc/grpc-js': 1.11.1 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-grpc-exporter-base': 0.43.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.43.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.17.0(@opentelemetry/api@1.9.0) + + '@opentelemetry/exporter-trace-otlp-http@0.41.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.15.2(@opentelemetry/api@1.9.0) + + '@opentelemetry/exporter-trace-otlp-proto@0.41.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-proto-exporter-base': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.15.2(@opentelemetry/api@1.9.0) + + '@opentelemetry/exporter-zipkin@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.1 + + '@opentelemetry/otlp-exporter-base@0.41.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + + '@opentelemetry/otlp-exporter-base@0.43.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 
1.17.0(@opentelemetry/api@1.9.0) + + '@opentelemetry/otlp-grpc-exporter-base@0.41.2(@opentelemetry/api@1.9.0)': + dependencies: + '@grpc/grpc-js': 1.11.1 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.41.2(@opentelemetry/api@1.9.0) + protobufjs: 7.3.2 + + '@opentelemetry/otlp-grpc-exporter-base@0.43.0(@opentelemetry/api@1.9.0)': + dependencies: + '@grpc/grpc-js': 1.11.1 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.43.0(@opentelemetry/api@1.9.0) + protobufjs: 7.3.2 + + '@opentelemetry/otlp-proto-exporter-base@0.41.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.41.2(@opentelemetry/api@1.9.0) + protobufjs: 7.3.2 + + '@opentelemetry/otlp-transformer@0.41.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.41.2 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.41.2(@opentelemetry/api-logs@0.41.2)(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.15.2(@opentelemetry/api@1.9.0) + + '@opentelemetry/otlp-transformer@0.43.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.43.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.43.0(@opentelemetry/api-logs@0.43.0)(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.17.0(@opentelemetry/api@1.9.0) + + '@opentelemetry/resources@1.15.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.15.2 + + '@opentelemetry/resources@1.17.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.17.0 + + '@opentelemetry/resources@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.1 + + '@opentelemetry/sdk-logs@0.41.2(@opentelemetry/api-logs@0.41.2)(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.41.2 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.15.2(@opentelemetry/api@1.9.0) + + '@opentelemetry/sdk-logs@0.43.0(@opentelemetry/api-logs@0.43.0)(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.43.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.17.0(@opentelemetry/api@1.9.0) + + '@opentelemetry/sdk-metrics@1.15.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.15.2(@opentelemetry/api@1.9.0) + lodash.merge: 4.6.2 + + '@opentelemetry/sdk-metrics@1.17.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + 
'@opentelemetry/core': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.17.0(@opentelemetry/api@1.9.0) + lodash.merge: 4.6.2 + + '@opentelemetry/sdk-metrics@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) + lodash.merge: 4.6.2 + + '@opentelemetry/sdk-trace-base@1.15.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.15.2 + + '@opentelemetry/sdk-trace-base@1.17.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.17.0 + + '@opentelemetry/sdk-trace-base@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.1 + + '@opentelemetry/semantic-conventions@1.15.2': {} + + '@opentelemetry/semantic-conventions@1.17.0': {} + + '@opentelemetry/semantic-conventions@1.25.1': {} + + '@pkgjs/parseargs@0.11.0': + optional: true + + '@playwright/browser-chromium@1.45.3': + dependencies: + playwright-core: 1.45.3 + + '@playwright/test@1.45.3': + dependencies: + playwright: 1.45.3 + + '@protobufjs/aspromise@1.1.2': {} + + '@protobufjs/base64@1.1.2': {} + + '@protobufjs/codegen@2.0.4': {} + + '@protobufjs/eventemitter@1.1.0': {} + + '@protobufjs/fetch@1.1.0': + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/inquire': 1.1.0 + + '@protobufjs/float@1.0.2': {} + + '@protobufjs/inquire@1.1.0': {} + + '@protobufjs/path@1.1.2': {} + + '@protobufjs/pool@1.1.0': {} + + '@protobufjs/utf8@1.1.0': {} + + '@sideway/address@4.1.5': + dependencies: + '@hapi/hoek': 9.3.0 + + '@sideway/formula@3.0.1': {} + + '@sideway/pinpoint@2.0.0': {} + + '@sigstore/bundle@2.3.2': + dependencies: + '@sigstore/protobuf-specs': 0.3.2 + + '@sigstore/core@1.1.0': {} + + '@sigstore/protobuf-specs@0.3.2': {} + + '@sigstore/sign@2.3.2': + dependencies: + '@sigstore/bundle': 2.3.2 + '@sigstore/core': 1.1.0 + '@sigstore/protobuf-specs': 0.3.2 + make-fetch-happen: 13.0.1 + proc-log: 4.2.0 + promise-retry: 2.0.1 + transitivePeerDependencies: + - supports-color + + '@sigstore/tuf@2.3.4': + dependencies: + '@sigstore/protobuf-specs': 0.3.2 + tuf-js: 2.2.1 + transitivePeerDependencies: + - supports-color + + '@sigstore/verify@1.2.1': + dependencies: + '@sigstore/bundle': 2.3.2 + '@sigstore/core': 1.1.0 + '@sigstore/protobuf-specs': 0.3.2 + '@sinclair/typebox@0.27.8': {} + '@sindresorhus/is@4.6.0': {} + '@sinonjs/commons@3.0.1': dependencies: type-detect: 4.0.8 @@ -1882,6 +5912,291 @@ snapshots: dependencies: '@sinonjs/commons': 3.0.1 + '@smithy/abort-controller@3.1.1': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/config-resolver@3.0.5': + dependencies: + '@smithy/node-config-provider': 3.1.4 + '@smithy/types': 3.3.0 + '@smithy/util-config-provider': 3.0.0 + '@smithy/util-middleware': 3.0.3 + tslib: 2.6.3 + + '@smithy/core@2.3.2': + dependencies: + '@smithy/middleware-endpoint': 3.1.0 + '@smithy/middleware-retry': 3.0.14 + '@smithy/middleware-serde': 3.0.3 + '@smithy/protocol-http': 4.1.0 + 
'@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + '@smithy/util-middleware': 3.0.3 + tslib: 2.6.3 + + '@smithy/credential-provider-imds@3.2.0': + dependencies: + '@smithy/node-config-provider': 3.1.4 + '@smithy/property-provider': 3.1.3 + '@smithy/types': 3.3.0 + '@smithy/url-parser': 3.0.3 + tslib: 2.6.3 + + '@smithy/fetch-http-handler@3.2.4': + dependencies: + '@smithy/protocol-http': 4.1.0 + '@smithy/querystring-builder': 3.0.3 + '@smithy/types': 3.3.0 + '@smithy/util-base64': 3.0.0 + tslib: 2.6.3 + + '@smithy/hash-node@3.0.3': + dependencies: + '@smithy/types': 3.3.0 + '@smithy/util-buffer-from': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.3 + + '@smithy/invalid-dependency@3.0.3': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/is-array-buffer@2.2.0': + dependencies: + tslib: 2.6.3 + + '@smithy/is-array-buffer@3.0.0': + dependencies: + tslib: 2.6.3 + + '@smithy/middleware-compression@3.0.7': + dependencies: + '@smithy/is-array-buffer': 3.0.0 + '@smithy/node-config-provider': 3.1.4 + '@smithy/protocol-http': 4.1.0 + '@smithy/types': 3.3.0 + '@smithy/util-config-provider': 3.0.0 + '@smithy/util-middleware': 3.0.3 + '@smithy/util-utf8': 3.0.0 + fflate: 0.8.1 + tslib: 2.6.3 + + '@smithy/middleware-content-length@3.0.5': + dependencies: + '@smithy/protocol-http': 4.1.0 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/middleware-endpoint@3.1.0': + dependencies: + '@smithy/middleware-serde': 3.0.3 + '@smithy/node-config-provider': 3.1.4 + '@smithy/shared-ini-file-loader': 3.1.4 + '@smithy/types': 3.3.0 + '@smithy/url-parser': 3.0.3 + '@smithy/util-middleware': 3.0.3 + tslib: 2.6.3 + + '@smithy/middleware-retry@3.0.14': + dependencies: + '@smithy/node-config-provider': 3.1.4 + '@smithy/protocol-http': 4.1.0 + '@smithy/service-error-classification': 3.0.3 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + '@smithy/util-middleware': 3.0.3 + '@smithy/util-retry': 3.0.3 + tslib: 2.6.3 + uuid: 9.0.1 + + '@smithy/middleware-serde@3.0.3': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/middleware-stack@3.0.3': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/node-config-provider@3.1.4': + dependencies: + '@smithy/property-provider': 3.1.3 + '@smithy/shared-ini-file-loader': 3.1.4 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/node-http-handler@3.1.4': + dependencies: + '@smithy/abort-controller': 3.1.1 + '@smithy/protocol-http': 4.1.0 + '@smithy/querystring-builder': 3.0.3 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/property-provider@3.1.3': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/protocol-http@4.1.0': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/querystring-builder@3.0.3': + dependencies: + '@smithy/types': 3.3.0 + '@smithy/util-uri-escape': 3.0.0 + tslib: 2.6.3 + + '@smithy/querystring-parser@3.0.3': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/service-error-classification@3.0.3': + dependencies: + '@smithy/types': 3.3.0 + + '@smithy/shared-ini-file-loader@3.1.4': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/signature-v4@4.1.0': + dependencies: + '@smithy/is-array-buffer': 3.0.0 + '@smithy/protocol-http': 4.1.0 + '@smithy/types': 3.3.0 + '@smithy/util-hex-encoding': 3.0.0 + '@smithy/util-middleware': 3.0.3 + '@smithy/util-uri-escape': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.3 + + '@smithy/smithy-client@3.1.12': + dependencies: + '@smithy/middleware-endpoint': 3.1.0 + 
'@smithy/middleware-stack': 3.0.3 + '@smithy/protocol-http': 4.1.0 + '@smithy/types': 3.3.0 + '@smithy/util-stream': 3.1.3 + tslib: 2.6.3 + + '@smithy/types@3.3.0': + dependencies: + tslib: 2.6.3 + + '@smithy/url-parser@3.0.3': + dependencies: + '@smithy/querystring-parser': 3.0.3 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/util-base64@3.0.0': + dependencies: + '@smithy/util-buffer-from': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.3 + + '@smithy/util-body-length-browser@3.0.0': + dependencies: + tslib: 2.6.3 + + '@smithy/util-body-length-node@3.0.0': + dependencies: + tslib: 2.6.3 + + '@smithy/util-buffer-from@2.2.0': + dependencies: + '@smithy/is-array-buffer': 2.2.0 + tslib: 2.6.3 + + '@smithy/util-buffer-from@3.0.0': + dependencies: + '@smithy/is-array-buffer': 3.0.0 + tslib: 2.6.3 + + '@smithy/util-config-provider@3.0.0': + dependencies: + tslib: 2.6.3 + + '@smithy/util-defaults-mode-browser@3.0.14': + dependencies: + '@smithy/property-provider': 3.1.3 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + bowser: 2.11.0 + tslib: 2.6.3 + + '@smithy/util-defaults-mode-node@3.0.14': + dependencies: + '@smithy/config-resolver': 3.0.5 + '@smithy/credential-provider-imds': 3.2.0 + '@smithy/node-config-provider': 3.1.4 + '@smithy/property-provider': 3.1.3 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/util-endpoints@2.0.5': + dependencies: + '@smithy/node-config-provider': 3.1.4 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/util-hex-encoding@3.0.0': + dependencies: + tslib: 2.6.3 + + '@smithy/util-middleware@3.0.3': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/util-retry@3.0.3': + dependencies: + '@smithy/service-error-classification': 3.0.3 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/util-stream@3.1.3': + dependencies: + '@smithy/fetch-http-handler': 3.2.4 + '@smithy/node-http-handler': 3.1.4 + '@smithy/types': 3.3.0 + '@smithy/util-base64': 3.0.0 + '@smithy/util-buffer-from': 3.0.0 + '@smithy/util-hex-encoding': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.3 + + '@smithy/util-uri-escape@3.0.0': + dependencies: + tslib: 2.6.3 + + '@smithy/util-utf8@2.3.0': + dependencies: + '@smithy/util-buffer-from': 2.2.0 + tslib: 2.6.3 + + '@smithy/util-utf8@3.0.0': + dependencies: + '@smithy/util-buffer-from': 3.0.0 + tslib: 2.6.3 + + '@smithy/util-waiter@3.1.2': + dependencies: + '@smithy/abort-controller': 3.1.1 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@socket.io/component-emitter@3.1.2': {} + '@supabase/auth-js@2.64.2': dependencies: '@supabase/node-fetch': 2.6.15 @@ -1924,6 +6239,279 @@ snapshots: - bufferutil - utf-8-validate + '@szmarczak/http-timer@4.0.6': + dependencies: + defer-to-connect: 2.0.1 + + '@tapjs/after-each@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + function-loop: 4.0.0 + + '@tapjs/after@1.1.31(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + is-actual-promise: 1.0.2 + + '@tapjs/asserts@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/stack': 2.0.1 + is-actual-promise: 1.0.2 
+ tcompare: 7.0.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + trivial-deferred: 2.0.0 + transitivePeerDependencies: + - react + - react-dom + + '@tapjs/before-each@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + function-loop: 4.0.0 + + '@tapjs/before@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + is-actual-promise: 1.0.2 + + '@tapjs/chdir@1.1.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + + '@tapjs/config@3.1.6(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@tapjs/test@2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/test': 2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + chalk: 5.3.0 + jackspeak: 3.4.3 + polite-json: 4.0.1 + tap-yaml: 2.2.2 + walk-up-path: 3.0.1 + + '@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@tapjs/processinfo': 3.1.8 + '@tapjs/stack': 2.0.1 + '@tapjs/test': 2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + async-hook-domain: 4.0.1 + diff: 5.2.0 + is-actual-promise: 1.0.2 + minipass: 7.1.2 + signal-exit: 4.1.0 + tap-parser: 16.0.1 + tap-yaml: 2.2.2 + tcompare: 7.0.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + trivial-deferred: 2.0.0 + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - react + - react-dom + + '@tapjs/error-serdes@2.0.1': + dependencies: + minipass: 7.1.2 + + '@tapjs/filter@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + + '@tapjs/fixture@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + mkdirp: 3.0.1 + rimraf: 5.0.10 + + '@tapjs/intercept@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/after': 1.1.31(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/stack': 2.0.1 + + '@tapjs/mock@2.1.6(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/after': 1.1.31(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/stack': 2.0.1 + resolve-import: 1.4.6 + walk-up-path: 3.0.1 + + 
'@tapjs/node-serialize@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/error-serdes': 2.0.1 + '@tapjs/stack': 2.0.1 + tap-parser: 16.0.1 + + '@tapjs/processinfo@3.1.8': + dependencies: + pirates: 4.0.6 + process-on-spawn: 1.0.0 + signal-exit: 4.1.0 + uuid: 8.3.2 + + '@tapjs/reporter@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@tapjs/test@2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))': + dependencies: + '@tapjs/config': 3.1.6(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@tapjs/test@2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/stack': 2.0.1 + chalk: 5.3.0 + ink: 4.4.1(react@18.3.1) + minipass: 7.1.2 + ms: 2.1.3 + patch-console: 2.0.0 + prismjs-terminal: 1.2.3 + react: 18.3.1 + string-length: 6.0.0 + tap-parser: 16.0.1 + tap-yaml: 2.2.2 + tcompare: 7.0.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + transitivePeerDependencies: + - '@tapjs/test' + - '@types/react' + - bufferutil + - react-devtools-core + - react-dom + - utf-8-validate + + '@tapjs/run@2.1.7(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@tapjs/after': 1.1.31(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/before': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/config': 3.1.6(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@tapjs/test@2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/processinfo': 3.1.8 + '@tapjs/reporter': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@tapjs/test@2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1)) + '@tapjs/spawn': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/stdin': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/test': 2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + c8: 9.1.0 + chalk: 5.3.0 + chokidar: 3.6.0 + foreground-child: 3.3.0 + glob: 10.4.5 + minipass: 7.1.2 + mkdirp: 3.0.1 + opener: 1.5.2 + pacote: 17.0.7 + resolve-import: 1.4.6 + rimraf: 5.0.10 + semver: 7.6.2 + signal-exit: 4.1.0 + tap-parser: 16.0.1 + tap-yaml: 2.2.2 + tcompare: 7.0.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + trivial-deferred: 2.0.0 + which: 4.0.0 + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - 
'@types/react' + - bluebird + - bufferutil + - react + - react-devtools-core + - react-dom + - supports-color + - utf-8-validate + + '@tapjs/snapshot@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + is-actual-promise: 1.0.2 + tcompare: 7.0.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + trivial-deferred: 2.0.0 + transitivePeerDependencies: + - react + - react-dom + + '@tapjs/spawn@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + + '@tapjs/stack@2.0.1': {} + + '@tapjs/stdin@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + + '@tapjs/test@2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@isaacs/ts-node-temp-fork-for-pr-2009': 10.9.7(@types/node@20.14.9)(typescript@5.4.5) + '@tapjs/after': 1.1.31(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/after-each': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/asserts': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/before': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/before-each': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/chdir': 1.1.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/filter': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/fixture': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/intercept': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/mock': 2.1.6(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/node-serialize': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/snapshot': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/spawn': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/stdin': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/typescript': 1.4.13(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(typescript@5.4.5) + '@tapjs/worker': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + glob: 10.4.5 + jackspeak: 3.4.3 + mkdirp: 3.0.1 + package-json-from-dist: 1.0.0 + resolve-import: 1.4.6 + rimraf: 5.0.10 + sync-content: 1.0.2 + tap-parser: 16.0.1 + tshy: 1.18.0 + typescript: 5.4.5 + walk-up-path: 
3.0.1 + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - react + - react-dom + + '@tapjs/typescript@1.4.13(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(typescript@5.4.5)': + dependencies: + '@isaacs/ts-node-temp-fork-for-pr-2009': 10.9.7(@types/node@20.14.9)(typescript@5.4.5) + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - typescript + + '@tapjs/worker@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + + '@tsconfig/node10@1.0.11': {} + + '@tsconfig/node12@1.0.11': {} + + '@tsconfig/node14@1.0.3': {} + + '@tsconfig/node14@14.1.2': {} + + '@tsconfig/node16@1.0.4': {} + + '@tsconfig/node16@16.1.3': {} + + '@tsconfig/node18@18.2.4': {} + + '@tsconfig/node20@20.1.4': {} + + '@tufjs/canonical-json@2.0.0': {} + + '@tufjs/models@2.0.1': + dependencies: + '@tufjs/canonical-json': 2.0.0 + minimatch: 9.0.5 + '@types/babel__core@7.20.5': dependencies: '@babel/parser': 7.24.5 @@ -1945,12 +6533,25 @@ snapshots: dependencies: '@babel/types': 7.24.5 + '@types/cacheable-request@6.0.3': + dependencies: + '@types/http-cache-semantics': 4.0.4 + '@types/keyv': 3.1.4 + '@types/node': 20.14.9 + '@types/responselike': 1.0.3 + + '@types/cli-progress@3.11.6': + dependencies: + '@types/node': 20.14.9 + '@types/cookiejar@2.1.5': {} '@types/graceful-fs@4.1.9': dependencies: '@types/node': 20.14.9 + '@types/http-cache-semantics@4.0.4': {} + '@types/istanbul-lib-coverage@2.0.6': {} '@types/istanbul-lib-report@3.0.3': @@ -1966,6 +6567,10 @@ snapshots: expect: 29.7.0 pretty-format: 29.7.0 + '@types/keyv@3.1.4': + dependencies: + '@types/node': 20.14.9 + '@types/methods@1.1.4': {} '@types/node-fetch@2.6.11': @@ -1987,6 +6592,10 @@ snapshots: '@types/phoenix@1.6.5': {} + '@types/responselike@1.0.3': + dependencies: + '@types/node': 20.14.9 + '@types/stack-utils@2.0.3': {} '@types/superagent@8.1.6': @@ -2010,20 +6619,76 @@ snapshots: dependencies: '@types/yargs-parser': 21.0.3 + '@typescript-eslint/types@5.62.0': {} + + '@typescript-eslint/typescript-estree@5.62.0(typescript@5.4.5)': + dependencies: + '@typescript-eslint/types': 5.62.0 + '@typescript-eslint/visitor-keys': 5.62.0 + debug: 4.3.4(supports-color@8.1.1) + globby: 11.1.0 + is-glob: 4.0.3 + semver: 7.6.2 + tsutils: 3.21.0(typescript@5.4.5) + optionalDependencies: + typescript: 5.4.5 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/visitor-keys@5.62.0': + dependencies: + '@typescript-eslint/types': 5.62.0 + eslint-visitor-keys: 3.4.3 + + abbrev@2.0.0: {} + abort-controller@3.0.0: dependencies: event-target-shim: 5.0.1 + acorn-jsx@5.3.2(acorn@8.12.1): + dependencies: + acorn: 8.12.1 + + acorn-walk@8.3.3: + dependencies: + acorn: 8.12.1 + + acorn@8.12.1: {} + + agent-base@6.0.2: + dependencies: + debug: 4.3.4(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + + agent-base@7.1.1: + dependencies: + debug: 4.3.4(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + agentkeepalive@4.5.0: dependencies: humanize-ms: 1.2.1 + aggregate-error@3.1.0: + dependencies: + clean-stack: 2.2.0 + indent-string: 4.0.0 + + amdefine@1.0.1: {} + ansi-escapes@4.3.2: dependencies: type-fest: 0.21.3 + ansi-escapes@6.2.1: {} + ansi-regex@5.0.1: {} + 
ansi-regex@6.0.1: {} + ansi-styles@3.2.1: dependencies: color-convert: 1.9.3 @@ -2034,19 +6699,276 @@ snapshots: ansi-styles@5.2.0: {} + ansi-styles@6.2.1: {} + + ansicolors@0.3.2: {} + anymatch@3.1.3: dependencies: normalize-path: 3.0.0 picomatch: 2.3.1 + app-module-path@2.2.0: {} + + archiver-utils@2.1.0: + dependencies: + glob: 7.2.3 + graceful-fs: 4.2.11 + lazystream: 1.0.1 + lodash.defaults: 4.2.0 + lodash.difference: 4.5.0 + lodash.flatten: 4.4.0 + lodash.isplainobject: 4.0.6 + lodash.union: 4.6.0 + normalize-path: 3.0.0 + readable-stream: 2.3.8 + + archiver-utils@3.0.4: + dependencies: + glob: 7.2.3 + graceful-fs: 4.2.11 + lazystream: 1.0.1 + lodash.defaults: 4.2.0 + lodash.difference: 4.5.0 + lodash.flatten: 4.4.0 + lodash.isplainobject: 4.0.6 + lodash.union: 4.6.0 + normalize-path: 3.0.0 + readable-stream: 3.6.2 + + archiver@5.3.2: + dependencies: + archiver-utils: 2.1.0 + async: 3.2.5 + buffer-crc32: 0.2.13 + readable-stream: 3.6.2 + readdir-glob: 1.1.3 + tar-stream: 2.2.0 + zip-stream: 4.1.1 + + arg@4.1.3: {} + argparse@1.0.10: dependencies: sprintf-js: 1.0.3 + array-union@2.1.0: {} + + arrivals@2.1.2: + dependencies: + debug: 4.3.4(supports-color@8.1.1) + nanotimer: 0.3.14 + transitivePeerDependencies: + - supports-color + + artillery-engine-playwright@1.16.0: + dependencies: + '@playwright/browser-chromium': 1.45.3 + '@playwright/test': 1.45.3 + debug: 4.3.4(supports-color@8.1.1) + playwright: 1.45.3 + transitivePeerDependencies: + - supports-color + + artillery-plugin-apdex@1.10.0(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.4.5): + dependencies: + tap: 19.2.5(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.4.5) + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - '@types/react' + - bluebird + - bufferutil + - react + - react-devtools-core + - react-dom + - supports-color + - typescript + - utf-8-validate + + artillery-plugin-ensure@1.13.0: + dependencies: + chalk: 2.4.2 + debug: 4.3.4(supports-color@8.1.1) + filtrex: 2.2.3 + transitivePeerDependencies: + - supports-color + + artillery-plugin-expect@2.13.0: + dependencies: + chalk: 4.1.2 + debug: 4.3.4(supports-color@8.1.1) + jmespath: 0.16.0 + lodash: 4.17.21 + transitivePeerDependencies: + - supports-color + + artillery-plugin-fake-data@1.10.0: + dependencies: + '@ngneat/falso': 7.2.0 + + artillery-plugin-metrics-by-endpoint@1.13.0: + dependencies: + debug: 4.3.4(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + + artillery-plugin-publish-metrics@2.24.0: + dependencies: + '@aws-sdk/client-cloudwatch': 3.629.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/context-async-hooks': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-metrics-otlp-grpc': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-metrics-otlp-http': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-metrics-otlp-proto': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-grpc': 0.43.0(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-http': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-proto': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-zipkin': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.1 + async: 2.6.4 + 
datadog-metrics: 0.9.3 + debug: 4.3.4(supports-color@8.1.1) + dogapi: 2.8.4 + hot-shots: 6.8.7 + lightstep-tracer: 0.31.2 + mixpanel: 0.13.0 + opentracing: 0.14.7 + prom-client: 14.2.0 + semver: 7.6.2 + uuid: 8.3.2 + transitivePeerDependencies: + - aws-crt + - bufferutil + - supports-color + - utf-8-validate + + artillery-plugin-slack@1.8.0: + dependencies: + debug: 4.3.4(supports-color@8.1.1) + got: 11.8.6 + transitivePeerDependencies: + - supports-color + + artillery@2.0.19(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.4.5): + dependencies: + '@artilleryio/int-commons': 2.10.0 + '@artilleryio/int-core': 2.14.0 + '@aws-sdk/credential-providers': 3.630.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0)) + '@azure/arm-containerinstance': 9.1.0 + '@azure/identity': 4.4.1 + '@azure/storage-blob': 12.24.0 + '@azure/storage-queue': 12.23.0 + '@oclif/core': 2.16.0(@types/node@20.14.9)(typescript@5.4.5) + '@oclif/plugin-help': 5.2.20(@types/node@20.14.9)(typescript@5.4.5) + '@oclif/plugin-not-found': 2.4.3(@types/node@20.14.9)(typescript@5.4.5) + archiver: 5.3.2 + artillery-engine-playwright: 1.16.0 + artillery-plugin-apdex: 1.10.0(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.4.5) + artillery-plugin-ensure: 1.13.0 + artillery-plugin-expect: 2.13.0 + artillery-plugin-fake-data: 1.10.0 + artillery-plugin-metrics-by-endpoint: 1.13.0 + artillery-plugin-publish-metrics: 2.24.0 + artillery-plugin-slack: 1.8.0 + async: 2.6.4 + aws-sdk: 2.1674.0 + chalk: 2.4.2 + chokidar: 3.6.0 + ci-info: 3.9.0 + cli-table3: 0.6.5 + cross-spawn: 7.0.3 + csv-parse: 4.16.3 + debug: 4.3.4(supports-color@8.1.1) + dependency-tree: 10.0.9 + detective-es6: 4.0.1 + dotenv: 16.4.5 + driftless: 2.0.3 + esbuild-wasm: 0.19.12 + eventemitter3: 4.0.7 + fs-extra: 10.1.0 + got: 11.8.6 + joi: 17.13.3 + js-yaml: 3.14.1 + jsonwebtoken: 9.0.2 + lodash: 4.17.21 + moment: 2.30.1 + nanoid: 3.3.7 + ora: 4.1.1 + posthog-node: 2.6.0(debug@4.3.4) + rc: 1.2.8 + sqs-consumer: 5.8.0(aws-sdk@2.1674.0) + temp: 0.9.4 + tmp: 0.2.1 + try-require: 1.2.1 + walk-sync: 0.2.7 + yaml-js: 0.2.3 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - '@types/react' + - aws-crt + - bluebird + - bufferutil + - react + - react-devtools-core + - react-dom + - supports-color + - typescript + - utf-8-validate + asap@2.0.6: {} + ast-module-types@5.0.0: {} + + astral-regex@2.0.0: {} + + async-hook-domain@4.0.1: {} + + async-limiter@1.0.1: {} + + async@1.5.0: {} + + async@2.6.4: + dependencies: + lodash: 4.17.21 + + async@3.2.5: {} + asynckit@0.4.0: {} + auto-bind@5.0.1: {} + + available-typed-arrays@1.0.7: + dependencies: + possible-typed-array-names: 1.0.0 + + aws-sdk@2.1674.0: + dependencies: + buffer: 4.9.2 + events: 1.1.1 + ieee754: 1.1.13 + jmespath: 0.16.0 + querystring: 0.2.0 + sax: 1.2.1 + url: 0.10.3 + util: 0.12.5 + uuid: 8.0.0 + xml2js: 0.6.2 + + axios@0.27.2(debug@4.3.4): + dependencies: + follow-redirects: 1.15.6(debug@4.3.4) + form-data: 4.0.0 + transitivePeerDependencies: + - debug + babel-jest@29.7.0(@babel/core@7.24.5): dependencies: '@babel/core': 7.24.5 @@ -2101,15 +7023,44 @@ snapshots: balanced-match@1.0.2: {} + base64-js@1.5.1: {} + + bignumber.js@9.1.2: {} + + binary-extensions@2.3.0: {} + + bindings@1.5.0: + dependencies: + file-uri-to-path: 1.0.0 + optional: true + + bintrees@1.0.2: {} + + bl@4.1.0: + dependencies: + buffer: 5.7.1 + inherits: 
2.0.4 + readable-stream: 3.6.2 + + boolbase@1.0.0: {} + + bowser@2.11.0: {} + brace-expansion@1.1.11: dependencies: balanced-match: 1.0.2 concat-map: 0.0.1 + brace-expansion@2.0.1: + dependencies: + balanced-match: 1.0.2 + braces@3.0.2: dependencies: fill-range: 7.0.1 + browser-or-node@1.3.0: {} + browserslist@4.23.0: dependencies: caniuse-lite: 1.0.30001615 @@ -2125,8 +7076,64 @@ snapshots: dependencies: node-int64: 0.4.0 + buffer-crc32@0.2.13: {} + + buffer-equal-constant-time@1.0.1: {} + buffer-from@1.1.2: {} + buffer@4.9.2: + dependencies: + base64-js: 1.5.1 + ieee754: 1.1.13 + isarray: 1.0.0 + + buffer@5.7.1: + dependencies: + base64-js: 1.5.1 + ieee754: 1.2.1 + + c8@9.1.0: + dependencies: + '@bcoe/v8-coverage': 0.2.3 + '@istanbuljs/schema': 0.1.3 + find-up: 5.0.0 + foreground-child: 3.3.0 + istanbul-lib-coverage: 3.2.2 + istanbul-lib-report: 3.0.1 + istanbul-reports: 3.1.7 + test-exclude: 6.0.0 + v8-to-istanbul: 9.2.0 + yargs: 17.7.2 + yargs-parser: 21.1.1 + + cacache@18.0.4: + dependencies: + '@npmcli/fs': 3.1.1 + fs-minipass: 3.0.3 + glob: 10.4.5 + lru-cache: 10.4.3 + minipass: 7.1.2 + minipass-collect: 2.0.1 + minipass-flush: 1.0.5 + minipass-pipeline: 1.2.4 + p-map: 4.0.0 + ssri: 10.0.6 + tar: 6.2.1 + unique-filename: 3.0.0 + + cacheable-lookup@5.0.4: {} + + cacheable-request@7.0.4: + dependencies: + clone-response: 1.0.3 + get-stream: 5.2.0 + http-cache-semantics: 4.1.1 + keyv: 4.5.4 + lowercase-keys: 2.0.0 + normalize-url: 6.1.0 + responselike: 2.0.1 + call-bind@1.0.7: dependencies: es-define-property: 1.0.0 @@ -2143,31 +7150,123 @@ snapshots: caniuse-lite@1.0.30001615: {} + cardinal@2.1.1: + dependencies: + ansicolors: 0.3.2 + redeyed: 2.1.1 + chalk@2.4.2: dependencies: ansi-styles: 3.2.1 escape-string-regexp: 1.0.5 supports-color: 5.5.0 + chalk@3.0.0: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + chalk@4.1.2: dependencies: ansi-styles: 4.3.0 supports-color: 7.2.0 + chalk@5.3.0: {} + char-regex@1.0.2: {} + cheerio-select@2.1.0: + dependencies: + boolbase: 1.0.0 + css-select: 5.1.0 + css-what: 6.1.0 + domelementtype: 2.3.0 + domhandler: 5.0.3 + domutils: 3.1.0 + + cheerio@1.0.0: + dependencies: + cheerio-select: 2.1.0 + dom-serializer: 2.0.0 + domhandler: 5.0.3 + domutils: 3.1.0 + encoding-sniffer: 0.2.0 + htmlparser2: 9.1.0 + parse5: 7.1.2 + parse5-htmlparser2-tree-adapter: 7.0.0 + parse5-parser-stream: 7.1.2 + undici: 6.19.7 + whatwg-mimetype: 4.0.0 + + chokidar@3.6.0: + dependencies: + anymatch: 3.1.3 + braces: 3.0.2 + glob-parent: 5.1.2 + is-binary-path: 2.1.0 + is-glob: 4.0.3 + normalize-path: 3.0.0 + readdirp: 3.6.0 + optionalDependencies: + fsevents: 2.3.3 + + chownr@2.0.0: {} + ci-info@3.9.0: {} cjs-module-lexer@1.3.1: {} + clean-stack@2.2.0: {} + + clean-stack@3.0.1: + dependencies: + escape-string-regexp: 4.0.0 + + cli-boxes@3.0.0: {} + + cli-cursor@3.1.0: + dependencies: + restore-cursor: 3.1.0 + + cli-cursor@4.0.0: + dependencies: + restore-cursor: 4.0.0 + + cli-progress@3.12.0: + dependencies: + string-width: 4.2.3 + + cli-spinners@2.9.2: {} + + cli-table3@0.6.5: + dependencies: + string-width: 4.2.3 + optionalDependencies: + '@colors/colors': 1.5.0 + + cli-truncate@3.1.0: + dependencies: + slice-ansi: 5.0.0 + string-width: 5.1.2 + cliui@8.0.1: dependencies: string-width: 4.2.3 strip-ansi: 6.0.1 wrap-ansi: 7.0.0 + clone-response@1.0.3: + dependencies: + mimic-response: 1.0.1 + + clone@1.0.4: {} + co@4.6.0: {} + code-excerpt@4.0.0: + dependencies: + convert-to-spaces: 2.0.1 + collect-v8-coverage@1.0.2: {} color-convert@1.9.3: @@ -2186,21 +7285,50 @@ 
snapshots: dependencies: delayed-stream: 1.0.0 + commander@10.0.1: {} + component-emitter@1.3.1: {} + compress-commons@4.1.2: + dependencies: + buffer-crc32: 0.2.13 + crc32-stream: 4.0.3 + normalize-path: 3.0.0 + readable-stream: 3.6.2 + concat-map@0.0.1: {} convert-source-map@2.0.0: {} + convert-to-spaces@2.0.1: {} + + cookie-parser@1.4.6: + dependencies: + cookie: 0.4.1 + cookie-signature: 1.0.6 + + cookie-signature@1.0.6: {} + + cookie@0.4.1: {} + cookiejar@2.1.4: {} - create-jest@29.7.0(@types/node@20.14.9): + core-util-is@1.0.3: {} + + crc-32@1.2.2: {} + + crc32-stream@4.0.3: + dependencies: + crc-32: 1.2.2 + readable-stream: 3.6.2 + + create-jest@29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)): dependencies: '@jest/types': 29.6.3 chalk: 4.1.2 exit: 0.1.2 graceful-fs: 4.2.11 - jest-config: 29.7.0(@types/node@20.14.9) + jest-config: 29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)) jest-util: 29.7.0 prompts: 2.4.2 transitivePeerDependencies: @@ -2209,30 +7337,127 @@ snapshots: - supports-color - ts-node + create-require@1.1.1: {} + cross-spawn@7.0.3: dependencies: path-key: 3.1.1 shebang-command: 2.0.0 which: 2.0.2 - debug@4.3.4: + css-select@5.1.0: + dependencies: + boolbase: 1.0.0 + css-what: 6.1.0 + domhandler: 5.0.3 + domutils: 3.1.0 + nth-check: 2.1.1 + + css-what@6.1.0: {} + + csv-parse@4.16.3: {} + + datadog-metrics@0.9.3: + dependencies: + debug: 3.1.0 + dogapi: 2.8.4 + transitivePeerDependencies: + - supports-color + + debug@3.1.0: + dependencies: + ms: 2.0.0 + + debug@4.3.4(supports-color@8.1.1): dependencies: ms: 2.1.2 + optionalDependencies: + supports-color: 8.1.1 + + decompress-response@6.0.0: + dependencies: + mimic-response: 3.1.0 dedent@1.5.3: {} + deep-extend@0.6.0: {} + + deep-for-each@3.0.0: + dependencies: + lodash.isplainobject: 4.0.6 + deepmerge@4.3.1: {} + defaults@1.0.4: + dependencies: + clone: 1.0.4 + + defer-to-connect@2.0.1: {} + define-data-property@1.1.4: dependencies: es-define-property: 1.0.0 es-errors: 1.3.0 gopd: 1.0.1 + define-lazy-prop@2.0.0: {} + delayed-stream@1.0.0: {} + dependency-tree@10.0.9: + dependencies: + commander: 10.0.1 + filing-cabinet: 4.2.0 + precinct: 11.0.5 + typescript: 5.4.5 + transitivePeerDependencies: + - supports-color + detect-newline@3.1.0: {} + detective-amd@5.0.2: + dependencies: + ast-module-types: 5.0.0 + escodegen: 2.1.0 + get-amd-module-type: 5.0.1 + node-source-walk: 6.0.2 + + detective-cjs@5.0.1: + dependencies: + ast-module-types: 5.0.0 + node-source-walk: 6.0.2 + + detective-es6@4.0.1: + dependencies: + node-source-walk: 6.0.2 + + detective-postcss@6.1.3: + dependencies: + is-url: 1.2.4 + postcss: 8.4.41 + postcss-values-parser: 6.0.2(postcss@8.4.41) + + detective-sass@5.0.3: + dependencies: + gonzales-pe: 4.3.0 + node-source-walk: 6.0.2 + + detective-scss@4.0.3: + dependencies: + gonzales-pe: 4.3.0 + node-source-walk: 6.0.2 + + detective-stylus@4.0.0: {} + + detective-typescript@11.2.0: + dependencies: + '@typescript-eslint/typescript-estree': 5.62.0(typescript@5.4.5) + ast-module-types: 5.0.0 + node-source-walk: 6.0.2 + typescript: 5.4.5 + transitivePeerDependencies: + - supports-color + dezalgo@1.0.4: dependencies: asap: 2.0.6 @@ -2240,14 +7465,105 @@ snapshots: diff-sequences@29.6.3: {} + diff@4.0.2: {} + + diff@5.2.0: {} + + dir-glob@3.0.1: + dependencies: + path-type: 4.0.0 + + dogapi@2.8.4: + dependencies: + extend: 3.0.2 + json-bigint: 1.0.0 + lodash: 4.17.21 + minimist: 1.2.8 + rc: 1.2.8 + + dom-serializer@2.0.0: + dependencies: + 
domelementtype: 2.3.0 + domhandler: 5.0.3 + entities: 4.5.0 + + domelementtype@2.3.0: {} + + domhandler@5.0.3: + dependencies: + domelementtype: 2.3.0 + + domutils@3.1.0: + dependencies: + dom-serializer: 2.0.0 + domelementtype: 2.3.0 + domhandler: 5.0.3 + dotenv@16.4.5: {} + driftless@2.0.3: + dependencies: + present: 0.0.3 + + eastasianwidth@0.2.0: {} + + ecdsa-sig-formatter@1.0.11: + dependencies: + safe-buffer: 5.2.1 + + ejs@3.1.10: + dependencies: + jake: 10.9.2 + electron-to-chromium@1.4.754: {} emittery@0.13.1: {} emoji-regex@8.0.0: {} + emoji-regex@9.2.2: {} + + encoding-sniffer@0.2.0: + dependencies: + iconv-lite: 0.6.3 + whatwg-encoding: 3.1.1 + + encoding@0.1.13: + dependencies: + iconv-lite: 0.6.3 + optional: true + + end-of-stream@1.4.4: + dependencies: + once: 1.4.0 + + engine.io-client@6.5.4: + dependencies: + '@socket.io/component-emitter': 3.1.2 + debug: 4.3.4(supports-color@8.1.1) + engine.io-parser: 5.2.3 + ws: 8.17.1 + xmlhttprequest-ssl: 2.0.0 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + + engine.io-parser@5.2.3: {} + + enhanced-resolve@5.17.1: + dependencies: + graceful-fs: 4.2.11 + tapable: 2.2.1 + + ensure-posix-path@1.1.1: {} + + entities@4.5.0: {} + + env-paths@2.2.1: {} + + err-code@2.0.3: {} + error-ex@1.3.2: dependencies: is-arrayish: 0.2.1 @@ -2258,16 +7574,50 @@ snapshots: es-errors@1.3.0: {} + esbuild-wasm@0.19.12: {} + escalade@3.1.2: {} escape-string-regexp@1.0.5: {} escape-string-regexp@2.0.0: {} + escape-string-regexp@4.0.0: {} + + escodegen@2.1.0: + dependencies: + esprima: 4.0.1 + estraverse: 5.3.0 + esutils: 2.0.3 + optionalDependencies: + source-map: 0.6.1 + + eslint-visitor-keys@3.4.3: {} + + espree@9.6.1: + dependencies: + acorn: 8.12.1 + acorn-jsx: 5.3.2(acorn@8.12.1) + eslint-visitor-keys: 3.4.3 + esprima@4.0.1: {} + estraverse@5.3.0: {} + + esutils@2.0.3: {} + event-target-shim@5.0.1: {} + eventemitter3@1.1.1: {} + + eventemitter3@4.0.7: {} + + events-to-array@2.0.3: {} + + events@1.1.1: {} + + events@3.3.0: {} + execa@5.1.1: dependencies: cross-spawn: 7.0.3 @@ -2290,25 +7640,105 @@ snapshots: jest-message-util: 29.7.0 jest-util: 29.7.0 + exponential-backoff@3.1.1: {} + + extend@3.0.2: {} + + fast-deep-equal@3.1.3: {} + + fast-glob@3.3.2: + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.5 + fast-json-stable-stringify@2.1.0: {} + fast-levenshtein@3.0.0: + dependencies: + fastest-levenshtein: 1.0.16 + fast-safe-stringify@2.1.1: {} + fast-xml-parser@4.4.1: + dependencies: + strnum: 1.0.5 + + fastest-levenshtein@1.0.16: {} + + fastq@1.17.1: + dependencies: + reusify: 1.0.4 + fb-watchman@2.0.2: dependencies: bser: 2.1.1 + fflate@0.8.1: {} + + file-uri-to-path@1.0.0: + optional: true + + filelist@1.0.4: + dependencies: + minimatch: 5.1.6 + + filing-cabinet@4.2.0: + dependencies: + app-module-path: 2.2.0 + commander: 10.0.1 + enhanced-resolve: 5.17.1 + is-relative-path: 1.0.2 + module-definition: 5.0.1 + module-lookup-amd: 8.0.5 + resolve: 1.22.8 + resolve-dependency-path: 3.0.2 + sass-lookup: 5.0.1 + stylus-lookup: 5.0.1 + tsconfig-paths: 4.2.0 + typescript: 5.4.5 + fill-range@7.0.1: dependencies: to-regex-range: 5.0.1 + filtrex@0.5.4: {} + + filtrex@2.2.3: {} + find-up@4.1.0: dependencies: locate-path: 5.0.0 path-exists: 4.0.0 + find-up@5.0.0: + dependencies: + locate-path: 6.0.0 + path-exists: 4.0.0 + + follow-redirects@1.15.6(debug@4.3.4): + optionalDependencies: + debug: 4.3.4(supports-color@8.1.1) + + for-each@0.3.3: + 
dependencies: + is-callable: 1.2.7 + + foreground-child@3.3.0: + dependencies: + cross-spawn: 7.0.3 + signal-exit: 4.1.0 + form-data-encoder@1.7.2: {} + form-data@3.0.1: + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + mime-types: 2.1.35 + form-data@4.0.0: dependencies: asynckit: 0.4.0 @@ -2326,6 +7756,24 @@ snapshots: hexoid: 1.0.0 once: 1.4.0 + fromentries@1.3.2: {} + + fs-constants@1.0.0: {} + + fs-extra@10.1.0: + dependencies: + graceful-fs: 4.2.11 + jsonfile: 6.1.0 + universalify: 2.0.1 + + fs-minipass@2.1.0: + dependencies: + minipass: 3.3.6 + + fs-minipass@3.0.3: + dependencies: + minipass: 7.1.2 + fs.realpath@1.0.0: {} fsevents@2.3.2: @@ -2336,8 +7784,15 @@ snapshots: function-bind@1.1.2: {} + function-loop@4.0.0: {} + gensync@1.0.0-beta.2: {} + get-amd-module-type@5.0.1: + dependencies: + ast-module-types: 5.0.0 + node-source-walk: 6.0.2 + get-caller-file@2.0.5: {} get-intrinsic@1.2.4: @@ -2348,10 +7803,29 @@ snapshots: has-symbols: 1.0.3 hasown: 2.0.2 + get-own-enumerable-property-symbols@3.0.2: {} + get-package-type@0.1.0: {} + get-stream@5.2.0: + dependencies: + pump: 3.0.0 + get-stream@6.0.1: {} + glob-parent@5.1.2: + dependencies: + is-glob: 4.0.3 + + glob@10.4.5: + dependencies: + foreground-child: 3.3.0 + jackspeak: 3.4.3 + minimatch: 9.0.5 + minipass: 7.1.2 + package-json-from-dist: 1.0.0 + path-scurry: 1.11.1 + glob@7.2.3: dependencies: fs.realpath: 1.0.0 @@ -2363,10 +7837,39 @@ snapshots: globals@11.12.0: {} + globby@11.1.0: + dependencies: + array-union: 2.1.0 + dir-glob: 3.0.1 + fast-glob: 3.3.2 + ignore: 5.3.2 + merge2: 1.4.1 + slash: 3.0.0 + + gonzales-pe@4.3.0: + dependencies: + minimist: 1.2.8 + + google-protobuf@3.6.1: {} + gopd@1.0.1: dependencies: get-intrinsic: 1.2.4 + got@11.8.6: + dependencies: + '@sindresorhus/is': 4.6.0 + '@szmarczak/http-timer': 4.0.6 + '@types/cacheable-request': 6.0.3 + '@types/responselike': 1.0.3 + cacheable-lookup: 5.0.4 + cacheable-request: 7.0.4 + decompress-response: 6.0.0 + http2-wrapper: 1.0.3 + lowercase-keys: 2.0.0 + p-cancelable: 2.1.1 + responselike: 2.0.1 + graceful-fs@4.2.11: {} has-flag@3.0.0: {} @@ -2381,20 +7884,94 @@ snapshots: has-symbols@1.0.3: {} + has-tostringtag@1.0.2: + dependencies: + has-symbols: 1.0.3 + hasown@2.0.2: dependencies: function-bind: 1.1.2 + hex2dec@1.0.1: {} + hexoid@1.0.0: {} + hosted-git-info@7.0.2: + dependencies: + lru-cache: 10.4.3 + + hot-shots@6.8.7: + optionalDependencies: + unix-dgram: 2.0.6 + + hpagent@0.1.2: {} + html-escaper@2.0.2: {} + htmlparser2@9.1.0: + dependencies: + domelementtype: 2.3.0 + domhandler: 5.0.3 + domutils: 3.1.0 + entities: 4.5.0 + + http-cache-semantics@4.1.1: {} + + http-proxy-agent@7.0.2: + dependencies: + agent-base: 7.1.1 + debug: 4.3.4(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + + http2-wrapper@1.0.3: + dependencies: + quick-lru: 5.1.1 + resolve-alpn: 1.2.1 + + https-proxy-agent@5.0.0: + dependencies: + agent-base: 6.0.2 + debug: 4.3.4(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + + https-proxy-agent@5.0.1: + dependencies: + agent-base: 6.0.2 + debug: 4.3.4(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + + https-proxy-agent@7.0.5: + dependencies: + agent-base: 7.1.1 + debug: 4.3.4(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + human-signals@2.1.0: {} humanize-ms@1.2.1: dependencies: ms: 2.1.3 + hyperlinker@1.0.0: {} + + iconv-lite@0.6.3: + dependencies: + safer-buffer: 2.1.2 + + ieee754@1.1.13: {} + + ieee754@1.2.1: {} + + ignore-walk@6.0.5: + 
dependencies: + minimatch: 9.0.5 + + ignore@5.3.2: {} + import-local@3.1.0: dependencies: pkg-dir: 4.2.0 @@ -2402,6 +7979,10 @@ snapshots: imurmurhash@0.1.4: {} + indent-string@4.0.0: {} + + indent-string@5.0.0: {} + inflight@1.0.6: dependencies: once: 1.4.0 @@ -2409,22 +7990,134 @@ snapshots: inherits@2.0.4: {} + ini@1.3.8: {} + + ini@4.1.3: {} + + ink@4.4.1(react@18.3.1): + dependencies: + '@alcalzone/ansi-tokenize': 0.1.3 + ansi-escapes: 6.2.1 + auto-bind: 5.0.1 + chalk: 5.3.0 + cli-boxes: 3.0.0 + cli-cursor: 4.0.0 + cli-truncate: 3.1.0 + code-excerpt: 4.0.0 + indent-string: 5.0.0 + is-ci: 3.0.1 + is-lower-case: 2.0.2 + is-upper-case: 2.0.2 + lodash: 4.17.21 + patch-console: 2.0.0 + react: 18.3.1 + react-reconciler: 0.29.2(react@18.3.1) + scheduler: 0.23.2 + signal-exit: 3.0.7 + slice-ansi: 6.0.0 + stack-utils: 2.0.6 + string-width: 5.1.2 + type-fest: 0.12.0 + widest-line: 4.0.1 + wrap-ansi: 8.1.0 + ws: 8.17.1 + yoga-wasm-web: 0.3.3 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + + ip-address@9.0.5: + dependencies: + jsbn: 1.1.0 + sprintf-js: 1.1.3 + + is-actual-promise@1.0.2: {} + + is-arguments@1.1.1: + dependencies: + call-bind: 1.0.7 + has-tostringtag: 1.0.2 + is-arrayish@0.2.1: {} + is-binary-path@2.1.0: + dependencies: + binary-extensions: 2.3.0 + + is-callable@1.2.7: {} + + is-ci@3.0.1: + dependencies: + ci-info: 3.9.0 + is-core-module@2.13.1: dependencies: hasown: 2.0.2 + is-docker@2.2.1: {} + + is-extglob@2.1.1: {} + is-fullwidth-code-point@3.0.0: {} + is-fullwidth-code-point@4.0.0: {} + is-generator-fn@2.1.0: {} + is-generator-function@1.0.10: + dependencies: + has-tostringtag: 1.0.2 + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + is-interactive@1.0.0: {} + + is-lambda@1.0.1: {} + + is-lower-case@2.0.2: + dependencies: + tslib: 2.6.3 + is-number@7.0.0: {} + is-obj@1.0.1: {} + + is-plain-object@5.0.0: {} + + is-regexp@1.0.0: {} + + is-relative-path@1.0.2: {} + is-stream@2.0.1: {} + is-typed-array@1.1.13: + dependencies: + which-typed-array: 1.1.15 + + is-upper-case@2.0.2: + dependencies: + tslib: 2.6.3 + + is-url-superb@4.0.0: {} + + is-url@1.2.4: {} + + is-wsl@2.2.0: + dependencies: + is-docker: 2.2.1 + + isarray@1.0.0: {} + isexe@2.0.0: {} + isexe@3.1.1: {} + + isomorphic-ws@4.0.1(ws@5.2.4): + dependencies: + ws: 5.2.4 + istanbul-lib-coverage@3.2.2: {} istanbul-lib-instrument@5.2.1: @@ -2455,7 +8148,7 @@ snapshots: istanbul-lib-source-maps@4.0.1: dependencies: - debug: 4.3.4 + debug: 4.3.4(supports-color@8.1.1) istanbul-lib-coverage: 3.2.2 source-map: 0.6.1 transitivePeerDependencies: @@ -2466,6 +8159,19 @@ snapshots: html-escaper: 2.0.2 istanbul-lib-report: 3.0.1 + jackspeak@3.4.3: + dependencies: + '@isaacs/cliui': 8.0.2 + optionalDependencies: + '@pkgjs/parseargs': 0.11.0 + + jake@10.9.2: + dependencies: + async: 3.2.5 + chalk: 4.1.2 + filelist: 1.0.4 + minimatch: 3.1.2 + jest-changed-files@29.7.0: dependencies: execa: 5.1.1 @@ -2498,16 +8204,16 @@ snapshots: - babel-plugin-macros - supports-color - jest-cli@29.7.0(@types/node@20.14.9): + jest-cli@29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)): dependencies: - '@jest/core': 29.7.0 + '@jest/core': 29.7.0(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)) '@jest/test-result': 29.7.0 '@jest/types': 29.6.3 chalk: 4.1.2 - create-jest: 29.7.0(@types/node@20.14.9) + create-jest: 29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)) exit: 0.1.2 import-local: 3.1.0 - jest-config: 29.7.0(@types/node@20.14.9) + jest-config: 
29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)) jest-util: 29.7.0 jest-validate: 29.7.0 yargs: 17.7.2 @@ -2517,7 +8223,7 @@ snapshots: - supports-color - ts-node - jest-config@29.7.0(@types/node@20.14.9): + jest-config@29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)): dependencies: '@babel/core': 7.24.5 '@jest/test-sequencer': 29.7.0 @@ -2543,6 +8249,7 @@ snapshots: strip-json-comments: 3.1.1 optionalDependencies: '@types/node': 20.14.9 + ts-node: 10.9.2(@types/node@20.14.9)(typescript@5.4.5) transitivePeerDependencies: - babel-plugin-macros - supports-color @@ -2762,18 +8469,28 @@ snapshots: merge-stream: 2.0.0 supports-color: 8.1.1 - jest@29.7.0(@types/node@20.14.9): + jest@29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)): dependencies: - '@jest/core': 29.7.0 + '@jest/core': 29.7.0(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)) '@jest/types': 29.6.3 import-local: 3.1.0 - jest-cli: 29.7.0(@types/node@20.14.9) + jest-cli: 29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)) transitivePeerDependencies: - '@types/node' - babel-plugin-macros - supports-color - ts-node + jmespath@0.16.0: {} + + joi@17.13.3: + dependencies: + '@hapi/hoek': 9.3.0 + '@hapi/topo': 5.1.0 + '@sideway/address': 4.1.5 + '@sideway/formula': 3.0.1 + '@sideway/pinpoint': 2.0.0 + js-tokens@4.0.0: {} js-yaml@3.14.1: @@ -2781,24 +8498,146 @@ snapshots: argparse: 1.0.10 esprima: 4.0.1 + jsbn@1.1.0: {} + jsesc@2.5.2: {} + json-bigint@1.0.0: + dependencies: + bignumber.js: 9.1.2 + + json-buffer@3.0.1: {} + json-parse-even-better-errors@2.3.1: {} + json-parse-even-better-errors@3.0.2: {} + json5@2.2.3: {} + jsonfile@6.1.0: + dependencies: + universalify: 2.0.1 + optionalDependencies: + graceful-fs: 4.2.11 + + jsonparse@1.3.1: {} + + jsonpath-plus@7.2.0: {} + + jsonwebtoken@9.0.2: + dependencies: + jws: 3.2.2 + lodash.includes: 4.3.0 + lodash.isboolean: 3.0.3 + lodash.isinteger: 4.0.4 + lodash.isnumber: 3.0.3 + lodash.isplainobject: 4.0.6 + lodash.isstring: 4.0.1 + lodash.once: 4.1.1 + ms: 2.1.3 + semver: 7.6.2 + + jwa@1.4.1: + dependencies: + buffer-equal-constant-time: 1.0.1 + ecdsa-sig-formatter: 1.0.11 + safe-buffer: 5.2.1 + + jwa@2.0.0: + dependencies: + buffer-equal-constant-time: 1.0.1 + ecdsa-sig-formatter: 1.0.11 + safe-buffer: 5.2.1 + + jws@3.2.2: + dependencies: + jwa: 1.4.1 + safe-buffer: 5.2.1 + + jws@4.0.0: + dependencies: + jwa: 2.0.0 + safe-buffer: 5.2.1 + + keyv@4.5.4: + dependencies: + json-buffer: 3.0.1 + kleur@3.0.3: {} + lazystream@1.0.1: + dependencies: + readable-stream: 2.3.8 + leven@3.1.0: {} + lightstep-tracer@0.31.2: + dependencies: + async: 1.5.0 + eventemitter3: 1.1.1 + google-protobuf: 3.6.1 + hex2dec: 1.0.1 + opentracing: 0.14.7 + source-map-support: 0.3.3 + thrift: 0.14.2 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + lines-and-columns@1.2.4: {} locate-path@5.0.0: dependencies: p-locate: 4.1.0 + locate-path@6.0.0: + dependencies: + p-locate: 5.0.0 + + lodash.camelcase@4.3.0: {} + + lodash.defaults@4.2.0: {} + + lodash.difference@4.5.0: {} + + lodash.flatten@4.4.0: {} + + lodash.includes@4.3.0: {} + + lodash.isboolean@3.0.3: {} + + lodash.isinteger@4.0.4: {} + + lodash.isnumber@3.0.3: {} + + lodash.isplainobject@4.0.6: {} + + lodash.isstring@4.0.1: {} + lodash.memoize@4.1.2: {} + lodash.merge@4.6.2: {} + + lodash.once@4.1.1: {} + + lodash.union@4.6.0: {} + + lodash@4.17.21: {} + + log-symbols@3.0.0: + dependencies: + chalk: 2.4.2 + + 
long@5.2.3: {} + + loose-envify@1.4.0: + dependencies: + js-tokens: 4.0.0 + + lowercase-keys@2.0.0: {} + + lru-cache@10.4.3: {} + lru-cache@5.1.1: dependencies: yallist: 3.1.1 @@ -2809,12 +8648,35 @@ snapshots: make-error@1.3.6: {} + make-fetch-happen@13.0.1: + dependencies: + '@npmcli/agent': 2.2.2 + cacache: 18.0.4 + http-cache-semantics: 4.1.1 + is-lambda: 1.0.1 + minipass: 7.1.2 + minipass-fetch: 3.0.5 + minipass-flush: 1.0.5 + minipass-pipeline: 1.2.4 + negotiator: 0.6.3 + proc-log: 4.2.0 + promise-retry: 2.0.1 + ssri: 10.0.6 + transitivePeerDependencies: + - supports-color + makeerror@1.0.12: dependencies: tmpl: 1.0.5 + matcher-collection@1.1.2: + dependencies: + minimatch: 3.1.2 + merge-stream@2.0.0: {} + merge2@1.4.1: {} + methods@1.1.2: {} micromatch@4.0.5: @@ -2832,34 +8694,213 @@ snapshots: mimic-fn@2.1.0: {} + mimic-response@1.0.1: {} + + mimic-response@3.1.0: {} + minimatch@3.1.2: dependencies: brace-expansion: 1.1.11 + minimatch@5.1.6: + dependencies: + brace-expansion: 2.0.1 + + minimatch@9.0.5: + dependencies: + brace-expansion: 2.0.1 + + minimist@1.2.8: {} + + minipass-collect@2.0.1: + dependencies: + minipass: 7.1.2 + + minipass-fetch@3.0.5: + dependencies: + minipass: 7.1.2 + minipass-sized: 1.0.3 + minizlib: 2.1.2 + optionalDependencies: + encoding: 0.1.13 + + minipass-flush@1.0.5: + dependencies: + minipass: 3.3.6 + + minipass-json-stream@1.0.2: + dependencies: + jsonparse: 1.3.1 + minipass: 3.3.6 + + minipass-pipeline@1.2.4: + dependencies: + minipass: 3.3.6 + + minipass-sized@1.0.3: + dependencies: + minipass: 3.3.6 + + minipass@3.3.6: + dependencies: + yallist: 4.0.0 + + minipass@5.0.0: {} + + minipass@7.1.2: {} + + minizlib@2.1.2: + dependencies: + minipass: 3.3.6 + yallist: 4.0.0 + + mixpanel@0.13.0: + dependencies: + https-proxy-agent: 5.0.0 + transitivePeerDependencies: + - supports-color + + mkdirp@0.5.6: + dependencies: + minimist: 1.2.8 + + mkdirp@1.0.4: {} + + mkdirp@3.0.1: {} + + module-definition@5.0.1: + dependencies: + ast-module-types: 5.0.0 + node-source-walk: 6.0.2 + + module-lookup-amd@8.0.5: + dependencies: + commander: 10.0.1 + glob: 7.2.3 + requirejs: 2.3.7 + requirejs-config-file: 4.0.0 + + moment@2.30.1: {} + + ms@2.0.0: {} + ms@2.1.2: {} ms@2.1.3: {} + mute-stream@0.0.8: {} + + nan@2.20.0: + optional: true + + nanoid@3.3.7: {} + + nanotimer@0.3.14: {} + natural-compare@1.4.0: {} + natural-orderby@2.0.3: {} + + negotiator@0.6.3: {} + node-domexception@1.0.0: {} - node-fetch@2.7.0: + node-fetch@2.7.0(encoding@0.1.13): dependencies: whatwg-url: 5.0.0 + optionalDependencies: + encoding: 0.1.13 + + node-gyp@10.2.0: + dependencies: + env-paths: 2.2.1 + exponential-backoff: 3.1.1 + glob: 10.4.5 + graceful-fs: 4.2.11 + make-fetch-happen: 13.0.1 + nopt: 7.2.1 + proc-log: 4.2.0 + semver: 7.6.2 + tar: 6.2.1 + which: 4.0.0 + transitivePeerDependencies: + - supports-color node-int64@0.4.0: {} node-releases@2.0.14: {} + node-source-walk@6.0.2: + dependencies: + '@babel/parser': 7.24.5 + + nopt@7.2.1: + dependencies: + abbrev: 2.0.0 + + normalize-package-data@6.0.2: + dependencies: + hosted-git-info: 7.0.2 + semver: 7.6.2 + validate-npm-package-license: 3.0.4 + normalize-path@3.0.0: {} + normalize-url@6.1.0: {} + + npm-bundled@3.0.1: + dependencies: + npm-normalize-package-bin: 3.0.1 + + npm-install-checks@6.3.0: + dependencies: + semver: 7.6.2 + + npm-normalize-package-bin@3.0.1: {} + + npm-package-arg@11.0.3: + dependencies: + hosted-git-info: 7.0.2 + proc-log: 4.2.0 + semver: 7.6.2 + validate-npm-package-name: 5.0.1 + + npm-packlist@8.0.2: + dependencies: 
+ ignore-walk: 6.0.5 + + npm-pick-manifest@9.1.0: + dependencies: + npm-install-checks: 6.3.0 + npm-normalize-package-bin: 3.0.1 + npm-package-arg: 11.0.3 + semver: 7.6.2 + + npm-registry-fetch@16.2.1: + dependencies: + '@npmcli/redact': 1.1.0 + make-fetch-happen: 13.0.1 + minipass: 7.1.2 + minipass-fetch: 3.0.5 + minipass-json-stream: 1.0.2 + minizlib: 2.1.2 + npm-package-arg: 11.0.3 + proc-log: 4.2.0 + transitivePeerDependencies: + - supports-color + npm-run-path@4.0.1: dependencies: path-key: 3.1.1 + nth-check@2.1.1: + dependencies: + boolbase: 1.0.0 + object-inspect@1.13.1: {} + object-treeify@1.1.33: {} + once@1.4.0: dependencies: wrappy: 1.0.2 @@ -2868,7 +8909,13 @@ snapshots: dependencies: mimic-fn: 2.1.0 - openai@4.52.2: + open@8.4.2: + dependencies: + define-lazy-prop: 2.0.0 + is-docker: 2.2.1 + is-wsl: 2.2.0 + + openai@4.52.2(encoding@0.1.13): dependencies: '@types/node': 18.19.39 '@types/node-fetch': 2.6.11 @@ -2876,11 +8923,28 @@ snapshots: agentkeepalive: 4.5.0 form-data-encoder: 1.7.2 formdata-node: 4.4.1 - node-fetch: 2.7.0 + node-fetch: 2.7.0(encoding@0.1.13) web-streams-polyfill: 3.3.3 transitivePeerDependencies: - encoding + opener@1.5.2: {} + + opentracing@0.14.7: {} + + ora@4.1.1: + dependencies: + chalk: 3.0.0 + cli-cursor: 3.1.0 + cli-spinners: 2.9.2 + is-interactive: 1.0.0 + log-symbols: 3.0.0 + mute-stream: 0.0.8 + strip-ansi: 6.0.1 + wcwidth: 1.0.1 + + p-cancelable@2.1.1: {} + p-limit@2.3.0: dependencies: p-try: 2.2.0 @@ -2893,8 +8957,42 @@ snapshots: dependencies: p-limit: 2.3.0 + p-locate@5.0.0: + dependencies: + p-limit: 3.1.0 + + p-map@4.0.0: + dependencies: + aggregate-error: 3.1.0 + p-try@2.2.0: {} + package-json-from-dist@1.0.0: {} + + pacote@17.0.7: + dependencies: + '@npmcli/git': 5.0.8 + '@npmcli/installed-package-contents': 2.1.0 + '@npmcli/promise-spawn': 7.0.2 + '@npmcli/run-script': 7.0.4 + cacache: 18.0.4 + fs-minipass: 3.0.3 + minipass: 7.1.2 + npm-package-arg: 11.0.3 + npm-packlist: 8.0.2 + npm-pick-manifest: 9.1.0 + npm-registry-fetch: 16.2.1 + proc-log: 4.2.0 + promise-retry: 2.0.1 + read-package-json: 7.0.1 + read-package-json-fast: 3.0.2 + sigstore: 2.3.1 + ssri: 10.0.6 + tar: 6.2.1 + transitivePeerDependencies: + - bluebird + - supports-color + parse-json@5.2.0: dependencies: '@babel/code-frame': 7.24.2 @@ -2902,6 +9000,26 @@ snapshots: json-parse-even-better-errors: 2.3.1 lines-and-columns: 1.2.4 + parse5-htmlparser2-tree-adapter@7.0.0: + dependencies: + domhandler: 5.0.3 + parse5: 7.1.2 + + parse5-parser-stream@7.1.2: + dependencies: + parse5: 7.1.2 + + parse5@7.1.2: + dependencies: + entities: 4.5.0 + + password-prompt@1.1.3: + dependencies: + ansi-escapes: 4.3.2 + cross-spawn: 7.0.3 + + patch-console@2.0.0: {} + path-exists@4.0.0: {} path-is-absolute@1.0.1: {} @@ -2910,8 +9028,17 @@ snapshots: path-parse@1.0.7: {} + path-scurry@1.11.1: + dependencies: + lru-cache: 10.4.3 + minipass: 7.1.2 + + path-type@4.0.0: {} + picocolors@1.0.0: {} + picocolors@1.0.1: {} + picomatch@2.3.1: {} pirates@4.0.6: {} @@ -2922,39 +9049,239 @@ snapshots: playwright-core@1.45.0: {} + playwright-core@1.45.3: {} + playwright@1.45.0: dependencies: playwright-core: 1.45.0 optionalDependencies: fsevents: 2.3.2 + playwright@1.45.3: + dependencies: + playwright-core: 1.45.3 + optionalDependencies: + fsevents: 2.3.2 + + polite-json@4.0.1: {} + + polite-json@5.0.0: {} + + possible-typed-array-names@1.0.0: {} + + postcss-values-parser@6.0.2(postcss@8.4.41): + dependencies: + color-name: 1.1.4 + is-url-superb: 4.0.0 + postcss: 8.4.41 + quote-unquote: 1.0.0 + + 
postcss@8.4.41: + dependencies: + nanoid: 3.3.7 + picocolors: 1.0.1 + source-map-js: 1.2.0 + + posthog-node@2.6.0(debug@4.3.4): + dependencies: + axios: 0.27.2(debug@4.3.4) + transitivePeerDependencies: + - debug + + precinct@11.0.5: + dependencies: + '@dependents/detective-less': 4.1.0 + commander: 10.0.1 + detective-amd: 5.0.2 + detective-cjs: 5.0.1 + detective-es6: 4.0.1 + detective-postcss: 6.1.3 + detective-sass: 5.0.3 + detective-scss: 4.0.3 + detective-stylus: 4.0.0 + detective-typescript: 11.2.0 + module-definition: 5.0.1 + node-source-walk: 6.0.2 + transitivePeerDependencies: + - supports-color + + present@0.0.3: {} + pretty-format@29.7.0: dependencies: '@jest/schemas': 29.6.3 ansi-styles: 5.2.0 react-is: 18.3.1 + prismjs-terminal@1.2.3: + dependencies: + chalk: 5.3.0 + prismjs: 1.29.0 + string-length: 6.0.0 + + prismjs@1.29.0: {} + + proc-log@4.2.0: {} + + process-nextick-args@2.0.1: {} + + process-on-spawn@1.0.0: + dependencies: + fromentries: 1.3.2 + + prom-client@14.2.0: + dependencies: + tdigest: 0.1.2 + + promise-inflight@1.0.1: {} + + promise-retry@2.0.1: + dependencies: + err-code: 2.0.3 + retry: 0.12.0 + prompts@2.4.2: dependencies: kleur: 3.0.3 sisteransi: 1.0.5 + protobufjs@7.3.2: + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/base64': 1.1.2 + '@protobufjs/codegen': 2.0.4 + '@protobufjs/eventemitter': 1.1.0 + '@protobufjs/fetch': 1.1.0 + '@protobufjs/float': 1.0.2 + '@protobufjs/inquire': 1.1.0 + '@protobufjs/path': 1.1.2 + '@protobufjs/pool': 1.1.0 + '@protobufjs/utf8': 1.1.0 + '@types/node': 20.14.9 + long: 5.2.3 + + pump@3.0.0: + dependencies: + end-of-stream: 1.4.4 + once: 1.4.0 + + punycode@1.3.2: {} + pure-rand@6.1.0: {} + q@1.5.1: {} + qs@6.12.1: dependencies: side-channel: 1.0.6 + querystring@0.2.0: {} + + queue-microtask@1.2.3: {} + + quick-lru@5.1.1: {} + + quote-unquote@1.0.0: {} + + rc@1.2.8: + dependencies: + deep-extend: 0.6.0 + ini: 1.3.8 + minimist: 1.2.8 + strip-json-comments: 2.0.1 + + react-dom@18.3.1(react@18.3.1): + dependencies: + loose-envify: 1.4.0 + react: 18.3.1 + scheduler: 0.23.2 + + react-element-to-jsx-string@15.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + dependencies: + '@base2/pretty-print-object': 1.0.1 + is-plain-object: 5.0.0 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + react-is: 18.1.0 + + react-is@18.1.0: {} + react-is@18.3.1: {} + react-reconciler@0.29.2(react@18.3.1): + dependencies: + loose-envify: 1.4.0 + react: 18.3.1 + scheduler: 0.23.2 + + react@18.3.1: + dependencies: + loose-envify: 1.4.0 + + read-package-json-fast@3.0.2: + dependencies: + json-parse-even-better-errors: 3.0.2 + npm-normalize-package-bin: 3.0.1 + + read-package-json@7.0.1: + dependencies: + glob: 10.4.5 + json-parse-even-better-errors: 3.0.2 + normalize-package-data: 6.0.2 + npm-normalize-package-bin: 3.0.1 + + readable-stream@2.3.8: + dependencies: + core-util-is: 1.0.3 + inherits: 2.0.4 + isarray: 1.0.0 + process-nextick-args: 2.0.1 + safe-buffer: 5.1.2 + string_decoder: 1.1.1 + util-deprecate: 1.0.2 + + readable-stream@3.6.2: + dependencies: + inherits: 2.0.4 + string_decoder: 1.3.0 + util-deprecate: 1.0.2 + + readdir-glob@1.1.3: + dependencies: + minimatch: 5.1.6 + + readdirp@3.6.0: + dependencies: + picomatch: 2.3.1 + + redeyed@2.1.1: + dependencies: + esprima: 4.0.1 + require-directory@2.1.1: {} + requirejs-config-file@4.0.0: + dependencies: + esprima: 4.0.1 + stringify-object: 3.3.0 + + requirejs@2.3.7: {} + + resolve-alpn@1.2.1: {} + resolve-cwd@3.0.0: dependencies: resolve-from: 5.0.0 + resolve-dependency-path@3.0.2: 
{} + resolve-from@5.0.0: {} + resolve-import@1.4.6: + dependencies: + glob: 10.4.5 + walk-up-path: 3.0.1 + resolve.exports@2.0.2: {} resolve@1.22.8: @@ -2963,6 +9290,58 @@ snapshots: path-parse: 1.0.7 supports-preserve-symlinks-flag: 1.0.0 + responselike@2.0.1: + dependencies: + lowercase-keys: 2.0.0 + + restore-cursor@3.1.0: + dependencies: + onetime: 5.1.2 + signal-exit: 3.0.7 + + restore-cursor@4.0.0: + dependencies: + onetime: 5.1.2 + signal-exit: 3.0.7 + + retry@0.12.0: {} + + reusify@1.0.4: {} + + rimraf@2.6.3: + dependencies: + glob: 7.2.3 + + rimraf@3.0.2: + dependencies: + glob: 7.2.3 + + rimraf@5.0.10: + dependencies: + glob: 10.4.5 + + run-parallel@1.2.0: + dependencies: + queue-microtask: 1.2.3 + + safe-buffer@5.1.2: {} + + safe-buffer@5.2.1: {} + + safer-buffer@2.1.2: {} + + sass-lookup@5.0.1: + dependencies: + commander: 10.0.1 + + sax@1.2.1: {} + + scheduler@0.23.2: + dependencies: + loose-envify: 1.4.0 + + seedrandom@3.0.5: {} + semver@6.3.1: {} semver@7.6.2: {} @@ -2991,49 +9370,190 @@ snapshots: signal-exit@3.0.7: {} + signal-exit@4.1.0: {} + + sigstore@2.3.1: + dependencies: + '@sigstore/bundle': 2.3.2 + '@sigstore/core': 1.1.0 + '@sigstore/protobuf-specs': 0.3.2 + '@sigstore/sign': 2.3.2 + '@sigstore/tuf': 2.3.4 + '@sigstore/verify': 1.2.1 + transitivePeerDependencies: + - supports-color + sisteransi@1.0.5: {} slash@3.0.0: {} + slice-ansi@4.0.0: + dependencies: + ansi-styles: 4.3.0 + astral-regex: 2.0.0 + is-fullwidth-code-point: 3.0.0 + + slice-ansi@5.0.0: + dependencies: + ansi-styles: 6.2.1 + is-fullwidth-code-point: 4.0.0 + + slice-ansi@6.0.0: + dependencies: + ansi-styles: 6.2.1 + is-fullwidth-code-point: 4.0.0 + + smart-buffer@4.2.0: {} + + socket.io-client@4.7.5: + dependencies: + '@socket.io/component-emitter': 3.1.2 + debug: 4.3.4(supports-color@8.1.1) + engine.io-client: 6.5.4 + socket.io-parser: 4.2.4 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + + socket.io-parser@4.2.4: + dependencies: + '@socket.io/component-emitter': 3.1.2 + debug: 4.3.4(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + + socketio-wildcard@2.0.0: {} + + socks-proxy-agent@8.0.4: + dependencies: + agent-base: 7.1.1 + debug: 4.3.4(supports-color@8.1.1) + socks: 2.8.3 + transitivePeerDependencies: + - supports-color + + socks@2.8.3: + dependencies: + ip-address: 9.0.5 + smart-buffer: 4.2.0 + + source-map-js@1.2.0: {} + + source-map-support@0.3.3: + dependencies: + source-map: 0.1.32 + source-map-support@0.5.13: dependencies: buffer-from: 1.1.2 source-map: 0.6.1 + source-map@0.1.32: + dependencies: + amdefine: 1.0.1 + source-map@0.6.1: {} + spdx-correct@3.2.0: + dependencies: + spdx-expression-parse: 3.0.1 + spdx-license-ids: 3.0.18 + + spdx-exceptions@2.5.0: {} + + spdx-expression-parse@3.0.1: + dependencies: + spdx-exceptions: 2.5.0 + spdx-license-ids: 3.0.18 + + spdx-license-ids@3.0.18: {} + sprintf-js@1.0.3: {} + sprintf-js@1.1.3: {} + + sqs-consumer@5.8.0(aws-sdk@2.1674.0): + dependencies: + aws-sdk: 2.1674.0 + debug: 4.3.4(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + + ssri@10.0.6: + dependencies: + minipass: 7.1.2 + stack-utils@2.0.6: dependencies: escape-string-regexp: 2.0.0 + stoppable@1.1.0: {} + string-length@4.0.2: dependencies: char-regex: 1.0.2 strip-ansi: 6.0.1 + string-length@6.0.0: + dependencies: + strip-ansi: 7.1.0 + string-width@4.2.3: dependencies: emoji-regex: 8.0.0 is-fullwidth-code-point: 3.0.0 strip-ansi: 6.0.1 + string-width@5.1.2: + dependencies: + eastasianwidth: 0.2.0 + 
emoji-regex: 9.2.2 + strip-ansi: 7.1.0 + + string_decoder@1.1.1: + dependencies: + safe-buffer: 5.1.2 + + string_decoder@1.3.0: + dependencies: + safe-buffer: 5.2.1 + + stringify-object@3.3.0: + dependencies: + get-own-enumerable-property-symbols: 3.0.2 + is-obj: 1.0.1 + is-regexp: 1.0.0 + strip-ansi@6.0.1: dependencies: ansi-regex: 5.0.1 + strip-ansi@7.1.0: + dependencies: + ansi-regex: 6.0.1 + + strip-bom@3.0.0: {} + strip-bom@4.0.0: {} strip-final-newline@2.0.0: {} + strip-json-comments@2.0.1: {} + strip-json-comments@3.1.1: {} + strnum@1.0.5: {} + + stylus-lookup@5.0.1: + dependencies: + commander: 10.0.1 + superagent@9.0.2: dependencies: component-emitter: 1.3.1 cookiejar: 2.1.4 - debug: 4.3.4 + debug: 4.3.4(supports-color@8.1.1) fast-safe-stringify: 2.1.1 form-data: 4.0.0 formidable: 3.5.1 @@ -3062,14 +9582,129 @@ snapshots: dependencies: has-flag: 4.0.0 + supports-hyperlinks@2.3.0: + dependencies: + has-flag: 4.0.0 + supports-color: 7.2.0 + supports-preserve-symlinks-flag@1.0.0: {} + sync-content@1.0.2: + dependencies: + glob: 10.4.5 + mkdirp: 3.0.1 + path-scurry: 1.11.1 + rimraf: 5.0.10 + + tap-parser@16.0.1: + dependencies: + events-to-array: 2.0.3 + tap-yaml: 2.2.2 + + tap-yaml@2.2.2: + dependencies: + yaml: 2.5.0 + yaml-types: 0.3.0(yaml@2.5.0) + + tap@19.2.5(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.4.5): + dependencies: + '@tapjs/after': 1.1.31(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/after-each': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/asserts': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/before': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/before-each': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/chdir': 1.1.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/filter': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/fixture': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/intercept': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/mock': 2.1.6(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/node-serialize': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/run': 2.1.7(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/snapshot': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/spawn': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/stdin': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/test': 2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/typescript': 
1.4.13(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(typescript@5.4.5) + '@tapjs/worker': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + resolve-import: 1.4.6 + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - '@types/react' + - bluebird + - bufferutil + - react + - react-devtools-core + - react-dom + - supports-color + - typescript + - utf-8-validate + + tapable@2.2.1: {} + + tar-stream@2.2.0: + dependencies: + bl: 4.1.0 + end-of-stream: 1.4.4 + fs-constants: 1.0.0 + inherits: 2.0.4 + readable-stream: 3.6.2 + + tar@6.2.1: + dependencies: + chownr: 2.0.0 + fs-minipass: 2.1.0 + minipass: 5.0.0 + minizlib: 2.1.2 + mkdirp: 1.0.4 + yallist: 4.0.0 + + tcompare@7.0.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + dependencies: + diff: 5.2.0 + react-element-to-jsx-string: 15.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + transitivePeerDependencies: + - react + - react-dom + + tdigest@0.1.2: + dependencies: + bintrees: 1.0.2 + + temp@0.9.4: + dependencies: + mkdirp: 0.5.6 + rimraf: 2.6.3 + test-exclude@6.0.0: dependencies: '@istanbuljs/schema': 0.1.3 glob: 7.2.3 minimatch: 3.1.2 + thrift@0.14.2: + dependencies: + browser-or-node: 1.3.0 + isomorphic-ws: 4.0.1(ws@5.2.4) + node-int64: 0.4.0 + q: 1.5.1 + ws: 5.2.4 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + + tldts-core@6.1.39: {} + + tldts@6.1.39: + dependencies: + tldts-core: 6.1.39 + + tmp@0.2.1: + dependencies: + rimraf: 3.0.2 + tmpl@1.0.5: {} to-fast-properties@2.0.0: {} @@ -3078,13 +9713,21 @@ snapshots: dependencies: is-number: 7.0.0 + tough-cookie@5.0.0-rc.4: + dependencies: + tldts: 6.1.39 + tr46@0.0.3: {} - ts-jest@29.1.5(@babel/core@7.24.5)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.24.5))(jest@29.7.0(@types/node@20.14.9))(typescript@5.4.5): + trivial-deferred@2.0.0: {} + + try-require@1.2.1: {} + + ts-jest@29.1.5(@babel/core@7.24.5)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.24.5))(jest@29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)))(typescript@5.4.5): dependencies: bs-logger: 0.2.6 fast-json-stable-stringify: 2.1.0 - jest: 29.7.0(@types/node@20.14.9) + jest: 29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)) jest-util: 29.7.0 json5: 2.2.3 lodash.memoize: 4.1.2 @@ -3098,51 +9741,201 @@ snapshots: '@jest/types': 29.6.3 babel-jest: 29.7.0(@babel/core@7.24.5) + ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5): + dependencies: + '@cspotcode/source-map-support': 0.8.1 + '@tsconfig/node10': 1.0.11 + '@tsconfig/node12': 1.0.11 + '@tsconfig/node14': 1.0.3 + '@tsconfig/node16': 1.0.4 + '@types/node': 20.14.9 + acorn: 8.12.1 + acorn-walk: 8.3.3 + arg: 4.1.3 + create-require: 1.1.1 + diff: 4.0.2 + make-error: 1.3.6 + typescript: 5.4.5 + v8-compile-cache-lib: 3.0.1 + yn: 3.1.1 + + tsconfig-paths@4.2.0: + dependencies: + json5: 2.2.3 + minimist: 1.2.8 + strip-bom: 3.0.0 + + tshy@1.18.0: + dependencies: + chalk: 5.3.0 + chokidar: 3.6.0 + foreground-child: 3.3.0 + minimatch: 9.0.5 + mkdirp: 3.0.1 + polite-json: 5.0.0 + resolve-import: 1.4.6 + rimraf: 5.0.10 + sync-content: 1.0.2 + typescript: 5.4.5 + walk-up-path: 3.0.1 + + tslib@1.14.1: {} + + tslib@2.6.3: {} + + tsutils@3.21.0(typescript@5.4.5): + dependencies: + tslib: 1.14.1 + typescript: 5.4.5 + + tuf-js@2.2.1: + dependencies: + '@tufjs/models': 2.0.1 + debug: 4.3.4(supports-color@8.1.1) + 
make-fetch-happen: 13.0.1 + transitivePeerDependencies: + - supports-color + type-detect@4.0.8: {} + type-fest@0.12.0: {} + type-fest@0.21.3: {} typescript@5.4.5: {} undici-types@5.26.5: {} + undici@6.19.7: {} + + unique-filename@3.0.0: + dependencies: + unique-slug: 4.0.0 + + unique-slug@4.0.0: + dependencies: + imurmurhash: 0.1.4 + + universalify@2.0.1: {} + + unix-dgram@2.0.6: + dependencies: + bindings: 1.5.0 + nan: 2.20.0 + optional: true + update-browserslist-db@1.0.14(browserslist@4.23.0): dependencies: browserslist: 4.23.0 escalade: 3.1.2 picocolors: 1.0.0 + url@0.10.3: + dependencies: + punycode: 1.3.2 + querystring: 0.2.0 + + util-deprecate@1.0.2: {} + + util@0.12.5: + dependencies: + inherits: 2.0.4 + is-arguments: 1.1.1 + is-generator-function: 1.0.10 + is-typed-array: 1.1.13 + which-typed-array: 1.1.15 + + uuid@8.0.0: {} + + uuid@8.3.2: {} + + uuid@9.0.1: {} + + v8-compile-cache-lib@3.0.1: {} + v8-to-istanbul@9.2.0: dependencies: '@jridgewell/trace-mapping': 0.3.25 '@types/istanbul-lib-coverage': 2.0.6 convert-source-map: 2.0.0 + validate-npm-package-license@3.0.4: + dependencies: + spdx-correct: 3.2.0 + spdx-expression-parse: 3.0.1 + + validate-npm-package-name@5.0.1: {} + + walk-sync@0.2.7: + dependencies: + ensure-posix-path: 1.1.1 + matcher-collection: 1.1.2 + + walk-up-path@3.0.1: {} + walker@1.0.8: dependencies: makeerror: 1.0.12 + wcwidth@1.0.1: + dependencies: + defaults: 1.0.4 + web-streams-polyfill@3.3.3: {} web-streams-polyfill@4.0.0-beta.3: {} webidl-conversions@3.0.1: {} + whatwg-encoding@3.1.1: + dependencies: + iconv-lite: 0.6.3 + + whatwg-mimetype@4.0.0: {} + whatwg-url@5.0.0: dependencies: tr46: 0.0.3 webidl-conversions: 3.0.1 + which-typed-array@1.1.15: + dependencies: + available-typed-arrays: 1.0.7 + call-bind: 1.0.7 + for-each: 0.3.3 + gopd: 1.0.1 + has-tostringtag: 1.0.2 + which@2.0.2: dependencies: isexe: 2.0.0 + which@4.0.0: + dependencies: + isexe: 3.1.1 + + widest-line@3.1.0: + dependencies: + string-width: 4.2.3 + + widest-line@4.0.1: + dependencies: + string-width: 5.1.2 + + wordwrap@1.0.0: {} + wrap-ansi@7.0.0: dependencies: ansi-styles: 4.3.0 string-width: 4.2.3 strip-ansi: 6.0.1 + wrap-ansi@8.1.0: + dependencies: + ansi-styles: 6.2.1 + string-width: 5.1.2 + strip-ansi: 7.1.0 + wrappy@1.0.2: {} write-file-atomic@4.0.2: @@ -3150,12 +9943,37 @@ snapshots: imurmurhash: 0.1.4 signal-exit: 3.0.7 + ws@5.2.4: + dependencies: + async-limiter: 1.0.1 + + ws@7.5.10: {} + ws@8.17.1: {} + xml2js@0.6.2: + dependencies: + sax: 1.2.1 + xmlbuilder: 11.0.1 + + xmlbuilder@11.0.1: {} + + xmlhttprequest-ssl@2.0.0: {} + y18n@5.0.8: {} yallist@3.1.1: {} + yallist@4.0.0: {} + + yaml-js@0.2.3: {} + + yaml-types@0.3.0(yaml@2.5.0): + dependencies: + yaml: 2.5.0 + + yaml@2.5.0: {} + yargs-parser@21.1.1: {} yargs@17.7.2: @@ -3168,4 +9986,14 @@ snapshots: y18n: 5.0.8 yargs-parser: 21.1.1 + yn@3.1.1: {} + yocto-queue@0.1.0: {} + + yoga-wasm-web@0.3.3: {} + + zip-stream@4.1.1: + dependencies: + archiver-utils: 3.0.4 + compress-commons: 4.1.2 + readable-stream: 3.6.2 diff --git a/docker-compose.yaml b/docker-compose.yaml index 4974e8b8..24b51762 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,5 +1,4 @@ name: firecrawl -version: '3.9' x-common-service: &common-service build: apps/api @@ -16,7 +15,6 @@ x-common-service: &common-service - OPENAI_BASE_URL=${OPENAI_BASE_URL} - MODEL_NAME=${MODEL_NAME:-gpt-4o} - SLACK_WEBHOOK_URL=${SLACK_WEBHOOK_URL} - - SERPER_API_KEY=${SERPER_API_KEY} - LLAMAPARSE_API_KEY=${LLAMAPARSE_API_KEY} - LOGTAIL_KEY=${LOGTAIL_KEY} - 
BULL_AUTH_KEY=${BULL_AUTH_KEY} @@ -29,6 +27,7 @@ x-common-service: &common-service - SCRAPING_BEE_API_KEY=${SCRAPING_BEE_API_KEY} - HOST=${HOST:-0.0.0.0} - SELF_HOSTED_WEBHOOK_URL=${SELF_HOSTED_WEBHOOK_URL} + - LOGGING_LEVEL=${LOGGING_LEVEL} extra_hosts: - "host.docker.internal:host-gateway" diff --git a/examples/kubernetes/cluster-install/secret.yaml b/examples/kubernetes/cluster-install/secret.yaml index 2be96320..6d8eed3b 100644 --- a/examples/kubernetes/cluster-install/secret.yaml +++ b/examples/kubernetes/cluster-install/secret.yaml @@ -6,7 +6,6 @@ type: Opaque data: OPENAI_API_KEY: "" SLACK_WEBHOOK_URL: "" - SERPER_API_KEY: "" LLAMAPARSE_API_KEY: "" LOGTAIL_KEY: "" BULL_AUTH_KEY: "" diff --git a/temp-37564.rdb b/temp-37564.rdb new file mode 100644 index 00000000..2d331a24 Binary files /dev/null and b/temp-37564.rdb differ
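
Editor's note on the docker-compose change above: the diff threads a new `LOGGING_LEVEL` environment variable into the API containers. As a rough illustration of how a Node service typically consumes such a variable — the level names, the `info` default, and the `log` helper below are assumptions for this sketch, not Firecrawl's actual logger implementation — a severity threshold can be derived from `process.env.LOGGING_LEVEL` like so:

```typescript
// Hypothetical sketch: gating log output on a LOGGING_LEVEL env var.
// Level names and the "info" default are assumptions, not Firecrawl's real logger.

const LEVELS = ["error", "warn", "info", "debug", "trace"] as const;
type Level = (typeof LEVELS)[number];

// Fall back to "info" when LOGGING_LEVEL is unset or unrecognized (assumed default).
const configured = (process.env.LOGGING_LEVEL ?? "info").toLowerCase();
const threshold: Level = (LEVELS as readonly string[]).includes(configured)
  ? (configured as Level)
  : "info";

function log(level: Level, message: string): void {
  // Lower index = higher severity; emit only messages at or above the threshold.
  if (LEVELS.indexOf(level) <= LEVELS.indexOf(threshold)) {
    console.log(`[${level.toUpperCase()}] ${message}`);
  }
}

log("info", "worker started");  // printed at the assumed default level
log("debug", "queue payload");  // suppressed unless LOGGING_LEVEL=debug or trace
```

Because the compose file forwards the variable with `- LOGGING_LEVEL=${LOGGING_LEVEL}`, the value would be supplied from the host environment or a `.env` file at startup, e.g. `LOGGING_LEVEL=debug docker compose up`.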