diff --git a/.devcontainer/post_start_command.sh b/.devcontainer/post_start_command.sh index e3d5a6d59d..56e87614ba 100755 --- a/.devcontainer/post_start_command.sh +++ b/.devcontainer/post_start_command.sh @@ -1,3 +1,3 @@ #!/bin/bash -poetry install -C api \ No newline at end of file +cd api && poetry install \ No newline at end of file diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml index e9c2b7b086..c87d5a4dd4 100644 --- a/.github/workflows/api-tests.yml +++ b/.github/workflows/api-tests.yml @@ -78,7 +78,7 @@ jobs: - name: Run Workflow run: poetry run -C api bash dev/pytest/pytest_workflow.sh - - name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch) + - name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase) uses: hoverkraft-tech/compose-action@v2.0.0 with: compose-file: | @@ -86,6 +86,7 @@ jobs: services: | weaviate qdrant + couchbase-server etcd minio milvus-standalone diff --git a/.github/workflows/build-push.yml b/.github/workflows/build-push.yml index 6daaaf5791..8e5279fb67 100644 --- a/.github/workflows/build-push.yml +++ b/.github/workflows/build-push.yml @@ -49,7 +49,7 @@ jobs: echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV - name: Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ env.DOCKERHUB_USER }} password: ${{ env.DOCKERHUB_TOKEN }} @@ -114,7 +114,7 @@ jobs: merge-multiple: true - name: Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ env.DOCKERHUB_USER }} password: ${{ env.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/expose_service_ports.sh b/.github/workflows/expose_service_ports.sh index ae3e0ee69d..bc65c19a91 100755 --- a/.github/workflows/expose_service_ports.sh +++ b/.github/workflows/expose_service_ports.sh @@ -7,5 +7,7 @@ yq eval '.services["milvus-standalone"].ports += ["19530:19530"]' -i docker/dock yq eval '.services.pgvector.ports += ["5433:5432"]' -i docker/docker-compose.yaml yq eval '.services["pgvecto-rs"].ports += ["5431:5432"]' -i docker/docker-compose.yaml yq eval '.services["elasticsearch"].ports += ["9200:9200"]' -i docker/docker-compose.yaml +yq eval '.services.couchbase-server.ports += ["8091-8096:8091-8096"]' -i docker/docker-compose.yaml +yq eval '.services.couchbase-server.ports += ["11210:11210"]' -i docker/docker-compose.yaml -echo "Ports exposed for sandbox, weaviate, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch" \ No newline at end of file +echo "Ports exposed for sandbox, weaviate, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch, couchbase" diff --git a/.gitignore b/.gitignore index 27cf8a4ba3..60b5781733 100644 --- a/.gitignore +++ b/.gitignore @@ -173,6 +173,8 @@ docker/volumes/myscale/log/* docker/volumes/unstructured/* docker/volumes/pgvector/data/* docker/volumes/pgvecto_rs/data/* +docker/volumes/couchbase/* +docker/volumes/oceanbase/* docker/nginx/conf.d/default.conf docker/nginx/ssl/* @@ -189,4 +191,4 @@ pyrightconfig.json api/.vscode .idea/ -.vscode \ No newline at end of file +.vscode diff --git a/README.md b/README.md index f6d14bb840..61bd0d1e26 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,9 @@ ![cover-v5-optimized](https://github.com/langgenius/dify/assets/13230914/f9e19af5-61ba-4119-b926-d10c4c06ebab) +

+ 📌 Introducing Dify Workflow File Upload: Recreate Google NotebookLM Podcast +

+

Dify Cloud · Self-hosting · @@ -42,6 +46,56 @@

+## Table of Contents
+0. [Quick-Start🚀](https://github.com/langgenius/dify?tab=readme-ov-file#quick-start)
+
+1. [Intro📖](https://github.com/langgenius/dify?tab=readme-ov-file#intro)
+
+2. [How to use🔧](https://github.com/langgenius/dify?tab=readme-ov-file#using-dify)
+
+3. [Stay Ahead🏃](https://github.com/langgenius/dify?tab=readme-ov-file#staying-ahead)
+
+4. [Next Steps🏹](https://github.com/langgenius/dify?tab=readme-ov-file#next-steps)
+
+5. [Contributing💪](https://github.com/langgenius/dify?tab=readme-ov-file#contributing)
+
+6. [Community and Contact🏠](https://github.com/langgenius/dify?tab=readme-ov-file#community--contact)
+
+7. [Star-History📈](https://github.com/langgenius/dify?tab=readme-ov-file#star-history)
+
+8. [Security🔒](https://github.com/langgenius/dify?tab=readme-ov-file#security-disclosure)
+
+9. [License🤝](https://github.com/langgenius/dify?tab=readme-ov-file#license)
+
+> Make sure you read through this README before you start using Dify 😊
+
+
+## Quick start
+The quickest way to deploy Dify locally is to run our [docker-compose.yaml](https://github.com/langgenius/dify/blob/main/docker/docker-compose.yaml). Follow the instructions below to get started in 5 minutes.
+
+> Before installing Dify, make sure your machine meets the following minimum system requirements:
+>
+>- CPU >= 2 Cores
+>- RAM >= 4 GiB
+>- Docker and Docker Compose installed
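Before running the commands below, a quick optional sanity check confirms both tools are available (these are standard Docker CLI invocations, shown here only as an illustration):

```bash
docker --version          # prints the installed Docker version
docker compose version    # confirms the Compose v2 plugin is present
```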
+
+Run the following command in your terminal to clone the whole repo.
+```bash
+git clone https://github.com/langgenius/dify.git
+```
+After cloning, run the following commands one by one.
+```bash
+cd dify
+cd docker
+cp .env.example .env
+docker compose up -d
+```
+
+After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initialization process. You will be asked to set up an admin account.
+For more details on the quick setup, check [here](https://docs.dify.ai/getting-started/install-self-hosted/docker-compose).
+
+## Intro
 Dify is an open-source LLM app development platform. Its intuitive interface combines AI workflow, RAG pipeline, agent capabilities, model management, observability features and more, letting you quickly go from prototype to production. Here's a list of the core features:

@@ -75,73 +129,6 @@ Dify is an open-source LLM app development platform. Its intuitive interface com

 All of Dify's offerings come with corresponding APIs, so you could effortlessly integrate Dify into your own business logic.

-## Feature comparison
-
-| Feature | Dify.AI | LangChain | Flowise | OpenAI Assistants API |
-| --- | --- | --- | --- | --- |
-| Programming Approach | API + App-oriented | Python Code | App-oriented | API-oriented |
-| Supported LLMs | Rich Variety | Rich Variety | Rich Variety | OpenAI-only |
-| RAG Engine | ✅ | ✅ | ✅ | ✅ |
-| Agent | ✅ | ✅ | ❌ | ✅ |
-| Workflow | ✅ | ❌ | ✅ | ❌ |
-| Observability | ✅ | ✅ | ❌ | ❌ |
-| Enterprise Features (SSO/Access control) | ✅ | ❌ | ❌ | ❌ |
-| Local Deployment | ✅ | ✅ | ✅ | ❌ |
- ## Using Dify - **Cloud
**
@@ -162,30 +149,21 @@ Star Dify on GitHub and be instantly notified of new releases.

 ![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4)

-
-
-## Quick start
-> Before installing Dify, make sure your machine meets the following minimum system requirements:
->
->- CPU >= 2 Core
->- RAM >= 4 GiB
-
-
-The easiest way to start the Dify server is to run our [docker-compose.yml](docker/docker-compose.yaml) file. Before running the installation command, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:
-
-```bash
-cd docker
-cp .env.example .env
-docker compose up -d
-```
-
-After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initialization process.
-
-> If you'd like to contribute to Dify or do additional development, refer to our [guide to deploying from source code](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code)
-
 ## Next steps
+Go to [quick-start](https://github.com/langgenius/dify?tab=readme-ov-file#quick-start) to set up Dify, or set it up from source code.
+
+#### If you...
+If you forget your admin account, refer to this [guide](https://docs.dify.ai/getting-started/install-self-hosted/faqs#id-4.-how-to-reset-the-password-of-the-admin-account) to reset the password.
+
+> Run `docker compose up` without `-d` to print logs to your terminal. This can be useful if you run into unknown problems when using Dify.
+
+If you encounter a system error and want to ask for help in GitHub issues, always paste the error logs into the issue to speed up the conversation. Go to [Community & contact](https://github.com/langgenius/dify?tab=readme-ov-file#community--contact) for more information.
+
+> Please read the [Dify Documentation](https://docs.dify.ai/) for detailed how-to guidance. Most potential problems are explained in the docs.
+
+> If you'd like to contribute to Dify or do additional development, refer to our [guide to deploying from source code](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code)
+
 If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d` (a sketch of this loop follows below). You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).

 If you'd like to configure a highly-available setup, there are community-contributed [Helm Charts](https://helm.sh/) and YAML files which allow Dify to be deployed on Kubernetes.

@@ -224,6 +202,7 @@ At the same time, please consider supporting Dify by sharing it on social media

 * [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
 * [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
 * [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
+* When reporting an error, attach logs whenever possible; it makes the issue much faster to resolve.
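A minimal sketch of the customization loop described under Next steps (illustrative only; `UPLOAD_FILE_SIZE_LIMIT` is one of the keys documented in `.env.example`, and the values here are examples):

```bash
cd docker
cp .env.example .env
# example tweak: raise the document upload size limit to 50 MB
sed -i 's/^UPLOAD_FILE_SIZE_LIMIT=.*/UPLOAD_FILE_SIZE_LIMIT=50/' .env
docker compose up -d   # re-create the containers with the new values
```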
## Star history diff --git a/README_CN.md b/README_CN.md index 689f98ccf4..070951699a 100644 --- a/README_CN.md +++ b/README_CN.md @@ -154,7 +154,7 @@ Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI 我们提供[ Dify 云服务](https://dify.ai),任何人都可以零设置尝试。它提供了自部署版本的所有功能,并在沙盒计划中包含 200 次免费的 GPT-4 调用。 - **自托管 Dify 社区版
** -使用这个[入门指南](#quick-start)快速在您的环境中运行 Dify。 +使用这个[入门指南](#快速启动)快速在您的环境中运行 Dify。 使用我们的[文档](https://docs.dify.ai)进行进一步的参考和更深入的说明。 - **面向企业/组织的 Dify
** diff --git a/README_PT.md b/README_PT.md new file mode 100644 index 0000000000..3d66b76802 --- /dev/null +++ b/README_PT.md @@ -0,0 +1,241 @@ +![cover-v5-optimized](https://github.com/langgenius/dify/assets/13230914/f9e19af5-61ba-4119-b926-d10c4c06ebab) + +

+ 📌 Introduzindo o Dify Workflow com Upload de Arquivo: Recrie o Podcast Google NotebookLM +

+ +

+ Dify Cloud · + Auto-hospedagem · + Documentação · + Consultas empresariais +

+ +

+ + Static Badge + + Static Badge + + chat on Discord + + follow on X(Twitter) + + Docker Pulls + + Commits last month + + Issues closed + + Discussion posts +

+ +

+ README em Inglês + 简体中文版自述文件 + 日本語のREADME + README em Espanhol + README em Francês + README tlhIngan Hol + README em Coreano + README em Árabe + README em Turco + README em Vietnamita + README em Português - BR +

+ +Dify é uma plataforma de desenvolvimento de aplicativos LLM de código aberto. Sua interface intuitiva combina workflow de IA, pipeline RAG, capacidades de agente, gerenciamento de modelos, recursos de observabilidade e muito mais, permitindo que você vá rapidamente do protótipo à produção. Aqui está uma lista das principais funcionalidades: +

+
+**1. Workflow**:
+  Construa e teste workflows poderosos de IA em uma interface visual, aproveitando todos os recursos a seguir e muito mais.
+
+  https://github.com/langgenius/dify/assets/13230914/356df23e-1604-483d-80a6-9517ece318aa
+
+**2. Suporte abrangente a modelos**:
+  Integração perfeita com centenas de LLMs proprietários e de código aberto de diversos provedores e soluções auto-hospedadas, abrangendo GPT, Mistral, Llama3 e qualquer modelo compatível com a API da OpenAI. A lista completa de provedores suportados pode ser encontrada [aqui](https://docs.dify.ai/getting-started/readme/model-providers).
+
+![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)
+
+**3. IDE de Prompt**:
+  Interface intuitiva para criação de prompts, comparação de desempenho de modelos e adição de recursos como conversão de texto para fala em um aplicativo baseado em chat.
+
+**4. Pipeline RAG**:
+  Extensas capacidades de RAG que cobrem desde a ingestão de documentos até a recuperação, com suporte nativo para extração de texto de PDFs, PPTs e outros formatos de documentos comuns.
+
+**5. Capacidades de agente**:
+  Você pode definir agentes com base em LLM Function Calling ou ReAct e adicionar ferramentas pré-construídas ou personalizadas para o agente. O Dify oferece mais de 50 ferramentas integradas para agentes de IA, como Google Search, DALL·E, Stable Diffusion e WolframAlpha.
+
+**6. LLMOps**:
+  Monitore e analise os registros e o desempenho do aplicativo ao longo do tempo. É possível melhorar continuamente prompts, conjuntos de dados e modelos com base nos dados de produção e anotações.
+
+**7. Backend como Serviço**:
+  Todos os recursos do Dify vêm com APIs correspondentes, permitindo que você integre o Dify sem esforço na lógica de negócios da sua empresa.
+
+
+## Comparação de recursos
+
+| Recurso | Dify.AI | LangChain | Flowise | OpenAI Assistants API |
+| --- | --- | --- | --- | --- |
+| Abordagem de Programação | Orientada a API + Aplicativo | Código Python | Orientada a Aplicativo | Orientada a API |
+| LLMs Suportados | Variedade Rica | Variedade Rica | Variedade Rica | Apenas OpenAI |
+| RAG Engine | ✅ | ✅ | ✅ | ✅ |
+| Agente | ✅ | ✅ | ❌ | ✅ |
+| Workflow | ✅ | ❌ | ✅ | ❌ |
+| Observabilidade | ✅ | ✅ | ❌ | ❌ |
+| Recursos Empresariais (SSO/Controle de Acesso) | ✅ | ❌ | ❌ | ❌ |
+| Implantação Local | ✅ | ✅ | ✅ | ❌ |
+ +## Usando o Dify + +- **Nuvem
** +Oferecemos o serviço [Dify Cloud](https://dify.ai) para qualquer pessoa experimentar sem nenhuma configuração. Ele fornece todas as funcionalidades da versão auto-hospedada, incluindo 200 chamadas GPT-4 gratuitas no plano sandbox. + +- **Auto-hospedagem do Dify Community Edition
**
+Configure rapidamente o Dify no seu ambiente com este [guia inicial](#início-rápido).
+Use nossa [documentação](https://docs.dify.ai) para referências adicionais e instruções mais detalhadas.
+
+- **Dify para empresas/organizações
** +Oferecemos recursos adicionais voltados para empresas. [Envie suas perguntas através deste chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) ou [envie-nos um e-mail](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) para discutir necessidades empresariais.
+ > Para startups e pequenas empresas que utilizam AWS, confira o [Dify Premium no AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) e implemente no seu próprio AWS VPC com um clique. É uma oferta AMI acessível com a opção de criar aplicativos com logotipo e marca personalizados. + + +## Mantendo-se atualizado + +Dê uma estrela no Dify no GitHub e seja notificado imediatamente sobre novos lançamentos. + +![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4) + + + +## Início rápido +> Antes de instalar o Dify, certifique-se de que sua máquina atenda aos seguintes requisitos mínimos de sistema: +> +>- CPU >= 2 Núcleos +>- RAM >= 4 GiB + +
+ +A maneira mais fácil de iniciar o servidor Dify é executar nosso arquivo [docker-compose.yml](docker/docker-compose.yaml). Antes de rodar o comando de instalação, certifique-se de que o [Docker](https://docs.docker.com/get-docker/) e o [Docker Compose](https://docs.docker.com/compose/install/) estão instalados na sua máquina: + +```bash +cd docker +cp .env.example .env +docker compose up -d +``` + +Após a execução, você pode acessar o painel do Dify no navegador em [http://localhost/install](http://localhost/install) e iniciar o processo de inicialização. + +> Se você deseja contribuir com o Dify ou fazer desenvolvimento adicional, consulte nosso [guia para implantar a partir do código fonte](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code). + +## Próximos passos + +Se precisar personalizar a configuração, consulte os comentários no nosso arquivo [.env.example](docker/.env.example) e atualize os valores correspondentes no seu arquivo `.env`. Além disso, talvez seja necessário fazer ajustes no próprio arquivo `docker-compose.yaml`, como alterar versões de imagem, mapeamentos de portas ou montagens de volumes, com base no seu ambiente de implantação específico e nas suas necessidades. Após fazer quaisquer alterações, execute novamente `docker-compose up -d`. Você pode encontrar a lista completa de variáveis de ambiente disponíveis [aqui](https://docs.dify.ai/getting-started/install-self-hosted/environments). + +Se deseja configurar uma instalação de alta disponibilidade, há [Helm Charts](https://helm.sh/) e arquivos YAML contribuídos pela comunidade que permitem a implantação do Dify no Kubernetes. + +- [Helm Chart de @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify) +- [Helm Chart de @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm) +- [Arquivo YAML de @Winson-030](https://github.com/Winson-030/dify-kubernetes) + +#### Usando o Terraform para Implantação + +Implante o Dify na Plataforma Cloud com um único clique usando [terraform](https://www.terraform.io/) + +##### Azure Global +- [Azure Terraform por @nikawang](https://github.com/nikawang/dify-azure-terraform) + +##### Google Cloud +- [Google Cloud Terraform por @sotazum](https://github.com/DeNA/dify-google-cloud-terraform) + +## Contribuindo + +Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +Ao mesmo tempo, considere apoiar o Dify compartilhando-o nas redes sociais e em eventos e conferências. + +> Estamos buscando contribuidores para ajudar na tradução do Dify para idiomas além de Mandarim e Inglês. Se você tiver interesse em ajudar, consulte o [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) para mais informações e deixe-nos um comentário no canal `global-users` em nosso [Servidor da Comunidade no Discord](https://discord.gg/8Tpq4AcN9c). + +**Contribuidores** + + + + + +## Comunidade e contato + +* [Discussões no GitHub](https://github.com/langgenius/dify/discussions). Melhor para: compartilhar feedback e fazer perguntas. +* [Problemas no GitHub](https://github.com/langgenius/dify/issues). Melhor para: relatar bugs encontrados no Dify.AI e propor novos recursos. Veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +* [Discord](https://discord.gg/FngNHpbcY7). Melhor para: compartilhar suas aplicações e interagir com a comunidade. +* [X(Twitter)](https://twitter.com/dify_ai). 
Melhor para: compartilhar suas aplicações e interagir com a comunidade.
+
+## Histórico de estrelas
+
+[![Gráfico de Histórico de Estrelas](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
+
+## Divulgação de segurança
+
+Para proteger sua privacidade, evite postar problemas de segurança no GitHub. Em vez disso, envie suas perguntas para security@dify.ai e forneceremos uma resposta mais detalhada.
+
+## Licença
+
+Este repositório está disponível sob a [Licença de Código Aberto Dify](LICENSE), que é essencialmente Apache 2.0 com algumas restrições adicionais.
\ No newline at end of file
diff --git a/api/.env.example b/api/.env.example
index f2b8a19dda..d13f4a13ad 100644
--- a/api/.env.example
+++ b/api/.env.example
@@ -31,8 +31,17 @@ REDIS_HOST=localhost
 REDIS_PORT=6379
 REDIS_USERNAME=
 REDIS_PASSWORD=difyai123456
+REDIS_USE_SSL=false
 REDIS_DB=0

+# Redis Sentinel configuration
+REDIS_USE_SENTINEL=false
+REDIS_SENTINELS=
+REDIS_SENTINEL_SERVICE_NAME=
+REDIS_SENTINEL_USERNAME=
+REDIS_SENTINEL_PASSWORD=
+REDIS_SENTINEL_SOCKET_TIMEOUT=0.1
+
 # PostgreSQL database configuration
 DB_USERNAME=postgres
 DB_PASSWORD=difyai123456
@@ -111,7 +120,7 @@ SUPABASE_URL=your-server-url
 WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
 CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*

-# Vector database configuration, support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, vikingdb
+# Vector database configuration, support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash
 VECTOR_STORE=weaviate

 # Weaviate configuration
@@ -127,6 +136,13 @@ QDRANT_CLIENT_TIMEOUT=20
 QDRANT_GRPC_ENABLED=false
 QDRANT_GRPC_PORT=6334

+# Couchbase configuration
+COUCHBASE_CONNECTION_STRING=127.0.0.1
+COUCHBASE_USER=Administrator
+COUCHBASE_PASSWORD=password
+COUCHBASE_BUCKET_NAME=Embeddings
+COUCHBASE_SCOPE_NAME=_default
+
 # Milvus configuration
 MILVUS_URI=http://127.0.0.1:19530
 MILVUS_TOKEN=
@@ -186,6 +202,20 @@ TIDB_VECTOR_USER=xxx.root
 TIDB_VECTOR_PASSWORD=xxxxxx
 TIDB_VECTOR_DATABASE=dify

+# TiDB on Qdrant configuration
+TIDB_ON_QDRANT_URL=http://127.0.0.1
+TIDB_ON_QDRANT_API_KEY=dify
+TIDB_ON_QDRANT_CLIENT_TIMEOUT=20
+TIDB_ON_QDRANT_GRPC_ENABLED=false
+TIDB_ON_QDRANT_GRPC_PORT=6334
+TIDB_PUBLIC_KEY=dify
+TIDB_PRIVATE_KEY=dify
+TIDB_API_URL=http://127.0.0.1
+TIDB_IAM_API_URL=http://127.0.0.1
+TIDB_REGION=regions/aws-us-east-1
+TIDB_PROJECT_ID=dify
+TIDB_SPEND_LIMIT=100
+
 # Chroma configuration
 CHROMA_HOST=127.0.0.1
 CHROMA_PORT=8000
@@ -220,6 +250,10 @@ BAIDU_VECTOR_DB_DATABASE=dify
 BAIDU_VECTOR_DB_SHARD=1
 BAIDU_VECTOR_DB_REPLICAS=3

+# Upstash configuration
+UPSTASH_VECTOR_URL=your-server-url
+UPSTASH_VECTOR_TOKEN=your-access-token
+
 # ViKingDB configuration
 VIKINGDB_ACCESS_KEY=your-ak
 VIKINGDB_SECRET_KEY=your-sk
@@ -229,6 +263,14 @@ VIKINGDB_SCHEMA=http
 VIKINGDB_CONNECTION_TIMEOUT=30
 VIKINGDB_SOCKET_TIMEOUT=30

+# OceanBase Vector configuration
+OCEANBASE_VECTOR_HOST=127.0.0.1
+OCEANBASE_VECTOR_PORT=2881
+OCEANBASE_VECTOR_USER=root@test
+OCEANBASE_VECTOR_PASSWORD=
+OCEANBASE_VECTOR_DATABASE=test
+OCEANBASE_MEMORY_LIMIT=6G
+
 # Upload configuration
 UPLOAD_FILE_SIZE_LIMIT=15
 UPLOAD_FILE_BATCH_LIMIT=5
@@ -239,6 +281,7 @@ UPLOAD_AUDIO_FILE_SIZE_LIMIT=50
 # Model Configuration
 MULTIMODAL_SEND_IMAGE_FORMAT=base64
 PROMPT_GENERATION_MAX_TOKENS=512
+CODE_GENERATION_MAX_TOKENS=1024

 # Mail configuration, support: resend, smtp
 MAIL_TYPE=
@@ -304,6 +347,10 @@ RESPECT_XFORWARD_HEADERS_ENABLED=false

 # Log file path
 LOG_FILE=
+# Log file max size, the unit is MB
+LOG_FILE_MAX_SIZE=20
+# Log file max backup count
+LOG_FILE_BACKUP_COUNT=5

 # Indexing configuration
 INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=1000
diff --git a/api/Dockerfile b/api/Dockerfile
index 4ec89d5f3c..c71317f797 100644
--- a/api/Dockerfile
+++ b/api/Dockerfile
@@ -55,7 +55,14 @@ RUN apt-get update \
     && echo "deb http://deb.debian.org/debian testing main" > /etc/apt/sources.list \
     && apt-get update \
     # For Security
-    && apt-get install -y --no-install-recommends zlib1g=1:1.3.dfsg+really1.3.1-1 libldap-2.5-0=2.5.18+dfsg-3 perl=5.38.2-5 libsqlite3-0=3.46.1-1 \
+    && apt-get install -y --no-install-recommends expat=2.6.3-2 libldap-2.5-0=2.5.18+dfsg-3+b1 perl=5.40.0-6 libsqlite3-0=3.46.1-1 \
+    && if [ "$(dpkg --print-architecture)" = "amd64" ]; then \
+        apt-get install -y --no-install-recommends zlib1g=1:1.3.dfsg+really1.3.1-1+b1; \
+    else \
+        apt-get install -y --no-install-recommends zlib1g=1:1.3.dfsg+really1.3.1-1; \
+    fi \
+    # Install a Chinese font to support tools like matplotlib
+    && apt-get install -y fonts-noto-cjk \
     && apt-get autoremove -y \
     && rm -rf /var/lib/apt/lists/*
diff --git a/api/commands.py b/api/commands.py
index cd250a0b59..64238698f8 100644
--- a/api/commands.py
+++ b/api/commands.py
@@ -278,6 +278,9 @@ def migrate_knowledge_vector_database():
         VectorType.TENCENT,
         VectorType.BAIDU,
         VectorType.VIKINGDB,
+        VectorType.UPSTASH,
+        VectorType.COUCHBASE,
+        VectorType.OCEANBASE,
     }
     page = 1
     while True:
diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py
index 72beccd49b..f88019fbb6 100644
--- a/api/configs/feature/__init__.py
+++ b/api/configs/feature/__init__.py
@@ -372,6 +372,16 @@ class LoggingConfig(BaseSettings):
         default=None,
     )

+    LOG_FILE_MAX_SIZE: PositiveInt = Field(
+        description="Maximum size of a log file before rotation, in megabytes (MB)",
+        default=20,
+    )
+
+    LOG_FILE_BACKUP_COUNT: PositiveInt = Field(
+        description="Maximum number of rotated log file backups to retain",
+        default=5,
+    )
+
     LOG_FORMAT: str = Field(
         description="Format string for log messages",
         default="%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s",
     )
@@ -614,6 +624,11 @@ class DataSetConfig(BaseSettings):
         default=False,
     )

+    TIDB_SERVERLESS_NUMBER: PositiveInt = Field(
+        description="Number of TiDB Serverless clusters",
+        default=500,
+    )
+

 class WorkspaceConfig(BaseSettings):
     """
diff --git a/api/configs/middleware/__init__.py b/api/configs/middleware/__init__.py
index 84d03e2f45..4be761747d 100644
--- a/api/configs/middleware/__init__.py
+++ b/api/configs/middleware/__init__.py
@@ -16,10 +16,13 @@ from configs.middleware.storage.supabase_storage_config import SupabaseStorageCo
 from configs.middleware.storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
 from configs.middleware.storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig
 from configs.middleware.vdb.analyticdb_config import AnalyticdbConfig
+from configs.middleware.vdb.baidu_vector_config import BaiduVectorDBConfig
 from configs.middleware.vdb.chroma_config import ChromaConfig
+from configs.middleware.vdb.couchbase_config import CouchbaseConfig
 from configs.middleware.vdb.elasticsearch_config import ElasticsearchConfig
 from configs.middleware.vdb.milvus_config import MilvusConfig
 from configs.middleware.vdb.myscale_config import MyScaleConfig
+from configs.middleware.vdb.oceanbase_config
import OceanBaseVectorConfig from configs.middleware.vdb.opensearch_config import OpenSearchConfig from configs.middleware.vdb.oracle_config import OracleConfig from configs.middleware.vdb.pgvector_config import PGVectorConfig @@ -27,7 +30,9 @@ from configs.middleware.vdb.pgvectors_config import PGVectoRSConfig from configs.middleware.vdb.qdrant_config import QdrantConfig from configs.middleware.vdb.relyt_config import RelytConfig from configs.middleware.vdb.tencent_vector_config import TencentVectorDBConfig +from configs.middleware.vdb.tidb_on_qdrant_config import TidbOnQdrantConfig from configs.middleware.vdb.tidb_vector_config import TiDBVectorConfig +from configs.middleware.vdb.upstash_config import UpstashConfig from configs.middleware.vdb.vikingdb_config import VikingDBConfig from configs.middleware.vdb.weaviate_config import WeaviateConfig @@ -53,6 +58,11 @@ class VectorStoreConfig(BaseSettings): default=None, ) + VECTOR_STORE_WHITELIST_ENABLE: Optional[bool] = Field( + description="Enable whitelist for vector store.", + default=False, + ) + class KeywordStoreConfig(BaseSettings): KEYWORD_STORE: str = Field( @@ -244,7 +254,12 @@ class MiddlewareConfig( TiDBVectorConfig, WeaviateConfig, ElasticsearchConfig, + CouchbaseConfig, InternalTestConfig, VikingDBConfig, + UpstashConfig, + TidbOnQdrantConfig, + OceanBaseVectorConfig, + BaiduVectorDBConfig, ): pass diff --git a/api/configs/middleware/vdb/couchbase_config.py b/api/configs/middleware/vdb/couchbase_config.py new file mode 100644 index 0000000000..391089ec6e --- /dev/null +++ b/api/configs/middleware/vdb/couchbase_config.py @@ -0,0 +1,34 @@ +from typing import Optional + +from pydantic import BaseModel, Field + + +class CouchbaseConfig(BaseModel): + """ + Couchbase configs + """ + + COUCHBASE_CONNECTION_STRING: Optional[str] = Field( + description="COUCHBASE connection string", + default=None, + ) + + COUCHBASE_USER: Optional[str] = Field( + description="COUCHBASE user", + default=None, + ) + + COUCHBASE_PASSWORD: Optional[str] = Field( + description="COUCHBASE password", + default=None, + ) + + COUCHBASE_BUCKET_NAME: Optional[str] = Field( + description="COUCHBASE bucket name", + default=None, + ) + + COUCHBASE_SCOPE_NAME: Optional[str] = Field( + description="COUCHBASE scope name", + default=None, + ) diff --git a/api/configs/middleware/vdb/oceanbase_config.py b/api/configs/middleware/vdb/oceanbase_config.py new file mode 100644 index 0000000000..87427af960 --- /dev/null +++ b/api/configs/middleware/vdb/oceanbase_config.py @@ -0,0 +1,35 @@ +from typing import Optional + +from pydantic import Field, PositiveInt +from pydantic_settings import BaseSettings + + +class OceanBaseVectorConfig(BaseSettings): + """ + Configuration settings for OceanBase Vector database + """ + + OCEANBASE_VECTOR_HOST: Optional[str] = Field( + description="Hostname or IP address of the OceanBase Vector server (e.g. 
'localhost')", + default=None, + ) + + OCEANBASE_VECTOR_PORT: Optional[PositiveInt] = Field( + description="Port number on which the OceanBase Vector server is listening (default is 2881)", + default=2881, + ) + + OCEANBASE_VECTOR_USER: Optional[str] = Field( + description="Username for authenticating with the OceanBase Vector database", + default=None, + ) + + OCEANBASE_VECTOR_PASSWORD: Optional[str] = Field( + description="Password for authenticating with the OceanBase Vector database", + default=None, + ) + + OCEANBASE_VECTOR_DATABASE: Optional[str] = Field( + description="Name of the OceanBase Vector database to connect to", + default=None, + ) diff --git a/api/configs/middleware/vdb/tidb_on_qdrant_config.py b/api/configs/middleware/vdb/tidb_on_qdrant_config.py new file mode 100644 index 0000000000..d2625af264 --- /dev/null +++ b/api/configs/middleware/vdb/tidb_on_qdrant_config.py @@ -0,0 +1,70 @@ +from typing import Optional + +from pydantic import Field, NonNegativeInt, PositiveInt +from pydantic_settings import BaseSettings + + +class TidbOnQdrantConfig(BaseSettings): + """ + Tidb on Qdrant configs + """ + + TIDB_ON_QDRANT_URL: Optional[str] = Field( + description="Tidb on Qdrant url", + default=None, + ) + + TIDB_ON_QDRANT_API_KEY: Optional[str] = Field( + description="Tidb on Qdrant api key", + default=None, + ) + + TIDB_ON_QDRANT_CLIENT_TIMEOUT: NonNegativeInt = Field( + description="Tidb on Qdrant client timeout in seconds", + default=20, + ) + + TIDB_ON_QDRANT_GRPC_ENABLED: bool = Field( + description="whether enable grpc support for Tidb on Qdrant connection", + default=False, + ) + + TIDB_ON_QDRANT_GRPC_PORT: PositiveInt = Field( + description="Tidb on Qdrant grpc port", + default=6334, + ) + + TIDB_PUBLIC_KEY: Optional[str] = Field( + description="Tidb account public key", + default=None, + ) + + TIDB_PRIVATE_KEY: Optional[str] = Field( + description="Tidb account private key", + default=None, + ) + + TIDB_API_URL: Optional[str] = Field( + description="Tidb API url", + default=None, + ) + + TIDB_IAM_API_URL: Optional[str] = Field( + description="Tidb IAM API url", + default=None, + ) + + TIDB_REGION: Optional[str] = Field( + description="Tidb serverless region", + default="regions/aws-us-east-1", + ) + + TIDB_PROJECT_ID: Optional[str] = Field( + description="Tidb project id", + default=None, + ) + + TIDB_SPEND_LIMIT: Optional[int] = Field( + description="Tidb spend limit", + default=100, + ) diff --git a/api/configs/middleware/vdb/upstash_config.py b/api/configs/middleware/vdb/upstash_config.py new file mode 100644 index 0000000000..412c56374a --- /dev/null +++ b/api/configs/middleware/vdb/upstash_config.py @@ -0,0 +1,20 @@ +from typing import Optional + +from pydantic import Field +from pydantic_settings import BaseSettings + + +class UpstashConfig(BaseSettings): + """ + Configuration settings for Upstash vector database + """ + + UPSTASH_VECTOR_URL: Optional[str] = Field( + description="URL of the upstash server (e.g., 'https://vector.upstash.io')", + default=None, + ) + + UPSTASH_VECTOR_TOKEN: Optional[str] = Field( + description="Token for authenticating with the upstash server", + default=None, + ) diff --git a/api/configs/packaging/__init__.py b/api/configs/packaging/__init__.py index 635d12fc55..3dc87e3058 100644 --- a/api/configs/packaging/__init__.py +++ b/api/configs/packaging/__init__.py @@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings): CURRENT_VERSION: str = Field( description="Dify version", - default="0.10.0", + default="0.10.2", ) COMMIT_SHA: str = Field( 
diff --git a/api/constants/__init__.py b/api/constants/__init__.py index 66b9c0b632..05795e11d7 100644 --- a/api/constants/__init__.py +++ b/api/constants/__init__.py @@ -15,7 +15,9 @@ AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS]) if dify_config.ETL_TYPE == "Unstructured": DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "pdf", "html", "htm", "xlsx", "xls"] - DOCUMENT_EXTENSIONS.extend(("docx", "csv", "eml", "msg", "pptx", "ppt", "xml", "epub")) + DOCUMENT_EXTENSIONS.extend(("docx", "csv", "eml", "msg", "pptx", "xml", "epub")) + if dify_config.UNSTRUCTURED_API_URL: + DOCUMENT_EXTENSIONS.append("ppt") DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS]) else: DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "pdf", "html", "htm", "xlsx", "xls", "docx", "csv"] diff --git a/api/controllers/common/errors.py b/api/controllers/common/errors.py new file mode 100644 index 0000000000..c71f1ce5a3 --- /dev/null +++ b/api/controllers/common/errors.py @@ -0,0 +1,6 @@ +from werkzeug.exceptions import HTTPException + + +class FilenameNotExistsError(HTTPException): + code = 400 + description = "The specified filename does not exist." diff --git a/api/controllers/common/helpers.py b/api/controllers/common/helpers.py new file mode 100644 index 0000000000..ed24b265ef --- /dev/null +++ b/api/controllers/common/helpers.py @@ -0,0 +1,58 @@ +import mimetypes +import os +import re +import urllib.parse +from uuid import uuid4 + +import httpx +from pydantic import BaseModel + + +class FileInfo(BaseModel): + filename: str + extension: str + mimetype: str + size: int + + +def guess_file_info_from_response(response: httpx.Response): + url = str(response.url) + # Try to extract filename from URL + parsed_url = urllib.parse.urlparse(url) + url_path = parsed_url.path + filename = os.path.basename(url_path) + + # If filename couldn't be extracted, use Content-Disposition header + if not filename: + content_disposition = response.headers.get("Content-Disposition") + if content_disposition: + filename_match = re.search(r'filename="?(.+)"?', content_disposition) + if filename_match: + filename = filename_match.group(1) + + # If still no filename, generate a unique one + if not filename: + unique_name = str(uuid4()) + filename = f"{unique_name}" + + # Guess MIME type from filename first, then URL + mimetype, _ = mimetypes.guess_type(filename) + if mimetype is None: + mimetype, _ = mimetypes.guess_type(url) + if mimetype is None: + # If guessing fails, use Content-Type from response headers + mimetype = response.headers.get("Content-Type", "application/octet-stream") + + extension = os.path.splitext(filename)[1] + + # Ensure filename has an extension + if not extension: + extension = mimetypes.guess_extension(mimetype) or ".bin" + filename = f"{filename}{extension}" + + return FileInfo( + filename=filename, + extension=extension, + mimetype=mimetype, + size=int(response.headers.get("Content-Length", -1)), + ) diff --git a/api/controllers/console/__init__.py b/api/controllers/console/__init__.py index 8198e9d0ff..9d0dd3fb23 100644 --- a/api/controllers/console/__init__.py +++ b/api/controllers/console/__init__.py @@ -2,9 +2,21 @@ from flask import Blueprint from libs.external_api import ExternalApi +from .files import FileApi, FilePreviewApi, FileSupportTypeApi +from .remote_files import RemoteFileInfoApi, RemoteFileUploadApi + bp = Blueprint("console", __name__, url_prefix="/console/api") api = ExternalApi(bp) +# File +api.add_resource(FileApi, "/files/upload") 
+api.add_resource(FilePreviewApi, "/files/<uuid:file_id>/preview")
+api.add_resource(FileSupportTypeApi, "/files/support-type")
+
+# Remote files
+api.add_resource(RemoteFileInfoApi, "/remote-files/<path:url>")
+api.add_resource(RemoteFileUploadApi, "/remote-files/upload")
+
 # Import other controllers
 from . import admin, apikey, extension, feature, ping, setup, version
@@ -43,7 +55,6 @@ from .datasets import (
     datasets_document,
     datasets_segments,
     external,
-    file,
     hit_testing,
     website,
 )
diff --git a/api/controllers/console/apikey.py b/api/controllers/console/apikey.py
index e014964bf9..b612f7bd96 100644
--- a/api/controllers/console/apikey.py
+++ b/api/controllers/console/apikey.py
@@ -12,8 +12,7 @@ from models.dataset import Dataset
 from models.model import ApiToken, App

 from . import api
-from .setup import setup_required
-from .wraps import account_initialization_required
+from .wraps import account_initialization_required, setup_required

 api_key_fields = {
     "id": fields.String,
diff --git a/api/controllers/console/app/advanced_prompt_template.py b/api/controllers/console/app/advanced_prompt_template.py
index e7346bdf1d..c228743fa5 100644
--- a/api/controllers/console/app/advanced_prompt_template.py
+++ b/api/controllers/console/app/advanced_prompt_template.py
@@ -1,8 +1,7 @@
 from flask_restful import Resource, reqparse

 from controllers.console import api
-from controllers.console.setup import setup_required
-from controllers.console.wraps import account_initialization_required
+from controllers.console.wraps import account_initialization_required, setup_required
 from libs.login import login_required
 from services.advanced_prompt_template_service import AdvancedPromptTemplateService
diff --git a/api/controllers/console/app/agent.py b/api/controllers/console/app/agent.py
index 51899da705..d433415894 100644
--- a/api/controllers/console/app/agent.py
+++ b/api/controllers/console/app/agent.py
@@ -2,8 +2,7 @@ from flask_restful import Resource, reqparse

 from controllers.console import api
 from controllers.console.app.wraps import get_app_model
-from controllers.console.setup import setup_required
-from controllers.console.wraps import account_initialization_required
+from controllers.console.wraps import account_initialization_required, setup_required
 from libs.helper import uuid_value
 from libs.login import login_required
 from models.model import AppMode
diff --git a/api/controllers/console/app/annotation.py b/api/controllers/console/app/annotation.py
index 1ea1c82679..fd05cbc19b 100644
--- a/api/controllers/console/app/annotation.py
+++ b/api/controllers/console/app/annotation.py
@@ -6,8 +6,11 @@ from werkzeug.exceptions import Forbidden
 from controllers.console import api
 from controllers.console.app.error import NoFileUploadedError
 from controllers.console.datasets.error import TooManyFilesError
-from controllers.console.setup import setup_required
-from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
+from controllers.console.wraps import (
+    account_initialization_required,
+    cloud_edition_billing_resource_check,
+    setup_required,
+)
 from extensions.ext_redis import redis_client
 from fields.annotation_fields import (
     annotation_fields,
diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py
index 1b46a3a7d3..36338cbd8a 100644
--- a/api/controllers/console/app/app.py
+++ b/api/controllers/console/app/app.py
@@ -6,8 +6,11 @@ from werkzeug.exceptions import BadRequest, Forbidden, abort

 from controllers.console import api
 from
controllers.console.app.wraps import get_app_model -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check +from controllers.console.wraps import ( + account_initialization_required, + cloud_edition_billing_resource_check, + setup_required, +) from core.ops.ops_trace_manager import OpsTraceManager from fields.app_fields import ( app_detail_fields, diff --git a/api/controllers/console/app/audio.py b/api/controllers/console/app/audio.py index c1ef05a488..112446613f 100644 --- a/api/controllers/console/app/audio.py +++ b/api/controllers/console/app/audio.py @@ -18,8 +18,7 @@ from controllers.console.app.error import ( UnsupportedAudioTypeError, ) from controllers.console.app.wraps import get_app_model -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError from core.model_runtime.errors.invoke import InvokeError from libs.login import login_required diff --git a/api/controllers/console/app/completion.py b/api/controllers/console/app/completion.py index d3296d3dff..9896fcaab8 100644 --- a/api/controllers/console/app/completion.py +++ b/api/controllers/console/app/completion.py @@ -15,8 +15,7 @@ from controllers.console.app.error import ( ProviderQuotaExceededError, ) from controllers.console.app.wraps import get_app_model -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError from core.app.apps.base_app_queue_manager import AppQueueManager from core.app.entities.app_invoke_entities import InvokeFrom diff --git a/api/controllers/console/app/conversation.py b/api/controllers/console/app/conversation.py index b60a424d98..7b78f622b9 100644 --- a/api/controllers/console/app/conversation.py +++ b/api/controllers/console/app/conversation.py @@ -10,8 +10,7 @@ from werkzeug.exceptions import Forbidden, NotFound from controllers.console import api from controllers.console.app.wraps import get_app_model -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from core.app.entities.app_invoke_entities import InvokeFrom from extensions.ext_database import db from fields.conversation_fields import ( diff --git a/api/controllers/console/app/conversation_variables.py b/api/controllers/console/app/conversation_variables.py index 23b234dac9..d49f433ba1 100644 --- a/api/controllers/console/app/conversation_variables.py +++ b/api/controllers/console/app/conversation_variables.py @@ -4,8 +4,7 @@ from sqlalchemy.orm import Session from controllers.console import api from controllers.console.app.wraps import get_app_model -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from extensions.ext_database import db from fields.conversation_variable_fields import paginated_conversation_variable_fields from 
libs.login import login_required diff --git a/api/controllers/console/app/generator.py b/api/controllers/console/app/generator.py index 3d1e6b7a37..9c3cbe4e3e 100644 --- a/api/controllers/console/app/generator.py +++ b/api/controllers/console/app/generator.py @@ -10,8 +10,7 @@ from controllers.console.app.error import ( ProviderNotInitializeError, ProviderQuotaExceededError, ) -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError from core.llm_generator.llm_generator import LLMGenerator from core.model_runtime.errors.invoke import InvokeError @@ -52,4 +51,39 @@ class RuleGenerateApi(Resource): return rules +class RuleCodeGenerateApi(Resource): + @setup_required + @login_required + @account_initialization_required + def post(self): + parser = reqparse.RequestParser() + parser.add_argument("instruction", type=str, required=True, nullable=False, location="json") + parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json") + parser.add_argument("no_variable", type=bool, required=True, default=False, location="json") + parser.add_argument("code_language", type=str, required=False, default="javascript", location="json") + args = parser.parse_args() + + account = current_user + CODE_GENERATION_MAX_TOKENS = int(os.getenv("CODE_GENERATION_MAX_TOKENS", "1024")) + try: + code_result = LLMGenerator.generate_code( + tenant_id=account.current_tenant_id, + instruction=args["instruction"], + model_config=args["model_config"], + code_language=args["code_language"], + max_tokens=CODE_GENERATION_MAX_TOKENS, + ) + except ProviderTokenNotInitError as ex: + raise ProviderNotInitializeError(ex.description) + except QuotaExceededError: + raise ProviderQuotaExceededError() + except ModelCurrentlyNotSupportError: + raise ProviderModelCurrentlyNotSupportError() + except InvokeError as e: + raise CompletionRequestError(e.description) + + return code_result + + api.add_resource(RuleGenerateApi, "/rule-generate") +api.add_resource(RuleCodeGenerateApi, "/rule-code-generate") diff --git a/api/controllers/console/app/message.py b/api/controllers/console/app/message.py index 2fba3e0af0..b7a4c31a15 100644 --- a/api/controllers/console/app/message.py +++ b/api/controllers/console/app/message.py @@ -14,8 +14,11 @@ from controllers.console.app.error import ( ) from controllers.console.app.wraps import get_app_model from controllers.console.explore.error import AppSuggestedQuestionsAfterAnswerDisabledError -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check +from controllers.console.wraps import ( + account_initialization_required, + cloud_edition_billing_resource_check, + setup_required, +) from core.app.entities.app_invoke_entities import InvokeFrom from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError from core.model_runtime.errors.invoke import InvokeError @@ -105,6 +108,8 @@ class ChatMessageListApi(Resource): if rest_count > 0: has_more = True + history_messages = list(reversed(history_messages)) + return InfiniteScrollPagination(data=history_messages, limit=args["limit"], has_more=has_more) diff --git a/api/controllers/console/app/model_config.py 
b/api/controllers/console/app/model_config.py index f5068a4cd8..8ba195f5a5 100644 --- a/api/controllers/console/app/model_config.py +++ b/api/controllers/console/app/model_config.py @@ -6,8 +6,7 @@ from flask_restful import Resource from controllers.console import api from controllers.console.app.wraps import get_app_model -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from core.agent.entities import AgentToolEntity from core.tools.tool_manager import ToolManager from core.tools.utils.configuration import ToolParameterConfigurationManager diff --git a/api/controllers/console/app/ops_trace.py b/api/controllers/console/app/ops_trace.py index 374bd2b815..47b58396a1 100644 --- a/api/controllers/console/app/ops_trace.py +++ b/api/controllers/console/app/ops_trace.py @@ -2,8 +2,7 @@ from flask_restful import Resource, reqparse from controllers.console import api from controllers.console.app.error import TracingConfigCheckError, TracingConfigIsExist, TracingConfigNotExist -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from libs.login import login_required from services.ops_service import OpsService diff --git a/api/controllers/console/app/site.py b/api/controllers/console/app/site.py index 115a832da9..2f5645852f 100644 --- a/api/controllers/console/app/site.py +++ b/api/controllers/console/app/site.py @@ -7,8 +7,7 @@ from werkzeug.exceptions import Forbidden, NotFound from constants.languages import supported_language from controllers.console import api from controllers.console.app.wraps import get_app_model -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from extensions.ext_database import db from fields.app_fields import app_site_fields from libs.login import login_required diff --git a/api/controllers/console/app/statistic.py b/api/controllers/console/app/statistic.py index 3ef442812d..db5e282409 100644 --- a/api/controllers/console/app/statistic.py +++ b/api/controllers/console/app/statistic.py @@ -8,8 +8,7 @@ from flask_restful import Resource, reqparse from controllers.console import api from controllers.console.app.wraps import get_app_model -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from extensions.ext_database import db from libs.helper import DatetimeString from libs.login import login_required diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py index 1ffdceb2c8..75354218c4 100644 --- a/api/controllers/console/app/workflow.py +++ b/api/controllers/console/app/workflow.py @@ -9,8 +9,7 @@ import services from controllers.console import api from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync from controllers.console.app.wraps import get_app_model -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import 
account_initialization_required, setup_required from core.app.apps.base_app_queue_manager import AppQueueManager from core.app.entities.app_invoke_entities import InvokeFrom from factories import variable_factory diff --git a/api/controllers/console/app/workflow_app_log.py b/api/controllers/console/app/workflow_app_log.py index 629b7a8bf4..2940556f84 100644 --- a/api/controllers/console/app/workflow_app_log.py +++ b/api/controllers/console/app/workflow_app_log.py @@ -3,8 +3,7 @@ from flask_restful.inputs import int_range from controllers.console import api from controllers.console.app.wraps import get_app_model -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from fields.workflow_app_log_fields import workflow_app_log_pagination_fields from libs.login import login_required from models import App diff --git a/api/controllers/console/app/workflow_run.py b/api/controllers/console/app/workflow_run.py index 5824ead9c3..08ab61bbb9 100644 --- a/api/controllers/console/app/workflow_run.py +++ b/api/controllers/console/app/workflow_run.py @@ -3,8 +3,7 @@ from flask_restful.inputs import int_range from controllers.console import api from controllers.console.app.wraps import get_app_model -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from fields.workflow_run_fields import ( advanced_chat_workflow_run_pagination_fields, workflow_run_detail_fields, diff --git a/api/controllers/console/app/workflow_statistic.py b/api/controllers/console/app/workflow_statistic.py index f46af0f1ca..6c7c73707b 100644 --- a/api/controllers/console/app/workflow_statistic.py +++ b/api/controllers/console/app/workflow_statistic.py @@ -8,8 +8,7 @@ from flask_restful import Resource, reqparse from controllers.console import api from controllers.console.app.wraps import get_app_model -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from extensions.ext_database import db from libs.helper import DatetimeString from libs.login import login_required diff --git a/api/controllers/console/auth/data_source_bearer_auth.py b/api/controllers/console/auth/data_source_bearer_auth.py index 50db6eebc1..465c44e9b6 100644 --- a/api/controllers/console/auth/data_source_bearer_auth.py +++ b/api/controllers/console/auth/data_source_bearer_auth.py @@ -7,8 +7,7 @@ from controllers.console.auth.error import ApiKeyAuthFailedError from libs.login import login_required from services.auth.api_key_auth_service import ApiKeyAuthService -from ..setup import setup_required -from ..wraps import account_initialization_required +from ..wraps import account_initialization_required, setup_required class ApiKeyAuthDataSource(Resource): diff --git a/api/controllers/console/auth/data_source_oauth.py b/api/controllers/console/auth/data_source_oauth.py index fd31e5ccc3..3c3f45260a 100644 --- a/api/controllers/console/auth/data_source_oauth.py +++ b/api/controllers/console/auth/data_source_oauth.py @@ -11,8 +11,7 @@ from controllers.console import api from libs.login import login_required from libs.oauth_data_source import NotionOAuth -from ..setup import setup_required -from ..wraps 
import account_initialization_required +from ..wraps import account_initialization_required, setup_required def get_oauth_providers(): diff --git a/api/controllers/console/auth/forgot_password.py b/api/controllers/console/auth/forgot_password.py index 3c2de4612f..0cc115d0ee 100644 --- a/api/controllers/console/auth/forgot_password.py +++ b/api/controllers/console/auth/forgot_password.py @@ -15,7 +15,7 @@ from controllers.console.auth.error import ( PasswordMismatchError, ) from controllers.console.error import EmailSendIpLimitError, NotAllowedRegister -from controllers.console.setup import setup_required +from controllers.console.wraps import setup_required from events.tenant_event import tenant_was_created from extensions.ext_database import db from libs.helper import email, extract_remote_ip diff --git a/api/controllers/console/auth/login.py b/api/controllers/console/auth/login.py index 4821c543b7..e2e8f84920 100644 --- a/api/controllers/console/auth/login.py +++ b/api/controllers/console/auth/login.py @@ -1,11 +1,10 @@ from typing import cast import flask_login -from flask import redirect, request +from flask import request from flask_restful import Resource, reqparse import services -from configs import dify_config from constants.languages import languages from controllers.console import api from controllers.console.auth.error import ( @@ -21,7 +20,7 @@ from controllers.console.error import ( NotAllowedCreateWorkspace, NotAllowedRegister, ) -from controllers.console.setup import setup_required +from controllers.console.wraps import setup_required from events.tenant_event import tenant_was_created from libs.helper import email, extract_remote_ip from libs.password import valid_password @@ -196,10 +195,7 @@ class EmailCodeLoginApi(Resource): email=user_email, name=user_email, interface_language=languages[0] ) except WorkSpaceNotAllowedCreateError: - return redirect( - f"{dify_config.CONSOLE_WEB_URL}/signin" - "?message=Workspace not found, please contact system admin to invite you to join in a workspace." - ) + return NotAllowedCreateWorkspace() token_pair = AccountService.login(account, ip_address=extract_remote_ip(request)) AccountService.reset_login_error_rate_limit(args["email"]) return {"result": "success", "data": token_pair.model_dump()} diff --git a/api/controllers/console/auth/oauth.py b/api/controllers/console/auth/oauth.py index 45ae77a002..2ee8060502 100644 --- a/api/controllers/console/auth/oauth.py +++ b/api/controllers/console/auth/oauth.py @@ -96,17 +96,15 @@ class OAuthCallback(Resource): account = _generate_account(provider, user_info) except AccountNotFoundError: return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message=Account not found.") - except WorkSpaceNotFoundError: - return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message=Workspace not found.") - except WorkSpaceNotAllowedCreateError: + except (WorkSpaceNotFoundError, WorkSpaceNotAllowedCreateError): return redirect( f"{dify_config.CONSOLE_WEB_URL}/signin" "?message=Workspace not found, please contact system admin to invite you to join in a workspace." 
) # Check account status - if account.status in {AccountStatus.BANNED.value, AccountStatus.CLOSED.value}: - return {"error": "Account is banned or closed."}, 403 + if account.status == AccountStatus.BANNED.value: + return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message=Account is banned.") if account.status == AccountStatus.PENDING.value: account.status = AccountStatus.ACTIVE.value diff --git a/api/controllers/console/billing/billing.py b/api/controllers/console/billing/billing.py index 9a1d914869..4b0c82ae6c 100644 --- a/api/controllers/console/billing/billing.py +++ b/api/controllers/console/billing/billing.py @@ -2,8 +2,7 @@ from flask_login import current_user from flask_restful import Resource, reqparse from controllers.console import api -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required, only_edition_cloud +from controllers.console.wraps import account_initialization_required, only_edition_cloud, setup_required from libs.login import login_required from services.billing_service import BillingService diff --git a/api/controllers/console/datasets/data_source.py b/api/controllers/console/datasets/data_source.py index f024e3799c..06fb3a0a31 100644 --- a/api/controllers/console/datasets/data_source.py +++ b/api/controllers/console/datasets/data_source.py @@ -9,8 +9,7 @@ from sqlalchemy.orm import Session from werkzeug.exceptions import NotFound from controllers.console import api -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from core.indexing_runner import IndexingRunner from core.rag.extractor.entity.extract_setting import ExtractSetting from core.rag.extractor.notion_extractor import NotionExtractor diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index 16a77ed880..07ef0ce3e5 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -10,8 +10,7 @@ from controllers.console import api from controllers.console.apikey import api_key_fields, api_key_list from controllers.console.app.error import ProviderNotInitializeError from controllers.console.datasets.error import DatasetInUseError, DatasetNameDuplicateError, IndexingEstimateError -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError from core.indexing_runner import IndexingRunner from core.model_runtime.entities.model_entities import ModelType @@ -102,6 +101,13 @@ class DatasetListApi(Resource): help="type is required. 
Name must be between 1 to 40 characters.", type=_validate_name, ) + parser.add_argument( + "description", + type=str, + nullable=True, + required=False, + default="", + ) parser.add_argument( "indexing_technique", type=str, @@ -140,6 +146,7 @@ class DatasetListApi(Resource): dataset = DatasetService.create_empty_dataset( tenant_id=current_user.current_tenant_id, name=args["name"], + description=args["description"], indexing_technique=args["indexing_technique"], account=current_user, permission=DatasetPermissionEnum.ONLY_ME, @@ -619,6 +626,8 @@ class DatasetRetrievalSettingApi(Resource): | VectorType.PGVECTO_RS | VectorType.BAIDU | VectorType.VIKINGDB + | VectorType.UPSTASH + | VectorType.OCEANBASE ): return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]} case ( @@ -630,6 +639,8 @@ class DatasetRetrievalSettingApi(Resource): | VectorType.ORACLE | VectorType.ELASTICSEARCH | VectorType.PGVECTOR + | VectorType.TIDB_ON_QDRANT + | VectorType.COUCHBASE ): return { "retrieval_method": [ @@ -657,6 +668,8 @@ class DatasetRetrievalSettingMockApi(Resource): | VectorType.PGVECTO_RS | VectorType.BAIDU | VectorType.VIKINGDB + | VectorType.UPSTASH + | VectorType.OCEANBASE ): return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]} case ( @@ -667,6 +680,7 @@ class DatasetRetrievalSettingMockApi(Resource): | VectorType.MYSCALE | VectorType.ORACLE | VectorType.ELASTICSEARCH + | VectorType.COUCHBASE | VectorType.PGVECTOR ): return { diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index 31b4f7b741..521805a651 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -23,8 +23,11 @@ from controllers.console.datasets.error import ( InvalidActionError, InvalidMetadataError, ) -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check +from controllers.console.wraps import ( + account_initialization_required, + cloud_edition_billing_resource_check, + setup_required, +) from core.errors.error import ( LLMBadRequestError, ModelCurrentlyNotSupportError, diff --git a/api/controllers/console/datasets/datasets_segments.py b/api/controllers/console/datasets/datasets_segments.py index 08ea414288..5d8d664e41 100644 --- a/api/controllers/console/datasets/datasets_segments.py +++ b/api/controllers/console/datasets/datasets_segments.py @@ -11,11 +11,11 @@ import services from controllers.console import api from controllers.console.app.error import ProviderNotInitializeError from controllers.console.datasets.error import InvalidActionError, NoFileUploadedError, TooManyFilesError -from controllers.console.setup import setup_required from controllers.console.wraps import ( account_initialization_required, cloud_edition_billing_knowledge_limit_check, cloud_edition_billing_resource_check, + setup_required, ) from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError from core.model_manager import ModelManager diff --git a/api/controllers/console/datasets/external.py b/api/controllers/console/datasets/external.py index 2dc054cfbd..bc6e3687c1 100644 --- a/api/controllers/console/datasets/external.py +++ b/api/controllers/console/datasets/external.py @@ -6,8 +6,7 @@ from werkzeug.exceptions import Forbidden, InternalServerError, NotFound import services from controllers.console import api from controllers.console.datasets.error import 
DatasetNameDuplicateError -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from fields.dataset_fields import dataset_detail_fields from libs.login import login_required from services.dataset_service import DatasetService diff --git a/api/controllers/console/datasets/hit_testing.py b/api/controllers/console/datasets/hit_testing.py index 5c9bcef84c..495f511275 100644 --- a/api/controllers/console/datasets/hit_testing.py +++ b/api/controllers/console/datasets/hit_testing.py @@ -2,8 +2,7 @@ from flask_restful import Resource from controllers.console import api from controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from libs.login import login_required diff --git a/api/controllers/console/datasets/website.py b/api/controllers/console/datasets/website.py index e80ce17c68..9127c8af45 100644 --- a/api/controllers/console/datasets/website.py +++ b/api/controllers/console/datasets/website.py @@ -2,8 +2,7 @@ from flask_restful import Resource, reqparse from controllers.console import api from controllers.console.datasets.error import WebsiteCrawlError -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from libs.login import login_required from services.website_service import WebsiteService diff --git a/api/controllers/console/error.py b/api/controllers/console/error.py index a6d4c8e8ec..ed6a99a017 100644 --- a/api/controllers/console/error.py +++ b/api/controllers/console/error.py @@ -41,7 +41,7 @@ class AlreadyActivateError(BaseHTTPException): class NotAllowedCreateWorkspace(BaseHTTPException): - error_code = "unauthorized" + error_code = "not_allowed_create_workspace" description = "Workspace not found, please contact system admin to invite you to join in a workspace." 
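# Editor's note (aside, not part of this patch): renaming error_code from
# "unauthorized" to "not_allowed_create_workspace" is a client-visible API
# change; any frontend or SDK branch that matches the old "unauthorized"
# string for this error must be updated together with this patch.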
code = 400 diff --git a/api/controllers/console/explore/parameter.py b/api/controllers/console/explore/parameter.py index aab7dd7888..7c7580e3c6 100644 --- a/api/controllers/console/explore/parameter.py +++ b/api/controllers/console/explore/parameter.py @@ -21,7 +21,12 @@ class AppParameterApi(InstalledAppResource): "options": fields.List(fields.String), } - system_parameters_fields = {"image_file_size_limit": fields.String} + system_parameters_fields = { + "image_file_size_limit": fields.Integer, + "video_file_size_limit": fields.Integer, + "audio_file_size_limit": fields.Integer, + "file_size_limit": fields.Integer, + } parameters_fields = { "opening_statement": fields.String, @@ -82,7 +87,12 @@ class AppParameterApi(InstalledAppResource): } }, ), - "system_parameters": {"image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT}, + "system_parameters": { + "image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT, + "video_file_size_limit": dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT, + "audio_file_size_limit": dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT, + "file_size_limit": dify_config.UPLOAD_FILE_SIZE_LIMIT, + }, } diff --git a/api/controllers/console/extension.py b/api/controllers/console/extension.py index 5d6a8bf152..4ac0aa497e 100644 --- a/api/controllers/console/extension.py +++ b/api/controllers/console/extension.py @@ -3,8 +3,7 @@ from flask_restful import Resource, marshal_with, reqparse from constants import HIDDEN_VALUE from controllers.console import api -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from fields.api_based_extension_fields import api_based_extension_fields from libs.login import login_required from models.api_based_extension import APIBasedExtension diff --git a/api/controllers/console/feature.py b/api/controllers/console/feature.py index f0482f749d..70ab4ff865 100644 --- a/api/controllers/console/feature.py +++ b/api/controllers/console/feature.py @@ -5,8 +5,7 @@ from libs.login import login_required from services.feature_service import FeatureService from . 
import api -from .setup import setup_required -from .wraps import account_initialization_required, cloud_utm_record +from .wraps import account_initialization_required, cloud_utm_record, setup_required class FeatureApi(Resource): diff --git a/api/controllers/console/datasets/file.py b/api/controllers/console/files/__init__.py similarity index 57% rename from api/controllers/console/datasets/file.py rename to api/controllers/console/files/__init__.py index 17d2879875..69ee7eaabd 100644 --- a/api/controllers/console/datasets/file.py +++ b/api/controllers/console/files/__init__.py @@ -1,25 +1,26 @@ -import urllib.parse - from flask import request from flask_login import current_user -from flask_restful import Resource, marshal_with, reqparse +from flask_restful import Resource, marshal_with import services from configs import dify_config from constants import DOCUMENT_EXTENSIONS -from controllers.console import api -from controllers.console.datasets.error import ( +from controllers.common.errors import FilenameNotExistsError +from controllers.console.wraps import ( + account_initialization_required, + cloud_edition_billing_resource_check, + setup_required, +) +from fields.file_fields import file_fields, upload_config_fields +from libs.login import login_required +from services.file_service import FileService + +from .errors import ( FileTooLargeError, NoFileUploadedError, TooManyFilesError, UnsupportedFileTypeError, ) -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check -from core.helper import ssrf_proxy -from fields.file_fields import file_fields, remote_file_info_fields, upload_config_fields -from libs.login import login_required -from services.file_service import FileService PREVIEW_WORDS_LIMIT = 3000 @@ -44,21 +45,29 @@ class FileApi(Resource): @marshal_with(file_fields) @cloud_edition_billing_resource_check("documents") def post(self): - # get file from request file = request.files["file"] + source = request.form.get("source") - parser = reqparse.RequestParser() - parser.add_argument("source", type=str, required=False, location="args") - source = parser.parse_args().get("source") - - # check file if "file" not in request.files: raise NoFileUploadedError() if len(request.files) > 1: raise TooManyFilesError() + + if not file.filename: + raise FilenameNotExistsError + + if source not in ("datasets", None): + source = None + try: - upload_file = FileService.upload_file(file=file, user=current_user, source=source) + upload_file = FileService.upload_file( + filename=file.filename, + content=file.read(), + mimetype=file.mimetype, + user=current_user, + source=source, + ) except services.errors.file.FileTooLargeError as file_too_large_error: raise FileTooLargeError(file_too_large_error.description) except services.errors.file.UnsupportedFileTypeError: @@ -83,23 +92,3 @@ class FileSupportTypeApi(Resource): @account_initialization_required def get(self): return {"allowed_extensions": DOCUMENT_EXTENSIONS} - - -class RemoteFileInfoApi(Resource): - @marshal_with(remote_file_info_fields) - def get(self, url): - decoded_url = urllib.parse.unquote(url) - try: - response = ssrf_proxy.head(decoded_url) - return { - "file_type": response.headers.get("Content-Type", "application/octet-stream"), - "file_length": int(response.headers.get("Content-Length", 0)), - } - except Exception as e: - return {"error": str(e)}, 400 - - -api.add_resource(FileApi, "/files/upload") -api.add_resource(FilePreviewApi, 
"/files//preview") -api.add_resource(FileSupportTypeApi, "/files/support-type") -api.add_resource(RemoteFileInfoApi, "/remote-files/") diff --git a/api/controllers/console/files/errors.py b/api/controllers/console/files/errors.py new file mode 100644 index 0000000000..1654ef2cf4 --- /dev/null +++ b/api/controllers/console/files/errors.py @@ -0,0 +1,25 @@ +from libs.exception import BaseHTTPException + + +class FileTooLargeError(BaseHTTPException): + error_code = "file_too_large" + description = "File size exceeded. {message}" + code = 413 + + +class UnsupportedFileTypeError(BaseHTTPException): + error_code = "unsupported_file_type" + description = "File type not allowed." + code = 415 + + +class TooManyFilesError(BaseHTTPException): + error_code = "too_many_files" + description = "Only one file is allowed." + code = 400 + + +class NoFileUploadedError(BaseHTTPException): + error_code = "no_file_uploaded" + description = "Please upload your file." + code = 400 diff --git a/api/controllers/console/remote_files.py b/api/controllers/console/remote_files.py new file mode 100644 index 0000000000..42d6e25416 --- /dev/null +++ b/api/controllers/console/remote_files.py @@ -0,0 +1,71 @@ +import urllib.parse +from typing import cast + +from flask_login import current_user +from flask_restful import Resource, marshal_with, reqparse + +from controllers.common import helpers +from core.file import helpers as file_helpers +from core.helper import ssrf_proxy +from fields.file_fields import file_fields_with_signed_url, remote_file_info_fields +from models.account import Account +from services.file_service import FileService + + +class RemoteFileInfoApi(Resource): + @marshal_with(remote_file_info_fields) + def get(self, url): + decoded_url = urllib.parse.unquote(url) + try: + response = ssrf_proxy.head(decoded_url) + return { + "file_type": response.headers.get("Content-Type", "application/octet-stream"), + "file_length": int(response.headers.get("Content-Length", 0)), + } + except Exception as e: + return {"error": str(e)}, 400 + + +class RemoteFileUploadApi(Resource): + @marshal_with(file_fields_with_signed_url) + def post(self): + parser = reqparse.RequestParser() + parser.add_argument("url", type=str, required=True, help="URL is required") + args = parser.parse_args() + + url = args["url"] + + response = ssrf_proxy.head(url) + response.raise_for_status() + + file_info = helpers.guess_file_info_from_response(response) + + if not FileService.is_file_size_within_limit(extension=file_info.extension, file_size=file_info.size): + return {"error": "File size exceeded"}, 400 + + response = ssrf_proxy.get(url) + response.raise_for_status() + content = response.content + + try: + user = cast(Account, current_user) + upload_file = FileService.upload_file( + filename=file_info.filename, + content=content, + mimetype=file_info.mimetype, + user=user, + source_url=url, + ) + except Exception as e: + return {"error": str(e)}, 400 + + return { + "id": upload_file.id, + "name": upload_file.name, + "size": upload_file.size, + "extension": upload_file.extension, + "url": file_helpers.get_signed_file_url(upload_file_id=upload_file.id), + "mime_type": upload_file.mime_type, + "created_by": upload_file.created_by, + "created_at": upload_file.created_at, + }, 201 diff --git a/api/controllers/console/setup.py b/api/controllers/console/setup.py index d229bb2a19..e1f19a87a3 100644 --- a/api/controllers/console/setup.py +++ b/api/controllers/console/setup.py @@ -1,5 +1,3 @@ -from functools import wraps - from flask import request 
from flask_restful import Resource, reqparse @@ -10,7 +8,7 @@ from models.model import DifySetup, db from services.account_service import RegisterService, TenantService from . import api -from .error import AlreadySetupError, NotInitValidateError, NotSetupError +from .error import AlreadySetupError, NotInitValidateError from .init_validate import get_init_validate_status from .wraps import only_edition_self_hosted @@ -52,21 +50,6 @@ class SetupApi(Resource): return {"result": "success"}, 201 -def setup_required(view): - @wraps(view) - def decorated(*args, **kwargs): - # check setup - if not get_init_validate_status(): - raise NotInitValidateError() - - elif not get_setup_status(): - raise NotSetupError() - - return view(*args, **kwargs) - - return decorated - - def get_setup_status(): if dify_config.EDITION == "SELF_HOSTED": return db.session.query(DifySetup).first() diff --git a/api/controllers/console/tag/tags.py b/api/controllers/console/tag/tags.py index de30547e93..ccd3293a62 100644 --- a/api/controllers/console/tag/tags.py +++ b/api/controllers/console/tag/tags.py @@ -4,8 +4,7 @@ from flask_restful import Resource, marshal_with, reqparse from werkzeug.exceptions import Forbidden from controllers.console import api -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from fields.tag_fields import tag_fields from libs.login import login_required from models.model import Tag diff --git a/api/controllers/console/version.py b/api/controllers/console/version.py index deda1a0d02..7dea8e554e 100644 --- a/api/controllers/console/version.py +++ b/api/controllers/console/version.py @@ -3,6 +3,7 @@ import logging import requests from flask_restful import Resource, reqparse +from packaging import version from configs import dify_config @@ -47,43 +48,15 @@ class VersionApi(Resource): def _has_new_version(*, latest_version: str, current_version: str) -> bool: - def parse_version(version: str) -> tuple: - # Split version into parts and pre-release suffix if any - parts = version.split("-") - version_parts = parts[0].split(".") - pre_release = parts[1] if len(parts) > 1 else None + try: + latest = version.parse(latest_version) + current = version.parse(current_version) - # Validate version format - if len(version_parts) != 3: - raise ValueError(f"Invalid version format: {version}") - - try: - # Convert version parts to integers - major, minor, patch = map(int, version_parts) - return (major, minor, patch, pre_release) - except ValueError: - raise ValueError(f"Invalid version format: {version}") - - latest = parse_version(latest_version) - current = parse_version(current_version) - - # Compare major, minor, and patch versions - for latest_part, current_part in zip(latest[:3], current[:3]): - if latest_part > current_part: - return True - elif latest_part < current_part: - return False - - # If versions are equal, check pre-release suffixes - if latest[3] is None and current[3] is not None: - return True - elif latest[3] is not None and current[3] is None: + # Compare versions + return latest > current + except version.InvalidVersion: + logging.warning(f"Invalid version format: latest={latest_version}, current={current_version}") return False - elif latest[3] is not None and current[3] is not None: - # Simple string comparison for pre-release versions - return latest[3] > current[3] - - return False api.add_resource(VersionApi, "/version") diff 
--git a/api/controllers/console/workspace/account.py b/api/controllers/console/workspace/account.py index 97f5625726..aabc417759 100644 --- a/api/controllers/console/workspace/account.py +++ b/api/controllers/console/workspace/account.py @@ -8,14 +8,13 @@ from flask_restful import Resource, fields, marshal_with, reqparse from configs import dify_config from constants.languages import supported_language from controllers.console import api -from controllers.console.setup import setup_required from controllers.console.workspace.error import ( AccountAlreadyInitedError, CurrentPasswordIncorrectError, InvalidInvitationCodeError, RepeatPasswordNotMatchError, ) -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from extensions.ext_database import db from fields.member_fields import account_fields from libs.helper import TimestampField, timezone diff --git a/api/controllers/console/workspace/load_balancing_config.py b/api/controllers/console/workspace/load_balancing_config.py index 9d2697f11d..114905cf1d 100644 --- a/api/controllers/console/workspace/load_balancing_config.py +++ b/api/controllers/console/workspace/load_balancing_config.py @@ -2,8 +2,7 @@ from flask_restful import Resource, reqparse from werkzeug.exceptions import Forbidden from controllers.console import api -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.errors.validate import CredentialsValidateFailedError from libs.login import current_user, login_required diff --git a/api/controllers/console/workspace/members.py b/api/controllers/console/workspace/members.py index 3e87bebf59..8f694c65e0 100644 --- a/api/controllers/console/workspace/members.py +++ b/api/controllers/console/workspace/members.py @@ -4,8 +4,11 @@ from flask_restful import Resource, abort, marshal_with, reqparse import services from configs import dify_config from controllers.console import api -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check +from controllers.console.wraps import ( + account_initialization_required, + cloud_edition_billing_resource_check, + setup_required, +) from extensions.ext_database import db from fields.member_fields import account_with_role_list_fields from libs.login import login_required diff --git a/api/controllers/console/workspace/model_providers.py b/api/controllers/console/workspace/model_providers.py index b9f13e3ce4..b9612c0f9d 100644 --- a/api/controllers/console/workspace/model_providers.py +++ b/api/controllers/console/workspace/model_providers.py @@ -6,8 +6,7 @@ from flask_restful import Resource, reqparse from werkzeug.exceptions import Forbidden from controllers.console import api -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.utils.encoders import jsonable_encoder diff --git a/api/controllers/console/workspace/models.py 
b/api/controllers/console/workspace/models.py index 7bbedc8828..daa3455e2f 100644 --- a/api/controllers/console/workspace/models.py +++ b/api/controllers/console/workspace/models.py @@ -5,8 +5,7 @@ from flask_restful import Resource, reqparse from werkzeug.exceptions import Forbidden from controllers.console import api -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.utils.encoders import jsonable_encoder diff --git a/api/controllers/console/workspace/tool_providers.py b/api/controllers/console/workspace/tool_providers.py index 3959e59307..910b991de1 100644 --- a/api/controllers/console/workspace/tool_providers.py +++ b/api/controllers/console/workspace/tool_providers.py @@ -7,8 +7,7 @@ from werkzeug.exceptions import Forbidden from configs import dify_config from controllers.console import api -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required +from controllers.console.wraps import account_initialization_required, setup_required from core.model_runtime.utils.encoders import jsonable_encoder from libs.helper import alphanumeric, uuid_value from libs.login import login_required diff --git a/api/controllers/console/workspace/workspace.py b/api/controllers/console/workspace/workspace.py index 96f866fca2..76d76f6b58 100644 --- a/api/controllers/console/workspace/workspace.py +++ b/api/controllers/console/workspace/workspace.py @@ -6,6 +6,7 @@ from flask_restful import Resource, fields, inputs, marshal, marshal_with, reqpa from werkzeug.exceptions import Unauthorized import services +from controllers.common.errors import FilenameNotExistsError from controllers.console import api from controllers.console.admin import admin_required from controllers.console.datasets.error import ( @@ -15,8 +16,11 @@ from controllers.console.datasets.error import ( UnsupportedFileTypeError, ) from controllers.console.error import AccountNotLinkTenantError -from controllers.console.setup import setup_required -from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check +from controllers.console.wraps import ( + account_initialization_required, + cloud_edition_billing_resource_check, + setup_required, +) from extensions.ext_database import db from libs.helper import TimestampField from libs.login import login_required @@ -193,12 +197,20 @@ class WebappLogoWorkspaceApi(Resource): if len(request.files) > 1: raise TooManyFilesError() + if not file.filename: + raise FilenameNotExistsError + extension = file.filename.split(".")[-1] if extension.lower() not in {"svg", "png"}: raise UnsupportedFileTypeError() try: - upload_file = FileService.upload_file(file=file, user=current_user) + upload_file = FileService.upload_file( + filename=file.filename, + content=file.read(), + mimetype=file.mimetype, + user=current_user, + ) except services.errors.file.FileTooLargeError as file_too_large_error: raise FileTooLargeError(file_too_large_error.description) diff --git a/api/controllers/console/wraps.py b/api/controllers/console/wraps.py index 46223d104f..9f294cb93c 100644 --- a/api/controllers/console/wraps.py +++ b/api/controllers/console/wraps.py @@ -1,4 +1,5 @@ import json +import os 
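[Editor's note — illustrative sketch, not part of this patch. The wraps.py hunk continuing below is where setup_required now lives, alongside the other view guards. For readers unfamiliar with the pattern, a self-contained sketch of such a guard decorator; SETUP_DONE and the other names are hypothetical stand-ins for the real DifySetup lookup.]

```python
from functools import wraps

SETUP_DONE = False  # hypothetical stand-in for the DifySetup row check


def setup_required_sketch(view):
    @wraps(view)  # preserves the wrapped view's name for routing and debugging
    def decorated(*args, **kwargs):
        if not SETUP_DONE:
            raise RuntimeError("Dify has not been set up yet")
        return view(*args, **kwargs)

    return decorated


@setup_required_sketch
def protected_view():
    return "ok"
```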
from functools import wraps from flask import abort, request @@ -6,9 +7,12 @@ from flask_login import current_user from configs import dify_config from controllers.console.workspace.error import AccountNotInitializedError +from models.model import DifySetup from services.feature_service import FeatureService from services.operation_service import OperationService +from .error import NotInitValidateError, NotSetupError + def account_initialization_required(view): @wraps(view) @@ -124,3 +128,17 @@ def cloud_utm_record(view): return view(*args, **kwargs) return decorated + + +def setup_required(view): + @wraps(view) + def decorated(*args, **kwargs): + # check setup + if dify_config.EDITION == "SELF_HOSTED" and os.environ.get("INIT_PASSWORD") and not DifySetup.query.first(): + raise NotInitValidateError() + elif dify_config.EDITION == "SELF_HOSTED" and not DifySetup.query.first(): + raise NotSetupError() + + return view(*args, **kwargs) + + return decorated diff --git a/api/controllers/files/image_preview.py b/api/controllers/files/image_preview.py index 4b2d61e7c3..6b3ac93cdf 100644 --- a/api/controllers/files/image_preview.py +++ b/api/controllers/files/image_preview.py @@ -1,5 +1,5 @@ from flask import Response, request -from flask_restful import Resource +from flask_restful import Resource, reqparse from werkzeug.exceptions import NotFound import services @@ -41,24 +41,39 @@ class FilePreviewApi(Resource): def get(self, file_id): file_id = str(file_id) - timestamp = request.args.get("timestamp") - nonce = request.args.get("nonce") - sign = request.args.get("sign") + parser = reqparse.RequestParser() + parser.add_argument("timestamp", type=str, required=True, location="args") + parser.add_argument("nonce", type=str, required=True, location="args") + parser.add_argument("sign", type=str, required=True, location="args") + parser.add_argument("as_attachment", type=bool, required=False, default=False, location="args") - if not timestamp or not nonce or not sign: + args = parser.parse_args() + + if not args["timestamp"] or not args["nonce"] or not args["sign"]: return {"content": "Invalid request."}, 400 try: - generator, mimetype = FileService.get_signed_file_preview( + generator, upload_file = FileService.get_file_generator_by_file_id( file_id=file_id, - timestamp=timestamp, - nonce=nonce, - sign=sign, + timestamp=args["timestamp"], + nonce=args["nonce"], + sign=args["sign"], ) except services.errors.file.UnsupportedFileTypeError: raise UnsupportedFileTypeError() - return Response(generator, mimetype=mimetype) + response = Response( + generator, + mimetype=upload_file.mime_type, + direct_passthrough=True, + headers={}, + ) + if upload_file.size > 0: + response.headers["Content-Length"] = str(upload_file.size) + if args["as_attachment"]: + response.headers["Content-Disposition"] = f"attachment; filename={upload_file.name}" + + return response class WorkspaceWebappLogoApi(Resource): diff --git a/api/controllers/inner_api/workspace/workspace.py b/api/controllers/inner_api/workspace/workspace.py index 302eb684e8..64cb5e54ff 100644 --- a/api/controllers/inner_api/workspace/workspace.py +++ b/api/controllers/inner_api/workspace/workspace.py @@ -1,6 +1,6 @@ from flask_restful import Resource, reqparse -from controllers.console.setup import setup_required +from controllers.console.wraps import setup_required from controllers.inner_api import api from controllers.inner_api.wraps import enterprise_inner_api_only from events.tenant_event import tenant_was_created @@ -21,7 +21,7 @@ class 
EnterpriseWorkspace(Resource): if account is None: return {"message": "owner account not found."}, 404 - tenant = TenantService.create_tenant(args["name"]) + tenant = TenantService.create_tenant(args["name"], is_from_dashboard=True) TenantService.create_tenant_member(tenant, account, role="owner") tenant_was_created.send(tenant) diff --git a/api/controllers/service_api/app/app.py b/api/controllers/service_api/app/app.py index f7c091217b..9a4cdc26cd 100644 --- a/api/controllers/service_api/app/app.py +++ b/api/controllers/service_api/app/app.py @@ -21,7 +21,12 @@ class AppParameterApi(Resource): "options": fields.List(fields.String), } - system_parameters_fields = {"image_file_size_limit": fields.String} + system_parameters_fields = { + "image_file_size_limit": fields.Integer, + "video_file_size_limit": fields.Integer, + "audio_file_size_limit": fields.Integer, + "file_size_limit": fields.Integer, + } parameters_fields = { "opening_statement": fields.String, @@ -81,7 +86,12 @@ class AppParameterApi(Resource): } }, ), - "system_parameters": {"image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT}, + "system_parameters": { + "image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT, + "video_file_size_limit": dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT, + "audio_file_size_limit": dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT, + "file_size_limit": dify_config.UPLOAD_FILE_SIZE_LIMIT, + }, } diff --git a/api/controllers/service_api/app/file.py b/api/controllers/service_api/app/file.py index e0a772eb31..b0126058de 100644 --- a/api/controllers/service_api/app/file.py +++ b/api/controllers/service_api/app/file.py @@ -2,6 +2,7 @@ from flask import request from flask_restful import Resource, marshal_with import services +from controllers.common.errors import FilenameNotExistsError from controllers.service_api import api from controllers.service_api.app.error import ( FileTooLargeError, @@ -31,8 +32,17 @@ class FileApi(Resource): if len(request.files) > 1: raise TooManyFilesError() + if not file.filename: + raise FilenameNotExistsError + try: - upload_file = FileService.upload_file(file, end_user) + upload_file = FileService.upload_file( + filename=file.filename, + content=file.read(), + mimetype=file.mimetype, + user=end_user, + source="datasets", + ) except services.errors.file.FileTooLargeError as file_too_large_error: raise FileTooLargeError(file_too_large_error.description) except services.errors.file.UnsupportedFileTypeError: diff --git a/api/controllers/service_api/dataset/dataset.py b/api/controllers/service_api/dataset/dataset.py index f076cff6c8..799fccc228 100644 --- a/api/controllers/service_api/dataset/dataset.py +++ b/api/controllers/service_api/dataset/dataset.py @@ -66,6 +66,13 @@ class DatasetListApi(DatasetApiResource): help="type is required. 
Name must be between 1 to 40 characters.", type=_validate_name, ) + parser.add_argument( + "description", + type=str, + nullable=True, + required=False, + default="", + ) parser.add_argument( "indexing_technique", type=str, @@ -108,6 +115,7 @@ class DatasetListApi(DatasetApiResource): dataset = DatasetService.create_empty_dataset( tenant_id=tenant_id, name=args["name"], + description=args["description"], indexing_technique=args["indexing_technique"], account=current_user, permission=args["permission"], diff --git a/api/controllers/service_api/dataset/document.py b/api/controllers/service_api/dataset/document.py index fb48a6c76c..5c3fc7b241 100644 --- a/api/controllers/service_api/dataset/document.py +++ b/api/controllers/service_api/dataset/document.py @@ -6,6 +6,7 @@ from sqlalchemy import desc from werkzeug.exceptions import NotFound import services.dataset_service +from controllers.common.errors import FilenameNotExistsError from controllers.service_api import api from controllers.service_api.app.error import ProviderNotInitializeError from controllers.service_api.dataset.error import ( @@ -55,7 +56,12 @@ class DocumentAddByTextApi(DatasetApiResource): if not dataset.indexing_technique and not args["indexing_technique"]: raise ValueError("indexing_technique is required.") - upload_file = FileService.upload_text(args.get("text"), args.get("name")) + text = args.get("text") + name = args.get("name") + if text is None or name is None: + raise ValueError("Both 'text' and 'name' must be non-null values.") + + upload_file = FileService.upload_text(text=str(text), text_name=str(name)) data_source = { "type": "upload_file", "info_list": {"data_source_type": "upload_file", "file_info_list": {"file_ids": [upload_file.id]}}, @@ -104,7 +110,11 @@ class DocumentUpdateByTextApi(DatasetApiResource): raise ValueError("Dataset is not exist.") if args["text"]: - upload_file = FileService.upload_text(args.get("text"), args.get("name")) + text = args.get("text") + name = args.get("name") + if text is None or name is None: + raise ValueError("Both text and name must be strings.") + upload_file = FileService.upload_text(text=str(text), text_name=str(name)) data_source = { "type": "upload_file", "info_list": {"data_source_type": "upload_file", "file_info_list": {"file_ids": [upload_file.id]}}, @@ -163,7 +173,16 @@ class DocumentAddByFileApi(DatasetApiResource): if len(request.files) > 1: raise TooManyFilesError() - upload_file = FileService.upload_file(file, current_user) + if not file.filename: + raise FilenameNotExistsError + + upload_file = FileService.upload_file( + filename=file.filename, + content=file.read(), + mimetype=file.mimetype, + user=current_user, + source="datasets", + ) data_source = {"type": "upload_file", "info_list": {"file_info_list": {"file_ids": [upload_file.id]}}} args["data_source"] = data_source # validate args @@ -212,7 +231,16 @@ class DocumentUpdateByFileApi(DatasetApiResource): if len(request.files) > 1: raise TooManyFilesError() - upload_file = FileService.upload_file(file, current_user) + if not file.filename: + raise FilenameNotExistsError + + upload_file = FileService.upload_file( + filename=file.filename, + content=file.read(), + mimetype=file.mimetype, + user=current_user, + source="datasets", + ) data_source = {"type": "upload_file", "info_list": {"file_info_list": {"file_ids": [upload_file.id]}}} args["data_source"] = data_source # validate args @@ -230,7 +258,7 @@ class DocumentUpdateByFileApi(DatasetApiResource): except ProviderTokenNotInitError as ex: raise 
ProviderNotInitializeError(ex.description) document = documents[0] - documents_and_batch_fields = {"document": marshal(document, document_fields), "batch": batch} + documents_and_batch_fields = {"document": marshal(document, document_fields), "batch": document.batch} return documents_and_batch_fields, 200 @@ -331,10 +359,26 @@ class DocumentIndexingStatusApi(DatasetApiResource): return data -api.add_resource(DocumentAddByTextApi, "/datasets/<uuid:dataset_id>/document/create_by_text") -api.add_resource(DocumentAddByFileApi, "/datasets/<uuid:dataset_id>/document/create_by_file") -api.add_resource(DocumentUpdateByTextApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/update_by_text") -api.add_resource(DocumentUpdateByFileApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/update_by_file") +api.add_resource( + DocumentAddByTextApi, + "/datasets/<uuid:dataset_id>/document/create_by_text", + "/datasets/<uuid:dataset_id>/document/create-by-text", +) +api.add_resource( + DocumentAddByFileApi, + "/datasets/<uuid:dataset_id>/document/create_by_file", + "/datasets/<uuid:dataset_id>/document/create-by-file", +) +api.add_resource( + DocumentUpdateByTextApi, + "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/update_by_text", + "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/update-by-text", +) +api.add_resource( + DocumentUpdateByFileApi, + "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/update_by_file", + "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/update-by-file", +) api.add_resource(DocumentDeleteApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>") api.add_resource(DocumentListApi, "/datasets/<uuid:dataset_id>/documents") api.add_resource(DocumentIndexingStatusApi, "/datasets/<uuid:dataset_id>/documents/<string:batch>/indexing-status") diff --git a/api/controllers/service_api/dataset/hit_testing.py b/api/controllers/service_api/dataset/hit_testing.py index 9c9a4302c9..465f71bf03 100644 --- a/api/controllers/service_api/dataset/hit_testing.py +++ b/api/controllers/service_api/dataset/hit_testing.py @@ -14,4 +14,4 @@ class HitTestingApi(DatasetApiResource, DatasetsHitTestingBase): return self.perform_hit_testing(dataset, args) -api.add_resource(HitTestingApi, "/datasets/<uuid:dataset_id>/hit-testing") +api.add_resource(HitTestingApi, "/datasets/<uuid:dataset_id>/hit-testing", "/datasets/<uuid:dataset_id>/retrieve") diff --git a/api/controllers/web/__init__.py b/api/controllers/web/__init__.py index 630b9468a7..50a04a6254 100644 --- a/api/controllers/web/__init__.py +++ b/api/controllers/web/__init__.py @@ -2,8 +2,17 @@ from flask import Blueprint from libs.external_api import ExternalApi +from .files import FileApi +from .remote_files import RemoteFileInfoApi, RemoteFileUploadApi + bp = Blueprint("web", __name__, url_prefix="/api") api = ExternalApi(bp) +# Files +api.add_resource(FileApi, "/files/upload") -from . import app, audio, completion, conversation, feature, file, message, passport, saved_message, site, workflow +# Remote files +api.add_resource(RemoteFileInfoApi, "/remote-files/<path:url>") +api.add_resource(RemoteFileUploadApi, "/remote-files/upload") + +from . import app, audio, completion, conversation, feature, message, passport, saved_message, site, workflow diff --git a/api/controllers/web/app.py b/api/controllers/web/app.py index 20b4e4674c..974d2cff94 100644 --- a/api/controllers/web/app.py +++ b/api/controllers/web/app.py @@ -21,7 +21,12 @@ class AppParameterApi(WebApiResource): "options": fields.List(fields.String), } - system_parameters_fields = {"image_file_size_limit": fields.String} + system_parameters_fields = { + "image_file_size_limit": fields.Integer, + "video_file_size_limit": fields.Integer, + "audio_file_size_limit": fields.Integer, + "file_size_limit": fields.Integer, + } parameters_fields = { "opening_statement": fields.String, @@ -80,7 +85,12 @@ class AppParameterApi(WebApiResource): } }, ), - "system_parameters": {"image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT}, + "system_parameters": { + "image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT, + "video_file_size_limit": dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT, + "audio_file_size_limit": dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT, + "file_size_limit": dify_config.UPLOAD_FILE_SIZE_LIMIT, + }, } diff --git a/api/controllers/web/file.py b/api/controllers/web/file.py deleted file mode 100644 index 6eeaa0e3f0..0000000000 --- a/api/controllers/web/file.py +++ /dev/null @@ -1,56 +0,0 @@ -import urllib.parse - -from flask import request -from flask_restful import marshal_with, reqparse - -import services -from controllers.web import api -from controllers.web.error import FileTooLargeError, NoFileUploadedError, TooManyFilesError, UnsupportedFileTypeError -from controllers.web.wraps import WebApiResource -from core.helper import ssrf_proxy -from fields.file_fields import file_fields, remote_file_info_fields -from services.file_service import FileService - - -class FileApi(WebApiResource): - @marshal_with(file_fields) - def post(self, app_model, end_user): - # get file from request - file = request.files["file"] - - parser = reqparse.RequestParser() - parser.add_argument("source", type=str, required=False, location="args") - source = parser.parse_args().get("source") - - # check file - if "file" not in request.files: - raise NoFileUploadedError() - - if len(request.files) > 1: - raise TooManyFilesError() - try: - upload_file = FileService.upload_file(file=file, user=end_user, source=source) - except services.errors.file.FileTooLargeError as file_too_large_error: - raise FileTooLargeError(file_too_large_error.description) - except services.errors.file.UnsupportedFileTypeError: - raise UnsupportedFileTypeError() - - return upload_file, 201 - - -class RemoteFileInfoApi(WebApiResource): - @marshal_with(remote_file_info_fields) - def get(self, url): - decoded_url = urllib.parse.unquote(url) - try: - response = ssrf_proxy.head(decoded_url) - return { - "file_type": response.headers.get("Content-Type", "application/octet-stream"), - "file_length": int(response.headers.get("Content-Length", -1)), - } - except Exception as e: - return {"error": str(e)}, 400 - - -api.add_resource(FileApi, "/files/upload") -api.add_resource(RemoteFileInfoApi, "/remote-files/<path:url>") diff --git a/api/controllers/web/files.py b/api/controllers/web/files.py new file mode 100644 index 0000000000..a282fc63a8 --- /dev/null +++ b/api/controllers/web/files.py @@ -0,0 +1,43 @@ +from flask import request +from flask_restful import marshal_with + +import services +from controllers.common.errors import FilenameNotExistsError +from controllers.web.error import FileTooLargeError, NoFileUploadedError,
TooManyFilesError, UnsupportedFileTypeError +from controllers.web.wraps import WebApiResource +from fields.file_fields import file_fields +from services.file_service import FileService + + +class FileApi(WebApiResource): + @marshal_with(file_fields) + def post(self, app_model, end_user): + if "file" not in request.files: + raise NoFileUploadedError() + + if len(request.files) > 1: + raise TooManyFilesError() + + file = request.files["file"] + source = request.form.get("source") + + if not file.filename: + raise FilenameNotExistsError + + if source not in ("datasets", None): + source = None + + try: + upload_file = FileService.upload_file( + filename=file.filename, + content=file.read(), + mimetype=file.mimetype, + user=end_user, + source=source, + ) + except services.errors.file.FileTooLargeError as file_too_large_error: + raise FileTooLargeError(file_too_large_error.description) + except services.errors.file.UnsupportedFileTypeError: + raise UnsupportedFileTypeError() + + return upload_file, 201 diff --git a/api/controllers/web/remote_files.py b/api/controllers/web/remote_files.py new file mode 100644 index 0000000000..cb529340af --- /dev/null +++ b/api/controllers/web/remote_files.py @@ -0,0 +1,69 @@ +import urllib.parse + +from flask_login import current_user +from flask_restful import marshal_with, reqparse + +from controllers.common import helpers +from controllers.web.wraps import WebApiResource +from core.file import helpers as file_helpers +from core.helper import ssrf_proxy +from fields.file_fields import file_fields_with_signed_url, remote_file_info_fields +from services.file_service import FileService + + +class RemoteFileInfoApi(WebApiResource): + @marshal_with(remote_file_info_fields) + def get(self, url): + decoded_url = urllib.parse.unquote(url) + try: + response = ssrf_proxy.head(decoded_url) + return { + "file_type": response.headers.get("Content-Type", "application/octet-stream"), + "file_length": int(response.headers.get("Content-Length", -1)), + } + except Exception as e: + return {"error": str(e)}, 400 + + +class RemoteFileUploadApi(WebApiResource): + @marshal_with(file_fields_with_signed_url) + def post(self): + parser = reqparse.RequestParser() + parser.add_argument("url", type=str, required=True, help="URL is required") + args = parser.parse_args() + + url = args["url"] + + response = ssrf_proxy.head(url) + response.raise_for_status() + + file_info = helpers.guess_file_info_from_response(response) + + if not FileService.is_file_size_within_limit(extension=file_info.extension, file_size=file_info.size): + return {"error": "File size exceeded"}, 400 + + response = ssrf_proxy.get(url) + response.raise_for_status() + content = response.content + + try: + upload_file = FileService.upload_file( + filename=file_info.filename, + content=content, + mimetype=file_info.mimetype, + user=current_user, + source_url=url, + ) + except Exception as e: + return {"error": str(e)}, 400 + + return { + "id": upload_file.id, + "name": upload_file.name, + "size": upload_file.size, + "extension": upload_file.extension, + "url": file_helpers.get_signed_file_url(upload_file_id=upload_file.id), + "mime_type": upload_file.mime_type, + "created_by": upload_file.created_by, + "created_at": upload_file.created_at, + }, 201 diff --git a/api/core/agent/base_agent_runner.py b/api/core/agent/base_agent_runner.py index b271048839..5250961597 100644 --- a/api/core/agent/base_agent_runner.py +++ b/api/core/agent/base_agent_runner.py @@ -156,6 +156,12 @@ class BaseAgentRunner(AppRunner): continue
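[Editor's note — aside, not part of this patch. In the new web files controller above, the presence checks must run before `request.files["file"]` is subscripted: werkzeug's MultiDict raises BadRequestKeyError (a generic 400) for a missing key, so a custom NoFileUploadedError placed after the access would be unreachable. A minimal demonstration with hypothetical data:]

```python
from werkzeug.datastructures import MultiDict
from werkzeug.exceptions import BadRequestKeyError

files = MultiDict()  # simulates a multipart request with no "file" part

try:
    files["file"]  # raises BadRequestKeyError before any custom check can run
except BadRequestKeyError:
    print("werkzeug already aborted with a generic 400")

# Safe order: test membership first, then subscript.
if "file" not in files:
    print("custom no-file-uploaded error path is reachable here")
```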
parameter_type = parameter.type.as_normal_type() + if parameter.type in { + ToolParameter.ToolParameterType.SYSTEM_FILES, + ToolParameter.ToolParameterType.FILE, + ToolParameter.ToolParameterType.FILES, + }: + continue enum = [] if parameter.type == ToolParameter.ToolParameterType.SELECT: enum = [option.value for option in parameter.options] @@ -243,6 +249,12 @@ class BaseAgentRunner(AppRunner): continue parameter_type = parameter.type.as_normal_type() + if parameter.type in { + ToolParameter.ToolParameterType.SYSTEM_FILES, + ToolParameter.ToolParameterType.FILE, + ToolParameter.ToolParameterType.FILES, + }: + continue enum = [] if parameter.type == ToolParameter.ToolParameterType.SELECT: enum = [option.value for option in parameter.options] diff --git a/api/core/app/apps/message_based_app_generator.py b/api/core/app/apps/message_based_app_generator.py index 2b5597e055..bae64368e3 100644 --- a/api/core/app/apps/message_based_app_generator.py +++ b/api/core/app/apps/message_based_app_generator.py @@ -27,6 +27,7 @@ from core.app.task_pipeline.easy_ui_based_generate_task_pipeline import EasyUIBa from core.prompt.utils.prompt_template_parser import PromptTemplateParser from extensions.ext_database import db from models import Account +from models.enums import CreatedByRole from models.model import App, AppMode, AppModelConfig, Conversation, EndUser, Message, MessageFile from services.errors.app_model_config import AppModelConfigBrokenError from services.errors.conversation import ConversationCompletedError, ConversationNotExistsError @@ -240,7 +241,7 @@ class MessageBasedAppGenerator(BaseAppGenerator): belongs_to="user", url=file.remote_url, upload_file_id=file.related_id, - created_by_role=("account" if account_id else "end_user"), + created_by_role=(CreatedByRole.ACCOUNT if account_id else CreatedByRole.END_USER), created_by=account_id or end_user_id or "", ) db.session.add(message_file) diff --git a/api/core/file/file_manager.py b/api/core/file/file_manager.py index 0c6ce8ce75..b69d7a74c0 100644 --- a/api/core/file/file_manager.py +++ b/api/core/file/file_manager.py @@ -76,8 +76,16 @@ def to_prompt_message_content(f: File, /): def download(f: File, /): - upload_file = file_repository.get_upload_file(session=db.session(), file=f) - return _download_file_content(upload_file.key) + if f.transfer_method == FileTransferMethod.TOOL_FILE: + tool_file = file_repository.get_tool_file(session=db.session(), file=f) + return _download_file_content(tool_file.file_key) + elif f.transfer_method == FileTransferMethod.LOCAL_FILE: + upload_file = file_repository.get_upload_file(session=db.session(), file=f) + return _download_file_content(upload_file.key) + # remote file + response = ssrf_proxy.get(f.remote_url, follow_redirects=True) + response.raise_for_status() + return response.content def _download_file_content(path: str, /): diff --git a/api/core/indexing_runner.py b/api/core/indexing_runner.py index 8df26172b7..fb9fe8f210 100644 --- a/api/core/indexing_runner.py +++ b/api/core/indexing_runner.py @@ -17,6 +17,7 @@ from core.errors.error import ProviderTokenNotInitError from core.llm_generator.llm_generator import LLMGenerator from core.model_manager import ModelInstance, ModelManager from core.model_runtime.entities.model_entities import ModelType +from core.rag.cleaner.clean_processor import CleanProcessor from core.rag.datasource.keyword.keyword_factory import Keyword from core.rag.docstore.dataset_docstore import DatasetDocumentStore from core.rag.extractor.entity.extract_setting import 
ExtractSetting @@ -597,26 +598,9 @@ class IndexingRunner: rules = DatasetProcessRule.AUTOMATIC_RULES else: rules = json.loads(processing_rule.rules) if processing_rule.rules else {} + document_text = CleanProcessor.clean(text, rules) - if "pre_processing_rules" in rules: - pre_processing_rules = rules["pre_processing_rules"] - for pre_processing_rule in pre_processing_rules: - if pre_processing_rule["id"] == "remove_extra_spaces" and pre_processing_rule["enabled"] is True: - # Remove extra spaces - pattern = r"\n{3,}" - text = re.sub(pattern, "\n\n", text) - pattern = r"[\t\f\r\x20\u00a0\u1680\u180e\u2000-\u200a\u202f\u205f\u3000]{2,}" - text = re.sub(pattern, " ", text) - elif pre_processing_rule["id"] == "remove_urls_emails" and pre_processing_rule["enabled"] is True: - # Remove email - pattern = r"([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)" - text = re.sub(pattern, "", text) - - # Remove URL - pattern = r"https?://[^\s]+" - text = re.sub(pattern, "", text) - - return text + return document_text @staticmethod def format_split_text(text): diff --git a/api/core/llm_generator/llm_generator.py b/api/core/llm_generator/llm_generator.py index 39bd6fee69..9cf9ed75c0 100644 --- a/api/core/llm_generator/llm_generator.py +++ b/api/core/llm_generator/llm_generator.py @@ -8,6 +8,8 @@ from core.llm_generator.output_parser.suggested_questions_after_answer import Su from core.llm_generator.prompts import ( CONVERSATION_TITLE_PROMPT, GENERATOR_QA_PROMPT, + JAVASCRIPT_CODE_GENERATOR_PROMPT_TEMPLATE, + PYTHON_CODE_GENERATOR_PROMPT_TEMPLATE, WORKFLOW_RULE_CONFIG_PROMPT_GENERATE_TEMPLATE, ) from core.model_manager import ModelManager @@ -239,6 +241,54 @@ class LLMGenerator: return rule_config + @classmethod + def generate_code( + cls, + tenant_id: str, + instruction: str, + model_config: dict, + code_language: str = "javascript", + max_tokens: int = 1000, + ) -> dict: + if code_language == "python": + prompt_template = PromptTemplateParser(PYTHON_CODE_GENERATOR_PROMPT_TEMPLATE) + else: + prompt_template = PromptTemplateParser(JAVASCRIPT_CODE_GENERATOR_PROMPT_TEMPLATE) + + prompt = prompt_template.format( + inputs={ + "INSTRUCTION": instruction, + "CODE_LANGUAGE": code_language, + }, + remove_template_variables=False, + ) + + model_manager = ModelManager() + model_instance = model_manager.get_model_instance( + tenant_id=tenant_id, + model_type=ModelType.LLM, + provider=model_config.get("provider") if model_config else None, + model=model_config.get("name") if model_config else None, + ) + + prompt_messages = [UserPromptMessage(content=prompt)] + model_parameters = {"max_tokens": max_tokens, "temperature": 0.01} + + try: + response = model_instance.invoke_llm( + prompt_messages=prompt_messages, model_parameters=model_parameters, stream=False + ) + + generated_code = response.message.content + return {"code": generated_code, "language": code_language, "error": ""} + + except InvokeError as e: + error = str(e) + return {"code": "", "language": code_language, "error": f"Failed to generate code. 
Error: {error}"} + except Exception as e: + logging.exception(e) + return {"code": "", "language": code_language, "error": f"An unexpected error occurred: {str(e)}"} + @classmethod def generate_qa_document(cls, tenant_id: str, query, document_language: str): prompt = GENERATOR_QA_PROMPT.format(language=document_language) diff --git a/api/core/llm_generator/prompts.py b/api/core/llm_generator/prompts.py index e5b6784516..7c0f247052 100644 --- a/api/core/llm_generator/prompts.py +++ b/api/core/llm_generator/prompts.py @@ -61,6 +61,73 @@ User Input: yo, 你今天咋样? User Input: """ # noqa: E501 +PYTHON_CODE_GENERATOR_PROMPT_TEMPLATE = ( + "You are an expert programmer. Generate code based on the following instructions:\n\n" + "Instructions: {{INSTRUCTION}}\n\n" + "Write the code in {{CODE_LANGUAGE}}.\n\n" + "Please ensure that you meet the following requirements:\n" + "1. Define a function named 'main'.\n" + "2. The 'main' function must return a dictionary (dict).\n" + "3. You may modify the arguments of the 'main' function, but include appropriate type hints.\n" + "4. The returned dictionary should contain at least one key-value pair.\n\n" + "5. You may ONLY use the following libraries in your code: \n" + "- json\n" + "- datetime\n" + "- math\n" + "- random\n" + "- re\n" + "- string\n" + "- sys\n" + "- time\n" + "- traceback\n" + "- uuid\n" + "- os\n" + "- base64\n" + "- hashlib\n" + "- hmac\n" + "- binascii\n" + "- collections\n" + "- functools\n" + "- operator\n" + "- itertools\n\n" + "Example:\n" + "def main(arg1: str, arg2: int) -> dict:\n" + " return {\n" + ' "result": arg1 * arg2,\n' + " }\n\n" + "IMPORTANT:\n" + "- Provide ONLY the code without any additional explanations, comments, or markdown formatting.\n" + "- DO NOT use markdown code blocks (``` or ``` python). Return the raw code directly.\n" + "- The code should start immediately after this instruction, without any preceding newlines or spaces.\n" + "- The code should be complete, functional, and follow best practices for {{CODE_LANGUAGE}}.\n\n" + "- Always use the format return {'result': ...} for the output.\n\n" + "Generated Code:\n" +) +JAVASCRIPT_CODE_GENERATOR_PROMPT_TEMPLATE = ( + "You are an expert programmer. Generate code based on the following instructions:\n\n" + "Instructions: {{INSTRUCTION}}\n\n" + "Write the code in {{CODE_LANGUAGE}}.\n\n" + "Please ensure that you meet the following requirements:\n" + "1. Define a function named 'main'.\n" + "2. The 'main' function must return an object.\n" + "3. You may modify the arguments of the 'main' function, but include appropriate JSDoc annotations.\n" + "4. The returned object should contain at least one key-value pair.\n\n" + "5. The returned object should always be in the format: {result: ...}\n\n" + "Example:\n" + "function main(arg1, arg2) {\n" + " return {\n" + " result: arg1 * arg2\n" + " };\n" + "}\n\n" + "IMPORTANT:\n" + "- Provide ONLY the code without any additional explanations, comments, or markdown formatting.\n" + "- DO NOT use markdown code blocks (``` or ``` javascript). 
Return the raw code directly.\n" + "- The code should start immediately after this instruction, without any preceding newlines or spaces.\n" + "- The code should be complete, functional, and follow best practices for {{CODE_LANGUAGE}}.\n\n" + "Generated Code:\n" +) + + SUGGESTED_QUESTIONS_AFTER_ANSWER_INSTRUCTION_PROMPT = ( "Please help me predict the three most likely questions that human would ask, " "and keeping each question under 20 characters.\n" diff --git a/api/core/model_runtime/entities/llm_entities.py b/api/core/model_runtime/entities/llm_entities.py index 52b590f66a..88531d8ae0 100644 --- a/api/core/model_runtime/entities/llm_entities.py +++ b/api/core/model_runtime/entities/llm_entities.py @@ -105,6 +105,7 @@ class LLMResult(BaseModel): Model class for llm result. """ + id: Optional[str] = None model: str prompt_messages: list[PromptMessage] message: AssistantPromptMessage diff --git a/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-sonnet-v2.yaml b/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-sonnet-v2.yaml new file mode 100644 index 0000000000..b1e5698375 --- /dev/null +++ b/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-sonnet-v2.yaml @@ -0,0 +1,60 @@ +model: anthropic.claude-3-5-sonnet-20241022-v2:0 +label: + en_US: Claude 3.5 Sonnet V2 +model_type: llm +features: + - agent-thought + - vision + - tool-call + - stream-tool-call +model_properties: + mode: chat + context_size: 200000 +# docs: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-messages.html +parameter_rules: + - name: max_tokens + use_template: max_tokens + required: true + type: int + default: 4096 + min: 1 + max: 4096 + help: + zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 + en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. + - name: temperature + use_template: temperature + required: false + type: float + default: 1 + min: 0.0 + max: 1.0 + help: + zh_Hans: 生成内容的随机性。 + en_US: The amount of randomness injected into the response. + - name: top_p + required: false + type: float + default: 0.999 + min: 0.000 + max: 1.000 + help: + zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 + en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. + - name: top_k + required: false + type: int + default: 0 + min: 0 + # tip docs from aws has error, max value is 500 + max: 500 + help: + zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 + en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. 
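+    # Assumption (not stated in the linked AWS docs): leaving top_k at its default
+    # of 0 disables top-k filtering, so sampling is shaped by temperature/top_p alone.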
+ - name: response_format + use_template: response_format +pricing: + input: '0.003' + output: '0.015' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/eu.anthropic.claude-3-sonnet-v2.yaml b/api/core/model_runtime/model_providers/bedrock/llm/eu.anthropic.claude-3-sonnet-v2.yaml new file mode 100644 index 0000000000..8d831e6fcb --- /dev/null +++ b/api/core/model_runtime/model_providers/bedrock/llm/eu.anthropic.claude-3-sonnet-v2.yaml @@ -0,0 +1,60 @@ +model: eu.anthropic.claude-3-5-sonnet-20241022-v2:0 +label: + en_US: Claude 3.5 Sonnet V2(EU.Cross Region Inference) +model_type: llm +features: + - agent-thought + - vision + - tool-call + - stream-tool-call +model_properties: + mode: chat + context_size: 200000 +# docs: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-messages.html +parameter_rules: + - name: max_tokens + use_template: max_tokens + required: true + type: int + default: 4096 + min: 1 + max: 4096 + help: + zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 + en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. + - name: temperature + use_template: temperature + required: false + type: float + default: 1 + min: 0.0 + max: 1.0 + help: + zh_Hans: 生成内容的随机性。 + en_US: The amount of randomness injected into the response. + - name: top_p + required: false + type: float + default: 0.999 + min: 0.000 + max: 1.000 + help: + zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 + en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. + - name: top_k + required: false + type: int + default: 0 + min: 0 + # tip docs from aws has error, max value is 500 + max: 500 + help: + zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 + en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. 
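+  # Assumption worth verifying against current AWS docs: the "eu."/"us." model-ID
+  # prefixes select Bedrock cross-region inference profiles (EU and US routing).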
+ - name: response_format + use_template: response_format +pricing: + input: '0.003' + output: '0.015' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-sonnet-v2.yaml b/api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-sonnet-v2.yaml new file mode 100644 index 0000000000..31a403289b --- /dev/null +++ b/api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-sonnet-v2.yaml @@ -0,0 +1,60 @@ +model: us.anthropic.claude-3-5-sonnet-20241022-v2:0 +label: + en_US: Claude 3.5 Sonnet V2(US.Cross Region Inference) +model_type: llm +features: + - agent-thought + - vision + - tool-call + - stream-tool-call +model_properties: + mode: chat + context_size: 200000 +# docs: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-messages.html +parameter_rules: + - name: max_tokens + use_template: max_tokens + required: true + type: int + default: 4096 + min: 1 + max: 4096 + help: + zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 + en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. + - name: temperature + use_template: temperature + required: false + type: float + default: 1 + min: 0.0 + max: 1.0 + help: + zh_Hans: 生成内容的随机性。 + en_US: The amount of randomness injected into the response. + - name: top_p + required: false + type: float + default: 0.999 + min: 0.000 + max: 1.000 + help: + zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 + en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. + - name: top_k + required: false + type: int + default: 0 + min: 0 + # tip docs from aws has error, max value is 500 + max: 500 + help: + zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 + en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. 
+ - name: response_format + use_template: response_format +pricing: + input: '0.003' + output: '0.015' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/gitee_ai/_assets/Gitee-AI-Logo-full.svg b/api/core/model_runtime/model_providers/gitee_ai/_assets/Gitee-AI-Logo-full.svg new file mode 100644 index 0000000000..f9738b585b --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/_assets/Gitee-AI-Logo-full.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/api/core/model_runtime/model_providers/gitee_ai/_assets/Gitee-AI-Logo.svg b/api/core/model_runtime/model_providers/gitee_ai/_assets/Gitee-AI-Logo.svg new file mode 100644 index 0000000000..1f51187f19 --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/_assets/Gitee-AI-Logo.svg @@ -0,0 +1,3 @@ + + + diff --git a/api/core/model_runtime/model_providers/gitee_ai/_common.py b/api/core/model_runtime/model_providers/gitee_ai/_common.py new file mode 100644 index 0000000000..0750f3b75d --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/_common.py @@ -0,0 +1,47 @@ +from dashscope.common.error import ( + AuthenticationError, + InvalidParameter, + RequestFailure, + ServiceUnavailableError, + UnsupportedHTTPMethod, + UnsupportedModel, +) + +from core.model_runtime.errors.invoke import ( + InvokeAuthorizationError, + InvokeBadRequestError, + InvokeConnectionError, + InvokeError, + InvokeRateLimitError, + InvokeServerUnavailableError, +) + + +class _CommonGiteeAI: + @property + def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: + """ + Map model invoke error to unified error + The key is the error type thrown to the caller + The value is the error type thrown by the model, + which needs to be converted into a unified error type for the caller. + + :return: Invoke error mapping + """ + return { + InvokeConnectionError: [ + RequestFailure, + ], + InvokeServerUnavailableError: [ + ServiceUnavailableError, + ], + InvokeRateLimitError: [], + InvokeAuthorizationError: [ + AuthenticationError, + ], + InvokeBadRequestError: [ + InvalidParameter, + UnsupportedModel, + UnsupportedHTTPMethod, + ], + } diff --git a/api/core/model_runtime/model_providers/gitee_ai/gitee_ai.py b/api/core/model_runtime/model_providers/gitee_ai/gitee_ai.py new file mode 100644 index 0000000000..ca67594ce4 --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/gitee_ai.py @@ -0,0 +1,25 @@ +import logging + +from core.model_runtime.entities.model_entities import ModelType +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.__base.model_provider import ModelProvider + +logger = logging.getLogger(__name__) + + +class GiteeAIProvider(ModelProvider): + def validate_provider_credentials(self, credentials: dict) -> None: + """ + Validate provider credentials + if validate failed, raise exception + + :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. 
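+
+        (Validation is performed by running credential validation against the
+        Qwen2-7B-Instruct model, as shown in the try block below.)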
+        """
+        try:
+            model_instance = self.get_model_instance(ModelType.LLM)
+            model_instance.validate_credentials(model="Qwen2-7B-Instruct", credentials=credentials)
+        except CredentialsValidateFailedError as ex:
+            raise ex
+        except Exception as ex:
+            logger.exception(f"{self.get_provider_schema().provider} credentials validate failed")
+            raise ex
diff --git a/api/core/model_runtime/model_providers/gitee_ai/gitee_ai.yaml b/api/core/model_runtime/model_providers/gitee_ai/gitee_ai.yaml
new file mode 100644
index 0000000000..7f7d0f2e53
--- /dev/null
+++ b/api/core/model_runtime/model_providers/gitee_ai/gitee_ai.yaml
@@ -0,0 +1,35 @@
+provider: gitee_ai
+label:
+  en_US: Gitee AI
+  zh_Hans: Gitee AI
+description:
+  en_US: Quickly experience large models and lead the way in exploring the open-source AI world
+  zh_Hans: 快速体验大模型,领先探索 AI 开源世界
+icon_small:
+  en_US: Gitee-AI-Logo.svg
+icon_large:
+  en_US: Gitee-AI-Logo-full.svg
+help:
+  title:
+    en_US: Get your token from Gitee AI
+    zh_Hans: 从 Gitee AI 获取 token
+  url:
+    en_US: https://ai.gitee.com/dashboard/settings/tokens
+supported_model_types:
+  - llm
+  - text-embedding
+  - rerank
+  - speech2text
+  - tts
+configurate_methods:
+  - predefined-model
+provider_credential_schema:
+  credential_form_schemas:
+    - variable: api_key
+      label:
+        en_US: API Key
+      type: secret-input
+      required: true
+      placeholder:
+        zh_Hans: 在此输入您的 API Key
+        en_US: Enter your API Key
diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/Qwen2-72B-Instruct.yaml b/api/core/model_runtime/model_providers/gitee_ai/llm/Qwen2-72B-Instruct.yaml
new file mode 100644
index 0000000000..0348438a75
--- /dev/null
+++ b/api/core/model_runtime/model_providers/gitee_ai/llm/Qwen2-72B-Instruct.yaml
@@ -0,0 +1,105 @@
+model: Qwen2-72B-Instruct
+label:
+  zh_Hans: Qwen2-72B-Instruct
+  en_US: Qwen2-72B-Instruct
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 6400
+parameter_rules:
+  - name: stream
+    use_template: boolean
+    label:
+      en_US: "Stream"
+      zh_Hans: "流式"
+    type: boolean
+    default: true
+    required: true
+    help:
+      en_US: "Whether to return the results in batches through streaming. If set to true, the generated text will be pushed to the user in real time during the generation process."
+      zh_Hans: "是否通过流式分批返回结果。如果设置为 true,生成过程中实时地向用户推送每一部分生成的文本。"
+
+  - name: max_tokens
+    use_template: max_tokens
+    label:
+      en_US: "Max Tokens"
+      zh_Hans: "最大Token数"
+    type: int
+    default: 512
+    min: 1
+    required: true
+    help:
+      en_US: "The maximum number of tokens that can be generated by the model varies depending on the model."
+      zh_Hans: "模型可生成的最大 token 个数,不同模型上限不同。"
+
+  - name: temperature
+    use_template: temperature
+    label:
+      en_US: "Temperature"
+      zh_Hans: "采样温度"
+    type: float
+    default: 0.7
+    min: 0.0
+    max: 1.0
+    precision: 1
+    required: true
+    help:
+      en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time."
+      zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。"
+
+  - name: top_p
+    use_template: top_p
+    label:
+      en_US: "Top P"
+      zh_Hans: "Top P"
+    type: float
+    default: 0.7
+    min: 0.0
+    max: 1.0
+    precision: 1
+    required: true
+    help:
+      en_US: "The value range of the sampling method is [0.0, 1.0]. 
The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." + zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" + + - name: top_k + use_template: top_k + label: + en_US: "Top K" + zh_Hans: "Top K" + type: int + default: 50 + min: 0 + max: 100 + required: true + help: + en_US: "The value range is [0,100], which limits the model to only select from the top k words with the highest probability when choosing the next word at each step. The larger the value, the more diverse text generation will be." + zh_Hans: "取值范围为 [0,100],限制模型在每一步选择下一个词时,只从概率最高的前 k 个词中选取。数值越大,文本生成越多样。" + + - name: frequency_penalty + use_template: frequency_penalty + label: + en_US: "Frequency Penalty" + zh_Hans: "频率惩罚" + type: float + default: 0 + min: -1.0 + max: 1.0 + precision: 1 + required: false + help: + en_US: "Used to adjust the frequency of repeated content in automatically generated text. Positive numbers reduce repetition, while negative numbers increase repetition. After setting this parameter, if a word has already appeared in the text, the model will decrease the probability of choosing that word for subsequent generation." + zh_Hans: "用于调整自动生成文本中重复内容的频率。正数减少重复,负数增加重复。设置此参数后,如果一个词在文本中已经出现过,模型在后续生成中选择该词的概率会降低。" + + - name: user + use_template: text + label: + en_US: "User" + zh_Hans: "用户" + type: string + required: false + help: + en_US: "Used to track and differentiate conversation requests from different users." + zh_Hans: "用于追踪和区分不同用户的对话请求。" diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/Qwen2-7B-Instruct.yaml b/api/core/model_runtime/model_providers/gitee_ai/llm/Qwen2-7B-Instruct.yaml new file mode 100644 index 0000000000..ba1ad788f5 --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/llm/Qwen2-7B-Instruct.yaml @@ -0,0 +1,105 @@ +model: Qwen2-7B-Instruct +label: + zh_Hans: Qwen2-7B-Instruct + en_US: Qwen2-7B-Instruct +model_type: llm +features: + - agent-thought +model_properties: + mode: chat + context_size: 32768 +parameter_rules: + - name: stream + use_template: boolean + label: + en_US: "Stream" + zh_Hans: "流式" + type: boolean + default: true + required: true + help: + en_US: "Whether to return the results in batches through streaming. If set to true, the generated text will be pushed to the user in real time during the generation process." + zh_Hans: "是否通过流式分批返回结果。如果设置为 true,生成过程中实时地向用户推送每一部分生成的文本。" + + - name: max_tokens + use_template: max_tokens + label: + en_US: "Max Tokens" + zh_Hans: "最大Token数" + type: int + default: 512 + min: 1 + required: true + help: + en_US: "The maximum number of tokens that can be generated by the model varies depending on the model." + zh_Hans: "模型可生成的最大 token 个数,不同模型上限不同。" + + - name: temperature + use_template: temperature + label: + en_US: "Temperature" + zh_Hans: "采样温度" + type: float + default: 0.7 + min: 0.0 + max: 1.0 + precision: 1 + required: true + help: + en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." 
+ zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" + + - name: top_p + use_template: top_p + label: + en_US: "Top P" + zh_Hans: "Top P" + type: float + default: 0.7 + min: 0.0 + max: 1.0 + precision: 1 + required: true + help: + en_US: "The value range of the sampling method is [0.0, 1.0]. The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." + zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" + + - name: top_k + use_template: top_k + label: + en_US: "Top K" + zh_Hans: "Top K" + type: int + default: 50 + min: 0 + max: 100 + required: true + help: + en_US: "The value range is [0,100], which limits the model to only select from the top k words with the highest probability when choosing the next word at each step. The larger the value, the more diverse text generation will be." + zh_Hans: "取值范围为 [0,100],限制模型在每一步选择下一个词时,只从概率最高的前 k 个词中选取。数值越大,文本生成越多样。" + + - name: frequency_penalty + use_template: frequency_penalty + label: + en_US: "Frequency Penalty" + zh_Hans: "频率惩罚" + type: float + default: 0 + min: -1.0 + max: 1.0 + precision: 1 + required: false + help: + en_US: "Used to adjust the frequency of repeated content in automatically generated text. Positive numbers reduce repetition, while negative numbers increase repetition. After setting this parameter, if a word has already appeared in the text, the model will decrease the probability of choosing that word for subsequent generation." + zh_Hans: "用于调整自动生成文本中重复内容的频率。正数减少重复,负数增加重复。设置此参数后,如果一个词在文本中已经出现过,模型在后续生成中选择该词的概率会降低。" + + - name: user + use_template: text + label: + en_US: "User" + zh_Hans: "用户" + type: string + required: false + help: + en_US: "Used to track and differentiate conversation requests from different users." + zh_Hans: "用于追踪和区分不同用户的对话请求。" diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/Yi-1.5-34B-Chat.yaml b/api/core/model_runtime/model_providers/gitee_ai/llm/Yi-1.5-34B-Chat.yaml new file mode 100644 index 0000000000..f7260c987b --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/llm/Yi-1.5-34B-Chat.yaml @@ -0,0 +1,105 @@ +model: Yi-1.5-34B-Chat +label: + zh_Hans: Yi-1.5-34B-Chat + en_US: Yi-1.5-34B-Chat +model_type: llm +features: + - agent-thought +model_properties: + mode: chat + context_size: 4096 +parameter_rules: + - name: stream + use_template: boolean + label: + en_US: "Stream" + zh_Hans: "流式" + type: boolean + default: true + required: true + help: + en_US: "Whether to return the results in batches through streaming. If set to true, the generated text will be pushed to the user in real time during the generation process." + zh_Hans: "是否通过流式分批返回结果。如果设置为 true,生成过程中实时地向用户推送每一部分生成的文本。" + + - name: max_tokens + use_template: max_tokens + label: + en_US: "Max Tokens" + zh_Hans: "最大Token数" + type: int + default: 512 + min: 1 + required: true + help: + en_US: "The maximum number of tokens that can be generated by the model varies depending on the model." 
+ zh_Hans: "模型可生成的最大 token 个数,不同模型上限不同。" + + - name: temperature + use_template: temperature + label: + en_US: "Temperature" + zh_Hans: "采样温度" + type: float + default: 0.7 + min: 0.0 + max: 1.0 + precision: 1 + required: true + help: + en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." + zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" + + - name: top_p + use_template: top_p + label: + en_US: "Top P" + zh_Hans: "Top P" + type: float + default: 0.7 + min: 0.0 + max: 1.0 + precision: 1 + required: true + help: + en_US: "The value range of the sampling method is [0.0, 1.0]. The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." + zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" + + - name: top_k + use_template: top_k + label: + en_US: "Top K" + zh_Hans: "Top K" + type: int + default: 50 + min: 0 + max: 100 + required: true + help: + en_US: "The value range is [0,100], which limits the model to only select from the top k words with the highest probability when choosing the next word at each step. The larger the value, the more diverse text generation will be." + zh_Hans: "取值范围为 [0,100],限制模型在每一步选择下一个词时,只从概率最高的前 k 个词中选取。数值越大,文本生成越多样。" + + - name: frequency_penalty + use_template: frequency_penalty + label: + en_US: "Frequency Penalty" + zh_Hans: "频率惩罚" + type: float + default: 0 + min: -1.0 + max: 1.0 + precision: 1 + required: false + help: + en_US: "Used to adjust the frequency of repeated content in automatically generated text. Positive numbers reduce repetition, while negative numbers increase repetition. After setting this parameter, if a word has already appeared in the text, the model will decrease the probability of choosing that word for subsequent generation." + zh_Hans: "用于调整自动生成文本中重复内容的频率。正数减少重复,负数增加重复。设置此参数后,如果一个词在文本中已经出现过,模型在后续生成中选择该词的概率会降低。" + + - name: user + use_template: text + label: + en_US: "User" + zh_Hans: "用户" + type: string + required: false + help: + en_US: "Used to track and differentiate conversation requests from different users." 
+ zh_Hans: "用于追踪和区分不同用户的对话请求。" diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/_position.yaml b/api/core/model_runtime/model_providers/gitee_ai/llm/_position.yaml new file mode 100644 index 0000000000..21f6120742 --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/llm/_position.yaml @@ -0,0 +1,7 @@ +- Qwen2-7B-Instruct +- Qwen2-72B-Instruct +- Yi-1.5-34B-Chat +- glm-4-9b-chat +- deepseek-coder-33B-instruct-chat +- deepseek-coder-33B-instruct-completions +- codegeex4-all-9b diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/codegeex4-all-9b.yaml b/api/core/model_runtime/model_providers/gitee_ai/llm/codegeex4-all-9b.yaml new file mode 100644 index 0000000000..8632cd92ab --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/llm/codegeex4-all-9b.yaml @@ -0,0 +1,105 @@ +model: codegeex4-all-9b +label: + zh_Hans: codegeex4-all-9b + en_US: codegeex4-all-9b +model_type: llm +features: + - agent-thought +model_properties: + mode: chat + context_size: 40960 +parameter_rules: + - name: stream + use_template: boolean + label: + en_US: "Stream" + zh_Hans: "流式" + type: boolean + default: true + required: true + help: + en_US: "Whether to return the results in batches through streaming. If set to true, the generated text will be pushed to the user in real time during the generation process." + zh_Hans: "是否通过流式分批返回结果。如果设置为 true,生成过程中实时地向用户推送每一部分生成的文本。" + + - name: max_tokens + use_template: max_tokens + label: + en_US: "Max Tokens" + zh_Hans: "最大Token数" + type: int + default: 512 + min: 1 + required: true + help: + en_US: "The maximum number of tokens that can be generated by the model varies depending on the model." + zh_Hans: "模型可生成的最大 token 个数,不同模型上限不同。" + + - name: temperature + use_template: temperature + label: + en_US: "Temperature" + zh_Hans: "采样温度" + type: float + default: 0.7 + min: 0.0 + max: 1.0 + precision: 1 + required: true + help: + en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." + zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" + + - name: top_p + use_template: top_p + label: + en_US: "Top P" + zh_Hans: "Top P" + type: float + default: 0.7 + min: 0.0 + max: 1.0 + precision: 1 + required: true + help: + en_US: "The value range of the sampling method is [0.0, 1.0]. The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." + zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" + + - name: top_k + use_template: top_k + label: + en_US: "Top K" + zh_Hans: "Top K" + type: int + default: 50 + min: 0 + max: 100 + required: true + help: + en_US: "The value range is [0,100], which limits the model to only select from the top k words with the highest probability when choosing the next word at each step. The larger the value, the more diverse text generation will be." 
+ zh_Hans: "取值范围为 [0,100],限制模型在每一步选择下一个词时,只从概率最高的前 k 个词中选取。数值越大,文本生成越多样。" + + - name: frequency_penalty + use_template: frequency_penalty + label: + en_US: "Frequency Penalty" + zh_Hans: "频率惩罚" + type: float + default: 0 + min: -1.0 + max: 1.0 + precision: 1 + required: false + help: + en_US: "Used to adjust the frequency of repeated content in automatically generated text. Positive numbers reduce repetition, while negative numbers increase repetition. After setting this parameter, if a word has already appeared in the text, the model will decrease the probability of choosing that word for subsequent generation." + zh_Hans: "用于调整自动生成文本中重复内容的频率。正数减少重复,负数增加重复。设置此参数后,如果一个词在文本中已经出现过,模型在后续生成中选择该词的概率会降低。" + + - name: user + use_template: text + label: + en_US: "User" + zh_Hans: "用户" + type: string + required: false + help: + en_US: "Used to track and differentiate conversation requests from different users." + zh_Hans: "用于追踪和区分不同用户的对话请求。" diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/deepseek-coder-33B-instruct-chat.yaml b/api/core/model_runtime/model_providers/gitee_ai/llm/deepseek-coder-33B-instruct-chat.yaml new file mode 100644 index 0000000000..2ac00761d5 --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/llm/deepseek-coder-33B-instruct-chat.yaml @@ -0,0 +1,105 @@ +model: deepseek-coder-33B-instruct-chat +label: + zh_Hans: deepseek-coder-33B-instruct-chat + en_US: deepseek-coder-33B-instruct-chat +model_type: llm +features: + - agent-thought +model_properties: + mode: chat + context_size: 9000 +parameter_rules: + - name: stream + use_template: boolean + label: + en_US: "Stream" + zh_Hans: "流式" + type: boolean + default: true + required: true + help: + en_US: "Whether to return the results in batches through streaming. If set to true, the generated text will be pushed to the user in real time during the generation process." + zh_Hans: "是否通过流式分批返回结果。如果设置为 true,生成过程中实时地向用户推送每一部分生成的文本。" + + - name: max_tokens + use_template: max_tokens + label: + en_US: "Max Tokens" + zh_Hans: "最大Token数" + type: int + default: 512 + min: 1 + required: true + help: + en_US: "The maximum number of tokens that can be generated by the model varies depending on the model." + zh_Hans: "模型可生成的最大 token 个数,不同模型上限不同。" + + - name: temperature + use_template: temperature + label: + en_US: "Temperature" + zh_Hans: "采样温度" + type: float + default: 0.7 + min: 0.0 + max: 1.0 + precision: 1 + required: true + help: + en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." + zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" + + - name: top_p + use_template: top_p + label: + en_US: "Top P" + zh_Hans: "Top P" + type: float + default: 0.7 + min: 0.0 + max: 1.0 + precision: 1 + required: true + help: + en_US: "The value range of the sampling method is [0.0, 1.0]. The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." 
+ zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" + + - name: top_k + use_template: top_k + label: + en_US: "Top K" + zh_Hans: "Top K" + type: int + default: 50 + min: 0 + max: 100 + required: true + help: + en_US: "The value range is [0,100], which limits the model to only select from the top k words with the highest probability when choosing the next word at each step. The larger the value, the more diverse text generation will be." + zh_Hans: "取值范围为 [0,100],限制模型在每一步选择下一个词时,只从概率最高的前 k 个词中选取。数值越大,文本生成越多样。" + + - name: frequency_penalty + use_template: frequency_penalty + label: + en_US: "Frequency Penalty" + zh_Hans: "频率惩罚" + type: float + default: 0 + min: -1.0 + max: 1.0 + precision: 1 + required: false + help: + en_US: "Used to adjust the frequency of repeated content in automatically generated text. Positive numbers reduce repetition, while negative numbers increase repetition. After setting this parameter, if a word has already appeared in the text, the model will decrease the probability of choosing that word for subsequent generation." + zh_Hans: "用于调整自动生成文本中重复内容的频率。正数减少重复,负数增加重复。设置此参数后,如果一个词在文本中已经出现过,模型在后续生成中选择该词的概率会降低。" + + - name: user + use_template: text + label: + en_US: "User" + zh_Hans: "用户" + type: string + required: false + help: + en_US: "Used to track and differentiate conversation requests from different users." + zh_Hans: "用于追踪和区分不同用户的对话请求。" diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/deepseek-coder-33B-instruct-completions.yaml b/api/core/model_runtime/model_providers/gitee_ai/llm/deepseek-coder-33B-instruct-completions.yaml new file mode 100644 index 0000000000..7c364d89f7 --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/llm/deepseek-coder-33B-instruct-completions.yaml @@ -0,0 +1,91 @@ +model: deepseek-coder-33B-instruct-completions +label: + zh_Hans: deepseek-coder-33B-instruct-completions + en_US: deepseek-coder-33B-instruct-completions +model_type: llm +features: + - agent-thought +model_properties: + mode: completion + context_size: 9000 +parameter_rules: + - name: stream + use_template: boolean + label: + en_US: "Stream" + zh_Hans: "流式" + type: boolean + default: true + required: true + help: + en_US: "Whether to return the results in batches through streaming. If set to true, the generated text will be pushed to the user in real time during the generation process." + zh_Hans: "是否通过流式分批返回结果。如果设置为 true,生成过程中实时地向用户推送每一部分生成的文本。" + + - name: max_tokens + use_template: max_tokens + label: + en_US: "Max Tokens" + zh_Hans: "最大Token数" + type: int + default: 512 + min: 1 + required: true + help: + en_US: "The maximum number of tokens that can be generated by the model varies depending on the model." + zh_Hans: "模型可生成的最大 token 个数,不同模型上限不同。" + + - name: temperature + use_template: temperature + label: + en_US: "Temperature" + zh_Hans: "采样温度" + type: float + default: 0.7 + min: 0.0 + max: 1.0 + precision: 1 + required: true + help: + en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." 
+ zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" + + - name: top_p + use_template: top_p + label: + en_US: "Top P" + zh_Hans: "Top P" + type: float + default: 0.7 + min: 0.0 + max: 1.0 + precision: 1 + required: true + help: + en_US: "The value range of the sampling method is [0.0, 1.0]. The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." + zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" + + - name: frequency_penalty + use_template: frequency_penalty + label: + en_US: "Frequency Penalty" + zh_Hans: "频率惩罚" + type: float + default: 0 + min: -1.0 + max: 1.0 + precision: 1 + required: false + help: + en_US: "Used to adjust the frequency of repeated content in automatically generated text. Positive numbers reduce repetition, while negative numbers increase repetition. After setting this parameter, if a word has already appeared in the text, the model will decrease the probability of choosing that word for subsequent generation." + zh_Hans: "用于调整自动生成文本中重复内容的频率。正数减少重复,负数增加重复。设置此参数后,如果一个词在文本中已经出现过,模型在后续生成中选择该词的概率会降低。" + + - name: user + use_template: text + label: + en_US: "User" + zh_Hans: "用户" + type: string + required: false + help: + en_US: "Used to track and differentiate conversation requests from different users." + zh_Hans: "用于追踪和区分不同用户的对话请求。" diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/glm-4-9b-chat.yaml b/api/core/model_runtime/model_providers/gitee_ai/llm/glm-4-9b-chat.yaml new file mode 100644 index 0000000000..2afe1cf959 --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/llm/glm-4-9b-chat.yaml @@ -0,0 +1,105 @@ +model: glm-4-9b-chat +label: + zh_Hans: glm-4-9b-chat + en_US: glm-4-9b-chat +model_type: llm +features: + - agent-thought +model_properties: + mode: chat + context_size: 32768 +parameter_rules: + - name: stream + use_template: boolean + label: + en_US: "Stream" + zh_Hans: "流式" + type: boolean + default: true + required: true + help: + en_US: "Whether to return the results in batches through streaming. If set to true, the generated text will be pushed to the user in real time during the generation process." + zh_Hans: "是否通过流式分批返回结果。如果设置为 true,生成过程中实时地向用户推送每一部分生成的文本。" + + - name: max_tokens + use_template: max_tokens + label: + en_US: "Max Tokens" + zh_Hans: "最大Token数" + type: int + default: 512 + min: 1 + required: true + help: + en_US: "The maximum number of tokens that can be generated by the model varies depending on the model." + zh_Hans: "模型可生成的最大 token 个数,不同模型上限不同。" + + - name: temperature + use_template: temperature + label: + en_US: "Temperature" + zh_Hans: "采样温度" + type: float + default: 0.7 + min: 0.0 + max: 1.0 + precision: 1 + required: true + help: + en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." 
+ zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" + + - name: top_p + use_template: top_p + label: + en_US: "Top P" + zh_Hans: "Top P" + type: float + default: 0.7 + min: 0.0 + max: 1.0 + precision: 1 + required: true + help: + en_US: "The value range of the sampling method is [0.0, 1.0]. The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." + zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" + + - name: top_k + use_template: top_k + label: + en_US: "Top K" + zh_Hans: "Top K" + type: int + default: 50 + min: 0 + max: 100 + required: true + help: + en_US: "The value range is [0,100], which limits the model to only select from the top k words with the highest probability when choosing the next word at each step. The larger the value, the more diverse text generation will be." + zh_Hans: "取值范围为 [0,100],限制模型在每一步选择下一个词时,只从概率最高的前 k 个词中选取。数值越大,文本生成越多样。" + + - name: frequency_penalty + use_template: frequency_penalty + label: + en_US: "Frequency Penalty" + zh_Hans: "频率惩罚" + type: float + default: 0 + min: -1.0 + max: 1.0 + precision: 1 + required: false + help: + en_US: "Used to adjust the frequency of repeated content in automatically generated text. Positive numbers reduce repetition, while negative numbers increase repetition. After setting this parameter, if a word has already appeared in the text, the model will decrease the probability of choosing that word for subsequent generation." + zh_Hans: "用于调整自动生成文本中重复内容的频率。正数减少重复,负数增加重复。设置此参数后,如果一个词在文本中已经出现过,模型在后续生成中选择该词的概率会降低。" + + - name: user + use_template: text + label: + en_US: "User" + zh_Hans: "用户" + type: string + required: false + help: + en_US: "Used to track and differentiate conversation requests from different users." 
+ zh_Hans: "用于追踪和区分不同用户的对话请求。" diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/llm.py b/api/core/model_runtime/model_providers/gitee_ai/llm/llm.py new file mode 100644 index 0000000000..b65db6f665 --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/llm/llm.py @@ -0,0 +1,47 @@ +from collections.abc import Generator +from typing import Optional, Union + +from core.model_runtime.entities.llm_entities import LLMMode, LLMResult +from core.model_runtime.entities.message_entities import ( + PromptMessage, + PromptMessageTool, +) +from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel + + +class GiteeAILargeLanguageModel(OAIAPICompatLargeLanguageModel): + MODEL_TO_IDENTITY: dict[str, str] = { + "Yi-1.5-34B-Chat": "Yi-34B-Chat", + "deepseek-coder-33B-instruct-completions": "deepseek-coder-33B-instruct", + "deepseek-coder-33B-instruct-chat": "deepseek-coder-33B-instruct", + } + + def _invoke( + self, + model: str, + credentials: dict, + prompt_messages: list[PromptMessage], + model_parameters: dict, + tools: Optional[list[PromptMessageTool]] = None, + stop: Optional[list[str]] = None, + stream: bool = True, + user: Optional[str] = None, + ) -> Union[LLMResult, Generator]: + self._add_custom_parameters(credentials, model, model_parameters) + return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream) + + def validate_credentials(self, model: str, credentials: dict) -> None: + self._add_custom_parameters(credentials, model, None) + super().validate_credentials(model, credentials) + + @staticmethod + def _add_custom_parameters(credentials: dict, model: str, model_parameters: dict) -> None: + if model is None: + model = "bge-large-zh-v1.5" + + model_identity = GiteeAILargeLanguageModel.MODEL_TO_IDENTITY.get(model, model) + credentials["endpoint_url"] = f"https://ai.gitee.com/api/serverless/{model_identity}/" + if model.endswith("completions"): + credentials["mode"] = LLMMode.COMPLETION.value + else: + credentials["mode"] = LLMMode.CHAT.value diff --git a/api/core/model_runtime/model_providers/gitee_ai/rerank/__init__.py b/api/core/model_runtime/model_providers/gitee_ai/rerank/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/core/model_runtime/model_providers/gitee_ai/rerank/_position.yaml b/api/core/model_runtime/model_providers/gitee_ai/rerank/_position.yaml new file mode 100644 index 0000000000..83162fd338 --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/rerank/_position.yaml @@ -0,0 +1 @@ +- bge-reranker-v2-m3 diff --git a/api/core/model_runtime/model_providers/gitee_ai/rerank/bge-reranker-v2-m3.yaml b/api/core/model_runtime/model_providers/gitee_ai/rerank/bge-reranker-v2-m3.yaml new file mode 100644 index 0000000000..f0681641e1 --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/rerank/bge-reranker-v2-m3.yaml @@ -0,0 +1,4 @@ +model: bge-reranker-v2-m3 +model_type: rerank +model_properties: + context_size: 1024 diff --git a/api/core/model_runtime/model_providers/gitee_ai/rerank/rerank.py b/api/core/model_runtime/model_providers/gitee_ai/rerank/rerank.py new file mode 100644 index 0000000000..231345c2f4 --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/rerank/rerank.py @@ -0,0 +1,128 @@ +from typing import Optional + +import httpx + +from core.model_runtime.entities.common_entities import I18nObject +from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelPropertyKey, 
ModelType +from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult +from core.model_runtime.errors.invoke import ( + InvokeAuthorizationError, + InvokeBadRequestError, + InvokeConnectionError, + InvokeError, + InvokeRateLimitError, + InvokeServerUnavailableError, +) +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.__base.rerank_model import RerankModel + + +class GiteeAIRerankModel(RerankModel): + """ + Model class for rerank model. + """ + + def _invoke( + self, + model: str, + credentials: dict, + query: str, + docs: list[str], + score_threshold: Optional[float] = None, + top_n: Optional[int] = None, + user: Optional[str] = None, + ) -> RerankResult: + """ + Invoke rerank model + + :param model: model name + :param credentials: model credentials + :param query: search query + :param docs: docs for reranking + :param score_threshold: score threshold + :param top_n: top n documents to return + :param user: unique user id + :return: rerank result + """ + if len(docs) == 0: + return RerankResult(model=model, docs=[]) + + base_url = credentials.get("base_url", "https://ai.gitee.com/api/serverless") + base_url = base_url.removesuffix("/") + + try: + body = {"model": model, "query": query, "documents": docs} + if top_n is not None: + body["top_n"] = top_n + response = httpx.post( + f"{base_url}/{model}/rerank", + json=body, + headers={"Authorization": f"Bearer {credentials.get('api_key')}"}, + ) + + response.raise_for_status() + results = response.json() + + rerank_documents = [] + for result in results["results"]: + rerank_document = RerankDocument( + index=result["index"], + text=result["document"]["text"], + score=result["relevance_score"], + ) + if score_threshold is None or result["relevance_score"] >= score_threshold: + rerank_documents.append(rerank_document) + return RerankResult(model=model, docs=rerank_documents) + except httpx.HTTPStatusError as e: + raise InvokeServerUnavailableError(str(e)) + + def validate_credentials(self, model: str, credentials: dict) -> None: + """ + Validate model credentials + + :param model: model name + :param credentials: model credentials + :return: + """ + try: + self._invoke( + model=model, + credentials=credentials, + query="What is the capital of the United States?", + docs=[ + "Carson City is the capital city of the American state of Nevada. At the 2010 United States " + "Census, Carson City had a population of 55,274.", + "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that " + "are a political division controlled by the United States. 
Its capital is Saipan.", + ], + score_threshold=0.01, + ) + except Exception as ex: + raise CredentialsValidateFailedError(str(ex)) + + @property + def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: + """ + Map model invoke error to unified error + """ + return { + InvokeConnectionError: [httpx.ConnectError], + InvokeServerUnavailableError: [httpx.RemoteProtocolError], + InvokeRateLimitError: [], + InvokeAuthorizationError: [httpx.HTTPStatusError], + InvokeBadRequestError: [httpx.RequestError], + } + + def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: + """ + generate custom model entities from credentials + """ + entity = AIModelEntity( + model=model, + label=I18nObject(en_US=model), + model_type=ModelType.RERANK, + fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, + model_properties={ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size"))}, + ) + + return entity diff --git a/api/core/model_runtime/model_providers/gitee_ai/speech2text/__init__.py b/api/core/model_runtime/model_providers/gitee_ai/speech2text/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/core/model_runtime/model_providers/gitee_ai/speech2text/_position.yaml b/api/core/model_runtime/model_providers/gitee_ai/speech2text/_position.yaml new file mode 100644 index 0000000000..8e9b47598b --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/speech2text/_position.yaml @@ -0,0 +1,2 @@ +- whisper-base +- whisper-large diff --git a/api/core/model_runtime/model_providers/gitee_ai/speech2text/speech2text.py b/api/core/model_runtime/model_providers/gitee_ai/speech2text/speech2text.py new file mode 100644 index 0000000000..5597f5b43e --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/speech2text/speech2text.py @@ -0,0 +1,53 @@ +import os +from typing import IO, Optional + +import requests + +from core.model_runtime.errors.invoke import InvokeBadRequestError +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.__base.speech2text_model import Speech2TextModel +from core.model_runtime.model_providers.gitee_ai._common import _CommonGiteeAI + + +class GiteeAISpeech2TextModel(_CommonGiteeAI, Speech2TextModel): + """ + Model class for OpenAI Compatible Speech to text model. 
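+
+    It posts the audio file to the Gitee AI serverless speech-to-text endpoint
+    and returns the "text" field of the JSON response (see _invoke below).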
+ """ + + def _invoke(self, model: str, credentials: dict, file: IO[bytes], user: Optional[str] = None) -> str: + """ + Invoke speech2text model + + :param model: model name + :param credentials: model credentials + :param file: audio file + :param user: unique user id + :return: text for given audio file + """ + # doc: https://ai.gitee.com/docs/openapi/serverless#tag/serverless/POST/{service}/speech-to-text + + endpoint_url = f"https://ai.gitee.com/api/serverless/{model}/speech-to-text" + files = [("file", file)] + _, file_ext = os.path.splitext(file.name) + headers = {"Content-Type": f"audio/{file_ext}", "Authorization": f"Bearer {credentials.get('api_key')}"} + response = requests.post(endpoint_url, headers=headers, files=files) + if response.status_code != 200: + raise InvokeBadRequestError(response.text) + response_data = response.json() + return response_data["text"] + + def validate_credentials(self, model: str, credentials: dict) -> None: + """ + Validate model credentials + + :param model: model name + :param credentials: model credentials + :return: + """ + try: + audio_file_path = self._get_demo_file_path() + + with open(audio_file_path, "rb") as audio_file: + self._invoke(model, credentials, audio_file) + except Exception as ex: + raise CredentialsValidateFailedError(str(ex)) diff --git a/api/core/model_runtime/model_providers/gitee_ai/speech2text/whisper-base.yaml b/api/core/model_runtime/model_providers/gitee_ai/speech2text/whisper-base.yaml new file mode 100644 index 0000000000..a50bf5fc2d --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/speech2text/whisper-base.yaml @@ -0,0 +1,5 @@ +model: whisper-base +model_type: speech2text +model_properties: + file_upload_limit: 1 + supported_file_extensions: flac,mp3,mp4,mpeg,mpga,m4a,ogg,wav,webm diff --git a/api/core/model_runtime/model_providers/gitee_ai/speech2text/whisper-large.yaml b/api/core/model_runtime/model_providers/gitee_ai/speech2text/whisper-large.yaml new file mode 100644 index 0000000000..1be7b1a391 --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/speech2text/whisper-large.yaml @@ -0,0 +1,5 @@ +model: whisper-large +model_type: speech2text +model_properties: + file_upload_limit: 1 + supported_file_extensions: flac,mp3,mp4,mpeg,mpga,m4a,ogg,wav,webm diff --git a/api/core/model_runtime/model_providers/gitee_ai/text_embedding/_position.yaml b/api/core/model_runtime/model_providers/gitee_ai/text_embedding/_position.yaml new file mode 100644 index 0000000000..e8abe6440d --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/text_embedding/_position.yaml @@ -0,0 +1,3 @@ +- bge-large-zh-v1.5 +- bge-small-zh-v1.5 +- bge-m3 diff --git a/api/core/model_runtime/model_providers/gitee_ai/text_embedding/bge-large-zh-v1.5.yaml b/api/core/model_runtime/model_providers/gitee_ai/text_embedding/bge-large-zh-v1.5.yaml new file mode 100644 index 0000000000..9e3ca76e88 --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/text_embedding/bge-large-zh-v1.5.yaml @@ -0,0 +1,8 @@ +model: bge-large-zh-v1.5 +label: + zh_Hans: bge-large-zh-v1.5 + en_US: bge-large-zh-v1.5 +model_type: text-embedding +model_properties: + context_size: 200000 + max_chunks: 20 diff --git a/api/core/model_runtime/model_providers/gitee_ai/text_embedding/bge-m3.yaml b/api/core/model_runtime/model_providers/gitee_ai/text_embedding/bge-m3.yaml new file mode 100644 index 0000000000..a7a99a98a3 --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/text_embedding/bge-m3.yaml @@ -0,0 +1,8 @@ +model: 
bge-m3 +label: + zh_Hans: bge-m3 + en_US: bge-m3 +model_type: text-embedding +model_properties: + context_size: 200000 + max_chunks: 20 diff --git a/api/core/model_runtime/model_providers/gitee_ai/text_embedding/bge-small-zh-v1.5.yaml b/api/core/model_runtime/model_providers/gitee_ai/text_embedding/bge-small-zh-v1.5.yaml new file mode 100644 index 0000000000..bd760408fa --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/text_embedding/bge-small-zh-v1.5.yaml @@ -0,0 +1,8 @@ +model: bge-small-zh-v1.5 +label: + zh_Hans: bge-small-zh-v1.5 + en_US: bge-small-zh-v1.5 +model_type: text-embedding +model_properties: + context_size: 200000 + max_chunks: 20 diff --git a/api/core/model_runtime/model_providers/gitee_ai/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/gitee_ai/text_embedding/text_embedding.py new file mode 100644 index 0000000000..b833c5652c --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/text_embedding/text_embedding.py @@ -0,0 +1,31 @@ +from typing import Optional + +from core.entities.embedding_type import EmbeddingInputType +from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult +from core.model_runtime.model_providers.openai_api_compatible.text_embedding.text_embedding import ( + OAICompatEmbeddingModel, +) + + +class GiteeAIEmbeddingModel(OAICompatEmbeddingModel): + def _invoke( + self, + model: str, + credentials: dict, + texts: list[str], + user: Optional[str] = None, + input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, + ) -> TextEmbeddingResult: + self._add_custom_parameters(credentials, model) + return super()._invoke(model, credentials, texts, user, input_type) + + def validate_credentials(self, model: str, credentials: dict) -> None: + self._add_custom_parameters(credentials, None) + super().validate_credentials(model, credentials) + + @staticmethod + def _add_custom_parameters(credentials: dict, model: str) -> None: + if model is None: + model = "bge-m3" + + credentials["endpoint_url"] = f"https://ai.gitee.com/api/serverless/{model}/v1/" diff --git a/api/core/model_runtime/model_providers/gitee_ai/tts/ChatTTS.yaml b/api/core/model_runtime/model_providers/gitee_ai/tts/ChatTTS.yaml new file mode 100644 index 0000000000..940391dfab --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/tts/ChatTTS.yaml @@ -0,0 +1,11 @@ +model: ChatTTS +model_type: tts +model_properties: + default_voice: 'default' + voices: + - mode: 'default' + name: 'Default' + language: [ 'zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID' ] + word_limit: 3500 + audio_type: 'mp3' + max_workers: 5 diff --git a/api/core/model_runtime/model_providers/gitee_ai/tts/FunAudioLLM-CosyVoice-300M.yaml b/api/core/model_runtime/model_providers/gitee_ai/tts/FunAudioLLM-CosyVoice-300M.yaml new file mode 100644 index 0000000000..8fc5734801 --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/tts/FunAudioLLM-CosyVoice-300M.yaml @@ -0,0 +1,11 @@ +model: FunAudioLLM-CosyVoice-300M +model_type: tts +model_properties: + default_voice: 'default' + voices: + - mode: 'default' + name: 'Default' + language: [ 'zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID' ] + word_limit: 3500 + audio_type: 'mp3' + max_workers: 5 diff --git a/api/core/model_runtime/model_providers/gitee_ai/tts/__init__.py b/api/core/model_runtime/model_providers/gitee_ai/tts/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git 
a/api/core/model_runtime/model_providers/gitee_ai/tts/_position.yaml b/api/core/model_runtime/model_providers/gitee_ai/tts/_position.yaml new file mode 100644 index 0000000000..13c6ec8454 --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/tts/_position.yaml @@ -0,0 +1,4 @@ +- speecht5_tts +- ChatTTS +- fish-speech-1.2-sft +- FunAudioLLM-CosyVoice-300M diff --git a/api/core/model_runtime/model_providers/gitee_ai/tts/fish-speech-1.2-sft.yaml b/api/core/model_runtime/model_providers/gitee_ai/tts/fish-speech-1.2-sft.yaml new file mode 100644 index 0000000000..93cc28bc9d --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/tts/fish-speech-1.2-sft.yaml @@ -0,0 +1,11 @@ +model: fish-speech-1.2-sft +model_type: tts +model_properties: + default_voice: 'default' + voices: + - mode: 'default' + name: 'Default' + language: [ 'zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID' ] + word_limit: 3500 + audio_type: 'mp3' + max_workers: 5 diff --git a/api/core/model_runtime/model_providers/gitee_ai/tts/speecht5_tts.yaml b/api/core/model_runtime/model_providers/gitee_ai/tts/speecht5_tts.yaml new file mode 100644 index 0000000000..f9c843bd41 --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/tts/speecht5_tts.yaml @@ -0,0 +1,11 @@ +model: speecht5_tts +model_type: tts +model_properties: + default_voice: 'default' + voices: + - mode: 'default' + name: 'Default' + language: [ 'zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID' ] + word_limit: 3500 + audio_type: 'mp3' + max_workers: 5 diff --git a/api/core/model_runtime/model_providers/gitee_ai/tts/tts.py b/api/core/model_runtime/model_providers/gitee_ai/tts/tts.py new file mode 100644 index 0000000000..ed2bd5b13d --- /dev/null +++ b/api/core/model_runtime/model_providers/gitee_ai/tts/tts.py @@ -0,0 +1,79 @@ +from typing import Optional + +import requests + +from core.model_runtime.errors.invoke import InvokeBadRequestError +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.__base.tts_model import TTSModel +from core.model_runtime.model_providers.gitee_ai._common import _CommonGiteeAI + + +class GiteeAIText2SpeechModel(_CommonGiteeAI, TTSModel): + """ + Model class for OpenAI Speech to text model. 
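+
+    Note: despite the wording above, this is a text-to-speech model; it posts text
+    to the Gitee AI serverless TTS endpoint and streams audio back in 1024-byte chunks.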
+ """ + + def _invoke( + self, model: str, tenant_id: str, credentials: dict, content_text: str, voice: str, user: Optional[str] = None + ) -> any: + """ + _invoke text2speech model + + :param model: model name + :param tenant_id: user tenant id + :param credentials: model credentials + :param content_text: text content to be translated + :param voice: model timbre + :param user: unique user id + :return: text translated to audio file + """ + return self._tts_invoke_streaming(model=model, credentials=credentials, content_text=content_text, voice=voice) + + def validate_credentials(self, model: str, credentials: dict) -> None: + """ + validate credentials text2speech model + + :param model: model name + :param credentials: model credentials + :return: text translated to audio file + """ + try: + self._tts_invoke_streaming( + model=model, + credentials=credentials, + content_text="Hello Dify!", + voice=self._get_model_default_voice(model, credentials), + ) + except Exception as ex: + raise CredentialsValidateFailedError(str(ex)) + + def _tts_invoke_streaming(self, model: str, credentials: dict, content_text: str, voice: str) -> any: + """ + _tts_invoke_streaming text2speech model + :param model: model name + :param credentials: model credentials + :param content_text: text content to be translated + :param voice: model timbre + :return: text translated to audio file + """ + try: + # doc: https://ai.gitee.com/docs/openapi/serverless#tag/serverless/POST/{service}/text-to-speech + endpoint_url = "https://ai.gitee.com/api/serverless/" + model + "/text-to-speech" + + headers = {"Content-Type": "application/json"} + api_key = credentials.get("api_key") + if api_key: + headers["Authorization"] = f"Bearer {api_key}" + + payload = {"inputs": content_text} + response = requests.post(endpoint_url, headers=headers, json=payload) + + if response.status_code != 200: + raise InvokeBadRequestError(response.text) + + data = response.content + + for i in range(0, len(data), 1024): + yield data[i : i + 1024] + except Exception as ex: + raise InvokeBadRequestError(str(ex)) diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3.5-sonnet-v2.yaml b/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3.5-sonnet-v2.yaml new file mode 100644 index 0000000000..0be3e26e7a --- /dev/null +++ b/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3.5-sonnet-v2.yaml @@ -0,0 +1,55 @@ +model: claude-3-5-sonnet-v2@20241022 +label: + en_US: Claude 3.5 Sonnet v2 +model_type: llm +features: + - agent-thought + - vision +model_properties: + mode: chat + context_size: 200000 +parameter_rules: + - name: max_tokens + use_template: max_tokens + required: true + type: int + default: 4096 + min: 1 + max: 4096 + help: + zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 + en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. + - name: temperature + use_template: temperature + required: false + type: float + default: 1 + min: 0.0 + max: 1.0 + help: + zh_Hans: 生成内容的随机性。 + en_US: The amount of randomness injected into the response. 
+  - name: top_p
+    required: false
+    type: float
+    default: 0.999
+    min: 0.000
+    max: 1.000
+    help:
+      zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。
+      en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both.
+  - name: top_k
+    required: false
+    type: int
+    default: 0
+    min: 0
+    # note: the AWS docs state this limit incorrectly; the actual max value is 500
+    max: 500
+    help:
+      zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。
+      en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses.
+pricing:
+  input: '0.003'
+  output: '0.015'
+  unit: '0.001'
+  currency: USD
diff --git a/api/core/model_runtime/model_providers/vessl_ai/__init__.py b/api/core/model_runtime/model_providers/vessl_ai/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/core/model_runtime/model_providers/vessl_ai/_assets/icon_l_en.png b/api/core/model_runtime/model_providers/vessl_ai/_assets/icon_l_en.png
new file mode 100644
index 0000000000..18ba350fa0
Binary files /dev/null and b/api/core/model_runtime/model_providers/vessl_ai/_assets/icon_l_en.png differ
diff --git a/api/core/model_runtime/model_providers/vessl_ai/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/vessl_ai/_assets/icon_s_en.svg
new file mode 100644
index 0000000000..242f4e82b2
--- /dev/null
+++ b/api/core/model_runtime/model_providers/vessl_ai/_assets/icon_s_en.svg
@@ -0,0 +1,3 @@
+
+
+
diff --git a/api/core/model_runtime/model_providers/vessl_ai/llm/__init__.py b/api/core/model_runtime/model_providers/vessl_ai/llm/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/core/model_runtime/model_providers/vessl_ai/llm/llm.py b/api/core/model_runtime/model_providers/vessl_ai/llm/llm.py
new file mode 100644
index 0000000000..034c066ab5
--- /dev/null
+++ b/api/core/model_runtime/model_providers/vessl_ai/llm/llm.py
@@ -0,0 +1,83 @@
+from decimal import Decimal
+
+from core.model_runtime.entities.common_entities import I18nObject
+from core.model_runtime.entities.llm_entities import LLMMode
+from core.model_runtime.entities.model_entities import (
+    AIModelEntity,
+    DefaultParameterName,
+    FetchFrom,
+    ModelPropertyKey,
+    ModelType,
+    ParameterRule,
+    ParameterType,
+    PriceConfig,
+)
+from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel
+
+
+class VesslAILargeLanguageModel(OAIAPICompatLargeLanguageModel):
+    def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity:
+        features = []
+
+        entity = AIModelEntity(
+            model=model,
+            label=I18nObject(en_US=model),
+            model_type=ModelType.LLM,
+            fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
+            features=features,
+            model_properties={
+                ModelPropertyKey.MODE: credentials.get("mode"),
+            },
+            parameter_rules=[
+                ParameterRule(
+                    name=DefaultParameterName.TEMPERATURE.value,
+                    label=I18nObject(en_US="Temperature"),
+                    type=ParameterType.FLOAT,
+                    default=float(credentials.get("temperature", 0.7)),
+                    min=0,
+                    max=2,
+                    precision=2,
+                ),
+                ParameterRule(
+                    name=DefaultParameterName.TOP_P.value,
+                    label=I18nObject(en_US="Top P"),
+                    type=ParameterType.FLOAT,
+                    default=float(credentials.get("top_p", 1)),
+                    min=0,
+                    max=1,
+                    precision=2,
+                ),
+                ParameterRule(
+                    name=DefaultParameterName.TOP_K.value,
+                    label=I18nObject(en_US="Top K"),
+                    type=ParameterType.INT,
+                    default=int(credentials.get("top_k", 50)),
+                    min=-2147483647,
+                    max=2147483647,
+                    precision=0,
+                ),
+                ParameterRule(
+                    name=DefaultParameterName.MAX_TOKENS.value,
+                    label=I18nObject(en_US="Max Tokens"),
+                    type=ParameterType.INT,
+                    default=512,
+                    min=1,
+                    max=int(credentials.get("max_tokens_to_sample", 4096)),
+                ),
+            ],
+            pricing=PriceConfig(
+                input=Decimal(credentials.get("input_price", 0)),
+                output=Decimal(credentials.get("output_price", 0)),
+                unit=Decimal(credentials.get("unit", 0)),
+                currency=credentials.get("currency", "USD"),
+            ),
+        )
+
+        if credentials["mode"] == "chat":
+            entity.model_properties[ModelPropertyKey.MODE] = LLMMode.CHAT.value
+        elif credentials["mode"] == "completion":
+            entity.model_properties[ModelPropertyKey.MODE] = LLMMode.COMPLETION.value
+        else:
+            raise ValueError(f"Unknown completion mode {credentials['mode']}")
+
+        return entity
diff --git a/api/core/model_runtime/model_providers/vessl_ai/vessl_ai.py b/api/core/model_runtime/model_providers/vessl_ai/vessl_ai.py
new file mode 100644
index 0000000000..7a987c6710
--- /dev/null
+++ b/api/core/model_runtime/model_providers/vessl_ai/vessl_ai.py
@@ -0,0 +1,10 @@
+import logging
+
+from core.model_runtime.model_providers.__base.model_provider import ModelProvider
+
+logger = logging.getLogger(__name__)
+
+
+class VesslAIProvider(ModelProvider):
+    def validate_provider_credentials(self, credentials: dict) -> None:
+        pass
diff --git a/api/core/model_runtime/model_providers/vessl_ai/vessl_ai.yaml b/api/core/model_runtime/model_providers/vessl_ai/vessl_ai.yaml
new file mode 100644
index 0000000000..6052756cae
--- /dev/null
+++ b/api/core/model_runtime/model_providers/vessl_ai/vessl_ai.yaml
@@ -0,0 +1,56 @@
+provider: vessl_ai
+label:
+  en_US: vessl_ai
+icon_small:
+  en_US: icon_s_en.svg
+icon_large:
+  en_US: icon_l_en.png
+background: "#F1EFED"
+help:
+  title:
+    en_US: How to deploy VESSL AI LLM Model Endpoint
+  url:
+    en_US: https://docs.vessl.ai/guides/get-started/llama3-deployment
+supported_model_types:
+  - llm
+configurate_methods:
+  - customizable-model
+model_credential_schema:
+  model:
+    label:
+      en_US: Model Name
+    placeholder:
+      en_US: Enter your model name
+  credential_form_schemas:
+    - variable: endpoint_url
+      label:
+        en_US: Endpoint URL
+      type: text-input
+      required: true
+      placeholder:
+        en_US: Enter the URL of your endpoint
+    - variable: api_key
+      required: true
+      label:
+        en_US: API Key
+      type: secret-input
+      placeholder:
+        en_US: Enter your VESSL AI secret key
+    - variable: mode
+      show_on:
+        - variable: __model_type
+          value: llm
+      label:
+        en_US: Completion mode
+      type: select
+      required: false
+      default: chat
+      placeholder:
+        en_US: Select completion mode
+      options:
+        - value: completion
+          label:
+            en_US: Completion
+        - value: chat
+          label:
+            en_US: Chat
diff --git a/api/core/rag/datasource/retrieval_service.py b/api/core/rag/datasource/retrieval_service.py
index 3affbd2d0a..57af05861c 100644
--- a/api/core/rag/datasource/retrieval_service.py
+++ b/api/core/rag/datasource/retrieval_service.py
@@ -34,6 +34,8 @@ class RetrievalService:
         reranking_mode: Optional[str] = "reranking_model",
         weights: Optional[dict] = None,
     ):
+        if not query:
+            return []
         dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
         if not dataset:
             return []
diff --git a/api/core/rag/datasource/vdb/baidu/baidu_vector.py b/api/core/rag/datasource/vdb/baidu/baidu_vector.py
index 1d4bfef76d..eb78e8aa69 100644
--- a/api/core/rag/datasource/vdb/baidu/baidu_vector.py
+++ b/api/core/rag/datasource/vdb/baidu/baidu_vector.py
@@ -3,11 +3,13 @@ import time
 import uuid
 from typing import Any
 
+import numpy as np
 from pydantic import BaseModel, model_validator
 from pymochow import MochowClient
 from pymochow.auth.bce_credentials import BceCredentials
 from pymochow.configuration import Configuration
-from pymochow.model.enum import FieldType, IndexState, IndexType, MetricType, TableState
+from pymochow.exception import ServerError
+from pymochow.model.enum import FieldType, IndexState, IndexType, MetricType, ServerErrCode, TableState
 from pymochow.model.schema import Field, HNSWParams, Schema, VectorIndex
 from pymochow.model.table import AnnSearch, HNSWSearchParams, Partition, Row
 
@@ -116,6 +118,7 @@ class BaiduVector(BaseVector):
         self._db.table(self._collection_name).delete(filter=f"{key} = '{value}'")
 
     def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
+        query_vector = [float(val) if isinstance(val, np.float64) else val for val in query_vector]
         anns = AnnSearch(
             vector_field=self.field_vector,
             vector_floats=query_vector,
@@ -149,7 +152,13 @@ class BaiduVector(BaseVector):
         return docs
 
     def delete(self) -> None:
-        self._db.drop_table(table_name=self._collection_name)
+        try:
+            self._db.drop_table(table_name=self._collection_name)
+        except ServerError as e:
+            if e.code == ServerErrCode.TABLE_NOT_EXIST:
+                pass
+            else:
+                raise
 
     def _init_client(self, config) -> MochowClient:
         config = Configuration(credentials=BceCredentials(config.account, config.api_key), endpoint=config.endpoint)
@@ -166,7 +175,14 @@ class BaiduVector(BaseVector):
         if exists:
             return self._client.database(self._client_config.database)
         else:
-            return self._client.create_database(database_name=self._client_config.database)
+            try:
+                self._client.create_database(database_name=self._client_config.database)
+            except ServerError as e:
+                if e.code == ServerErrCode.DB_ALREADY_EXIST:
+                    pass
+                else:
+                    raise
+            return self._client.database(self._client_config.database)
 
     def _table_existed(self) -> bool:
         tables = self._db.list_table()
@@ -175,7 +191,7 @@ class BaiduVector(BaseVector):
     def _create_table(self, dimension: int) -> None:
         # Try to grab distributed lock and create table
         lock_name = "vector_indexing_lock_{}".format(self._collection_name)
-        with redis_client.lock(lock_name, timeout=20):
+        with redis_client.lock(lock_name, timeout=60):
             table_exist_cache_key = "vector_indexing_{}".format(self._collection_name)
             if redis_client.get(table_exist_cache_key):
                 return
@@ -238,15 +254,14 @@ class BaiduVector(BaseVector):
             description="Table for Dify",
         )
 
+        # Wait for table created
+        while True:
+            time.sleep(1)
+            table = self._db.describe_table(self._collection_name)
+            if table.state == TableState.NORMAL:
+                break
         redis_client.set(table_exist_cache_key, 1, ex=3600)
 
-        # Wait for table created
-        while True:
-            time.sleep(1)
-            table = self._db.describe_table(self._collection_name)
-            if table.state == TableState.NORMAL:
-                break
-
 
 class BaiduVectorFactory(AbstractVectorFactory):
     def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> BaiduVector:
diff --git a/api/core/rag/datasource/vdb/couchbase/__init__.py b/api/core/rag/datasource/vdb/couchbase/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py b/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py
new file mode 100644
index 0000000000..3f88d2ca2b
--- /dev/null
+++ b/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py
@@ -0,0 +1,378 @@
+import json
+import logging
+import time
+import uuid
+from datetime import timedelta
+from typing import Any
+
+from couchbase import search
+from couchbase.auth import PasswordAuthenticator
+from couchbase.cluster import Cluster
+from couchbase.management.search import SearchIndex
+
+# needed for options -- cluster, timeout, SQL++ (N1QL) query, etc.
+from couchbase.options import ClusterOptions, SearchOptions
+from couchbase.vector_search import VectorQuery, VectorSearch
+from flask import current_app
+from pydantic import BaseModel, model_validator
+
+from core.rag.datasource.vdb.vector_base import BaseVector
+from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
+from core.rag.datasource.vdb.vector_type import VectorType
+from core.rag.embedding.embedding_base import Embeddings
+from core.rag.models.document import Document
+from extensions.ext_redis import redis_client
+from models.dataset import Dataset
+
+logger = logging.getLogger(__name__)
+
+
+class CouchbaseConfig(BaseModel):
+    connection_string: str
+    user: str
+    password: str
+    bucket_name: str
+    scope_name: str
+
+    @model_validator(mode="before")
+    @classmethod
+    def validate_config(cls, values: dict) -> dict:
+        if not values.get("connection_string"):
+            raise ValueError("config COUCHBASE_CONNECTION_STRING is required")
+        if not values.get("user"):
+            raise ValueError("config COUCHBASE_USER is required")
+        if not values.get("password"):
+            raise ValueError("config COUCHBASE_PASSWORD is required")
+        if not values.get("bucket_name"):
+            raise ValueError("config COUCHBASE_BUCKET_NAME is required")
+        if not values.get("scope_name"):
+            raise ValueError("config COUCHBASE_SCOPE_NAME is required")
+        return values
+
+
+class CouchbaseVector(BaseVector):
+    def __init__(self, collection_name: str, config: CouchbaseConfig):
+        super().__init__(collection_name)
+        self._client_config = config
+
+        # Connect to Couchbase
+        auth = PasswordAuthenticator(config.user, config.password)
+        options = ClusterOptions(auth)
+        self._cluster = Cluster(config.connection_string, options)
+        self._bucket = self._cluster.bucket(config.bucket_name)
+        self._scope = self._bucket.scope(config.scope_name)
+        self._bucket_name = config.bucket_name
+        self._scope_name = config.scope_name
+
+        # Wait until the cluster is ready for use.
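+        # wait_until_ready blocks until the cluster's key services respond and
+        # raises on timeout, so a bad connection string or credentials fail
+        # fast here rather than on the first query.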
+ self._cluster.wait_until_ready(timedelta(seconds=5)) + + def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs): + index_id = str(uuid.uuid4()).replace("-", "") + self._create_collection(uuid=index_id, vector_length=len(embeddings[0])) + self.add_texts(texts, embeddings) + + def _create_collection(self, vector_length: int, uuid: str): + lock_name = "vector_indexing_lock_{}".format(self._collection_name) + with redis_client.lock(lock_name, timeout=20): + collection_exist_cache_key = "vector_indexing_{}".format(self._collection_name) + if redis_client.get(collection_exist_cache_key): + return + if self._collection_exists(self._collection_name): + return + manager = self._bucket.collections() + manager.create_collection(self._client_config.scope_name, self._collection_name) + + index_manager = self._scope.search_indexes() + + index_definition = json.loads(""" +{ + "type": "fulltext-index", + "name": "Embeddings._default.Vector_Search", + "uuid": "26d4db528e78b716", + "sourceType": "gocbcore", + "sourceName": "Embeddings", + "sourceUUID": "2242e4a25b4decd6650c9c7b3afa1dbf", + "planParams": { + "maxPartitionsPerPIndex": 1024, + "indexPartitions": 1 + }, + "params": { + "doc_config": { + "docid_prefix_delim": "", + "docid_regexp": "", + "mode": "scope.collection.type_field", + "type_field": "type" + }, + "mapping": { + "analysis": { }, + "default_analyzer": "standard", + "default_datetime_parser": "dateTimeOptional", + "default_field": "_all", + "default_mapping": { + "dynamic": true, + "enabled": true + }, + "default_type": "_default", + "docvalues_dynamic": false, + "index_dynamic": true, + "store_dynamic": true, + "type_field": "_type", + "types": { + "collection_name": { + "dynamic": true, + "enabled": true, + "properties": { + "embedding": { + "dynamic": false, + "enabled": true, + "fields": [ + { + "dims": 1536, + "index": true, + "name": "embedding", + "similarity": "dot_product", + "type": "vector", + "vector_index_optimized_for": "recall" + } + ] + }, + "metadata": { + "dynamic": true, + "enabled": true + }, + "text": { + "dynamic": false, + "enabled": true, + "fields": [ + { + "index": true, + "name": "text", + "store": true, + "type": "text" + } + ] + } + } + } + } + }, + "store": { + "indexType": "scorch", + "segmentVersion": 16 + } + }, + "sourceParams": { } + } +""") + index_definition["name"] = self._collection_name + "_search" + index_definition["uuid"] = uuid + index_definition["params"]["mapping"]["types"]["collection_name"]["properties"]["embedding"]["fields"][0][ + "dims" + ] = vector_length + index_definition["params"]["mapping"]["types"][self._scope_name + "." 
+ self._collection_name] = ( + index_definition["params"]["mapping"]["types"].pop("collection_name") + ) + time.sleep(2) + index_manager.upsert_index( + SearchIndex( + index_definition["name"], + params=index_definition["params"], + source_name=self._bucket_name, + ), + ) + time.sleep(1) + + redis_client.set(collection_exist_cache_key, 1, ex=3600) + + def _collection_exists(self, name: str): + scope_collection_map: dict[str, Any] = {} + + # Get a list of all scopes in the bucket + for scope in self._bucket.collections().get_all_scopes(): + scope_collection_map[scope.name] = [] + + # Get a list of all the collections in the scope + for collection in scope.collections: + scope_collection_map[scope.name].append(collection.name) + + # Check if the collection exists in the scope + return self._collection_name in scope_collection_map[self._scope_name] + + def get_type(self) -> str: + return VectorType.COUCHBASE + + def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): + uuids = self._get_uuids(documents) + texts = [d.page_content for d in documents] + metadatas = [d.metadata for d in documents] + + doc_ids = [] + + documents_to_insert = [ + {"text": text, "embedding": vector, "metadata": metadata} + for id, text, vector, metadata in zip(uuids, texts, embeddings, metadatas) + ] + for doc, id in zip(documents_to_insert, uuids): + result = self._scope.collection(self._collection_name).upsert(id, doc) + + doc_ids.extend(uuids) + + return doc_ids + + def text_exists(self, id: str) -> bool: + # Use a parameterized query for safety and correctness + query = f""" + SELECT COUNT(1) AS count FROM + `{self._client_config.bucket_name}`.{self._client_config.scope_name}.{self._collection_name} + WHERE META().id = $doc_id + """ + # Pass the id as a parameter to the query + result = self._cluster.query(query, named_parameters={"doc_id": id}).execute() + for row in result: + return row["count"] > 0 + return False # Return False if no rows are returned + + def delete_by_ids(self, ids: list[str]) -> None: + query = f""" + DELETE FROM `{self._bucket_name}`.{self._client_config.scope_name}.{self._collection_name} + WHERE META().id IN $doc_ids; + """ + try: + self._cluster.query(query, named_parameters={"doc_ids": ids}).execute() + except Exception as e: + logger.error(e) + + def delete_by_document_id(self, document_id: str): + query = f""" + DELETE FROM + `{self._client_config.bucket_name}`.{self._client_config.scope_name}.{self._collection_name} + WHERE META().id = $doc_id; + """ + self._cluster.query(query, named_parameters={"doc_id": document_id}).execute() + + # def get_ids_by_metadata_field(self, key: str, value: str): + # query = f""" + # SELECT id FROM + # `{self._client_config.bucket_name}`.{self._client_config.scope_name}.{self._collection_name} + # WHERE `metadata.{key}` = $value; + # """ + # result = self._cluster.query(query, named_parameters={'value':value}) + # return [row['id'] for row in result.rows()] + + def delete_by_metadata_field(self, key: str, value: str) -> None: + query = f""" + DELETE FROM `{self._client_config.bucket_name}`.{self._client_config.scope_name}.{self._collection_name} + WHERE metadata.{key} = $value; + """ + self._cluster.query(query, named_parameters={"value": value}).execute() + + def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: + top_k = kwargs.get("top_k", 5) + score_threshold = kwargs.get("score_threshold") or 0.0 + + search_req = search.SearchRequest.create( + VectorSearch.from_vector_query( + 
VectorQuery( + "embedding", + query_vector, + top_k, + ) + ) + ) + try: + search_iter = self._scope.search( + self._collection_name + "_search", + search_req, + SearchOptions(limit=top_k, collections=[self._collection_name], fields=["*"]), + ) + + docs = [] + # Parse the results + for row in search_iter.rows(): + text = row.fields.pop("text") + metadata = self._format_metadata(row.fields) + score = row.score + metadata["score"] = score + doc = Document(page_content=text, metadata=metadata) + if score >= score_threshold: + docs.append(doc) + except Exception as e: + raise ValueError(f"Search failed with error: {e}") + + return docs + + def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: + top_k = kwargs.get("top_k", 2) + try: + CBrequest = search.SearchRequest.create(search.QueryStringQuery("text:" + query)) + search_iter = self._scope.search( + self._collection_name + "_search", CBrequest, SearchOptions(limit=top_k, fields=["*"]) + ) + + docs = [] + for row in search_iter.rows(): + text = row.fields.pop("text") + metadata = self._format_metadata(row.fields) + score = row.score + metadata["score"] = score + doc = Document(page_content=text, metadata=metadata) + docs.append(doc) + + except Exception as e: + raise ValueError(f"Search failed with error: {e}") + + return docs + + def delete(self): + manager = self._bucket.collections() + scopes = manager.get_all_scopes() + + for scope in scopes: + for collection in scope.collections: + if collection.name == self._collection_name: + manager.drop_collection("_default", self._collection_name) + + def _format_metadata(self, row_fields: dict[str, Any]) -> dict[str, Any]: + """Helper method to format the metadata from the Couchbase Search API. + Args: + row_fields (Dict[str, Any]): The fields to format. + + Returns: + Dict[str, Any]: The formatted metadata. 
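+            For example, a search hit field named "metadata.doc_id" is
+            returned under the key "doc_id".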
+ """ + metadata = {} + for key, value in row_fields.items(): + # Couchbase Search returns the metadata key with a prefix + # `metadata.` We remove it to get the original metadata key + if key.startswith("metadata"): + new_key = key.split("metadata" + ".")[-1] + metadata[new_key] = value + else: + metadata[key] = value + + return metadata + + +class CouchbaseVectorFactory(AbstractVectorFactory): + def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> CouchbaseVector: + if dataset.index_struct_dict: + class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"] + collection_name = class_prefix + else: + dataset_id = dataset.id + collection_name = Dataset.gen_collection_name_by_id(dataset_id) + dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.COUCHBASE, collection_name)) + + config = current_app.config + return CouchbaseVector( + collection_name=collection_name, + config=CouchbaseConfig( + connection_string=config.get("COUCHBASE_CONNECTION_STRING"), + user=config.get("COUCHBASE_USER"), + password=config.get("COUCHBASE_PASSWORD"), + bucket_name=config.get("COUCHBASE_BUCKET_NAME"), + scope_name=config.get("COUCHBASE_SCOPE_NAME"), + ), + ) diff --git a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py index 052a187225..c62042af80 100644 --- a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py +++ b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py @@ -142,7 +142,7 @@ class ElasticSearchVector(BaseVector): def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: query_str = {"match": {Field.CONTENT_KEY.value: query}} - results = self._client.search(index=self._collection_name, query=query_str) + results = self._client.search(index=self._collection_name, query=query_str, size=kwargs.get("top_k", 4)) docs = [] for hit in results["hits"]["hits"]: docs.append( diff --git a/api/core/rag/datasource/vdb/oceanbase/__init__.py b/api/core/rag/datasource/vdb/oceanbase/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py b/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py new file mode 100644 index 0000000000..8dd26a073b --- /dev/null +++ b/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py @@ -0,0 +1,209 @@ +import json +import logging +import math +from typing import Any + +from pydantic import BaseModel, model_validator +from pyobvector import VECTOR, ObVecClient +from sqlalchemy import JSON, Column, String, func +from sqlalchemy.dialects.mysql import LONGTEXT + +from configs import dify_config +from core.rag.datasource.vdb.vector_base import BaseVector +from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory +from core.rag.datasource.vdb.vector_type import VectorType +from core.rag.embedding.embedding_base import Embeddings +from core.rag.models.document import Document +from extensions.ext_redis import redis_client +from models.dataset import Dataset + +logger = logging.getLogger(__name__) + +DEFAULT_OCEANBASE_HNSW_BUILD_PARAM = {"M": 16, "efConstruction": 256} +DEFAULT_OCEANBASE_HNSW_SEARCH_PARAM = {"efSearch": 64} +OCEANBASE_SUPPORTED_VECTOR_INDEX_TYPE = "HNSW" +DEFAULT_OCEANBASE_VECTOR_METRIC_TYPE = "l2" + + +class OceanBaseVectorConfig(BaseModel): + host: str + port: int + user: str + password: str + database: str + + @model_validator(mode="before") + @classmethod + def 
validate_config(cls, values: dict) -> dict:
+        if not values["host"]:
+            raise ValueError("config OCEANBASE_VECTOR_HOST is required")
+        if not values["port"]:
+            raise ValueError("config OCEANBASE_VECTOR_PORT is required")
+        if not values["user"]:
+            raise ValueError("config OCEANBASE_VECTOR_USER is required")
+        if not values["database"]:
+            raise ValueError("config OCEANBASE_VECTOR_DATABASE is required")
+        return values
+
+
+class OceanBaseVector(BaseVector):
+    def __init__(self, collection_name: str, config: OceanBaseVectorConfig):
+        super().__init__(collection_name)
+        self._config = config
+        self._hnsw_ef_search = -1
+        self._client = ObVecClient(
+            uri=f"{self._config.host}:{self._config.port}",
+            user=self._config.user,
+            password=self._config.password,
+            db_name=self._config.database,
+        )
+
+    def get_type(self) -> str:
+        return VectorType.OCEANBASE
+
+    def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
+        self._vec_dim = len(embeddings[0])
+        self._create_collection()
+        self.add_texts(texts, embeddings)
+
+    def _create_collection(self) -> None:
+        lock_name = "vector_indexing_lock_" + self._collection_name
+        with redis_client.lock(lock_name, timeout=20):
+            collection_exist_cache_key = "vector_indexing_" + self._collection_name
+            if redis_client.get(collection_exist_cache_key):
+                return
+
+            if self._client.check_table_exists(self._collection_name):
+                return
+
+            self.delete()
+
+            cols = [
+                Column("id", String(36), primary_key=True, autoincrement=False),
+                Column("vector", VECTOR(self._vec_dim)),
+                Column("text", LONGTEXT),
+                Column("metadata", JSON),
+            ]
+            vidx_params = self._client.prepare_index_params()
+            vidx_params.add_index(
+                field_name="vector",
+                index_type=OCEANBASE_SUPPORTED_VECTOR_INDEX_TYPE,
+                index_name="vector_index",
+                metric_type=DEFAULT_OCEANBASE_VECTOR_METRIC_TYPE,
+                params=DEFAULT_OCEANBASE_HNSW_BUILD_PARAM,
+            )
+
+            self._client.create_table_with_index_params(
+                table_name=self._collection_name,
+                columns=cols,
+                vidxs=vidx_params,
+            )
+            vals = []
+            params = self._client.perform_raw_text_sql("SHOW PARAMETERS LIKE '%ob_vector_memory_limit_percentage%'")
+            for row in params:
+                val = int(row[6])
+                vals.append(val)
+            if len(vals) == 0:
+                raise ValueError("ob_vector_memory_limit_percentage not found in parameters.")
+            if any(val == 0 for val in vals):
+                try:
+                    self._client.perform_raw_text_sql("ALTER SYSTEM SET ob_vector_memory_limit_percentage = 30")
+                except Exception as e:
+                    raise Exception(
+                        "Failed to set ob_vector_memory_limit_percentage. 
" + + "Maybe the database user has insufficient privilege.", + e, + ) + redis_client.set(collection_exist_cache_key, 1, ex=3600) + + def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): + ids = self._get_uuids(documents) + for id, doc, emb in zip(ids, documents, embeddings): + self._client.insert( + table_name=self._collection_name, + data={ + "id": id, + "vector": emb, + "text": doc.page_content, + "metadata": doc.metadata, + }, + ) + + def text_exists(self, id: str) -> bool: + cur = self._client.get(table_name=self._collection_name, id=id) + return cur.rowcount != 0 + + def delete_by_ids(self, ids: list[str]) -> None: + self._client.delete(table_name=self._collection_name, ids=ids) + + def get_ids_by_metadata_field(self, key: str, value: str) -> list[str]: + cur = self._client.get( + table_name=self._collection_name, + where_clause=f"metadata->>'$.{key}' = '{value}'", + output_column_name=["id"], + ) + return [row[0] for row in cur] + + def delete_by_metadata_field(self, key: str, value: str) -> None: + ids = self.get_ids_by_metadata_field(key, value) + self.delete_by_ids(ids) + + def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: + return [] + + def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: + ef_search = kwargs.get("ef_search", self._hnsw_ef_search) + if ef_search != self._hnsw_ef_search: + self._client.set_ob_hnsw_ef_search(ef_search) + self._hnsw_ef_search = ef_search + topk = kwargs.get("top_k", 10) + cur = self._client.ann_search( + table_name=self._collection_name, + vec_column_name="vector", + vec_data=query_vector, + topk=topk, + distance_func=func.l2_distance, + output_column_names=["text", "metadata"], + with_dist=True, + ) + docs = [] + for text, metadata, distance in cur: + metadata = json.loads(metadata) + metadata["score"] = 1 - distance / math.sqrt(2) + docs.append( + Document( + page_content=text, + metadata=metadata, + ) + ) + return docs + + def delete(self) -> None: + self._client.drop_table_if_exist(self._collection_name) + + +class OceanBaseVectorFactory(AbstractVectorFactory): + def init_vector( + self, + dataset: Dataset, + attributes: list, + embeddings: Embeddings, + ) -> BaseVector: + if dataset.index_struct_dict: + class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"] + collection_name = class_prefix.lower() + else: + dataset_id = dataset.id + collection_name = Dataset.gen_collection_name_by_id(dataset_id).lower() + dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.OCEANBASE, collection_name)) + return OceanBaseVector( + collection_name, + OceanBaseVectorConfig( + host=dify_config.OCEANBASE_VECTOR_HOST, + port=dify_config.OCEANBASE_VECTOR_PORT, + user=dify_config.OCEANBASE_VECTOR_USER, + password=(dify_config.OCEANBASE_VECTOR_PASSWORD or ""), + database=dify_config.OCEANBASE_VECTOR_DATABASE, + ), + ) diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/__init__.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_entities.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_entities.py new file mode 100644 index 0000000000..1e62b3c589 --- /dev/null +++ b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_entities.py @@ -0,0 +1,17 @@ +from typing import Optional + +from pydantic import BaseModel + + +class ClusterEntity(BaseModel): + """ + Model Config Entity. 
+ """ + + name: str + cluster_id: str + displayName: str + region: str + spendingLimit: Optional[int] = 1000 + version: str + createdBy: str diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py new file mode 100644 index 0000000000..a38f84636e --- /dev/null +++ b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py @@ -0,0 +1,526 @@ +import json +import os +import uuid +from collections.abc import Generator, Iterable, Sequence +from itertools import islice +from typing import TYPE_CHECKING, Any, Optional, Union, cast + +import qdrant_client +import requests +from flask import current_app +from pydantic import BaseModel +from qdrant_client.http import models as rest +from qdrant_client.http.models import ( + FilterSelector, + HnswConfigDiff, + PayloadSchemaType, + TextIndexParams, + TextIndexType, + TokenizerType, +) +from qdrant_client.local.qdrant_local import QdrantLocal +from requests.auth import HTTPDigestAuth + +from configs import dify_config +from core.rag.datasource.vdb.field import Field +from core.rag.datasource.vdb.tidb_on_qdrant.tidb_service import TidbService +from core.rag.datasource.vdb.vector_base import BaseVector +from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory +from core.rag.datasource.vdb.vector_type import VectorType +from core.rag.embedding.embedding_base import Embeddings +from core.rag.models.document import Document +from extensions.ext_database import db +from extensions.ext_redis import redis_client +from models.dataset import Dataset, TidbAuthBinding + +if TYPE_CHECKING: + from qdrant_client import grpc # noqa + from qdrant_client.conversions import common_types + from qdrant_client.http import models as rest + + DictFilter = dict[str, Union[str, int, bool, dict, list]] + MetadataFilter = Union[DictFilter, common_types.Filter] + + +class TidbOnQdrantConfig(BaseModel): + endpoint: str + api_key: Optional[str] = None + timeout: float = 20 + root_path: Optional[str] = None + grpc_port: int = 6334 + prefer_grpc: bool = False + + def to_qdrant_params(self): + if self.endpoint and self.endpoint.startswith("path:"): + path = self.endpoint.replace("path:", "") + if not os.path.isabs(path): + path = os.path.join(self.root_path, path) + + return {"path": path} + else: + return { + "url": self.endpoint, + "api_key": self.api_key, + "timeout": self.timeout, + "verify": False, + "grpc_port": self.grpc_port, + "prefer_grpc": self.prefer_grpc, + } + + +class TidbConfig(BaseModel): + api_url: str + public_key: str + private_key: str + + +class TidbOnQdrantVector(BaseVector): + def __init__(self, collection_name: str, group_id: str, config: TidbOnQdrantConfig, distance_func: str = "Cosine"): + super().__init__(collection_name) + self._client_config = config + self._client = qdrant_client.QdrantClient(**self._client_config.to_qdrant_params()) + self._distance_func = distance_func.upper() + self._group_id = group_id + + def get_type(self) -> str: + return VectorType.TIDB_ON_QDRANT + + def to_index_struct(self) -> dict: + return {"type": self.get_type(), "vector_store": {"class_prefix": self._collection_name}} + + def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs): + if texts: + # get embedding vector size + vector_size = len(embeddings[0]) + # get collection name + collection_name = self._collection_name + # create collection + self.create_collection(collection_name, vector_size) + + self.add_texts(texts, 
embeddings, **kwargs) + + def create_collection(self, collection_name: str, vector_size: int): + lock_name = "vector_indexing_lock_{}".format(collection_name) + with redis_client.lock(lock_name, timeout=20): + collection_exist_cache_key = "vector_indexing_{}".format(self._collection_name) + if redis_client.get(collection_exist_cache_key): + return + collection_name = collection_name or uuid.uuid4().hex + all_collection_name = [] + collections_response = self._client.get_collections() + collection_list = collections_response.collections + for collection in collection_list: + all_collection_name.append(collection.name) + if collection_name not in all_collection_name: + from qdrant_client.http import models as rest + + vectors_config = rest.VectorParams( + size=vector_size, + distance=rest.Distance[self._distance_func], + ) + hnsw_config = HnswConfigDiff( + m=0, + payload_m=16, + ef_construct=100, + full_scan_threshold=10000, + max_indexing_threads=0, + on_disk=False, + ) + self._client.recreate_collection( + collection_name=collection_name, + vectors_config=vectors_config, + hnsw_config=hnsw_config, + timeout=int(self._client_config.timeout), + ) + + # create group_id payload index + self._client.create_payload_index( + collection_name, Field.GROUP_KEY.value, field_schema=PayloadSchemaType.KEYWORD + ) + # create doc_id payload index + self._client.create_payload_index( + collection_name, Field.DOC_ID.value, field_schema=PayloadSchemaType.KEYWORD + ) + # create full text index + text_index_params = TextIndexParams( + type=TextIndexType.TEXT, + tokenizer=TokenizerType.MULTILINGUAL, + min_token_len=2, + max_token_len=20, + lowercase=True, + ) + self._client.create_payload_index( + collection_name, Field.CONTENT_KEY.value, field_schema=text_index_params + ) + redis_client.set(collection_exist_cache_key, 1, ex=3600) + + def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): + uuids = self._get_uuids(documents) + texts = [d.page_content for d in documents] + metadatas = [d.metadata for d in documents] + + added_ids = [] + for batch_ids, points in self._generate_rest_batches(texts, embeddings, metadatas, uuids, 64, self._group_id): + self._client.upsert(collection_name=self._collection_name, points=points) + added_ids.extend(batch_ids) + + return added_ids + + def _generate_rest_batches( + self, + texts: Iterable[str], + embeddings: list[list[float]], + metadatas: Optional[list[dict]] = None, + ids: Optional[Sequence[str]] = None, + batch_size: int = 64, + group_id: Optional[str] = None, + ) -> Generator[tuple[list[str], list[rest.PointStruct]], None, None]: + from qdrant_client.http import models as rest + + texts_iterator = iter(texts) + embeddings_iterator = iter(embeddings) + metadatas_iterator = iter(metadatas or []) + ids_iterator = iter(ids or [uuid.uuid4().hex for _ in iter(texts)]) + while batch_texts := list(islice(texts_iterator, batch_size)): + # Take the corresponding metadata and id for each text in a batch + batch_metadatas = list(islice(metadatas_iterator, batch_size)) or None + batch_ids = list(islice(ids_iterator, batch_size)) + + # Generate the embeddings for all the texts in a batch + batch_embeddings = list(islice(embeddings_iterator, batch_size)) + + points = [ + rest.PointStruct( + id=point_id, + vector=vector, + payload=payload, + ) + for point_id, vector, payload in zip( + batch_ids, + batch_embeddings, + self._build_payloads( + batch_texts, + batch_metadatas, + Field.CONTENT_KEY.value, + Field.METADATA_KEY.value, + group_id, + 
Field.GROUP_KEY.value, + ), + ) + ] + + yield batch_ids, points + + @classmethod + def _build_payloads( + cls, + texts: Iterable[str], + metadatas: Optional[list[dict]], + content_payload_key: str, + metadata_payload_key: str, + group_id: str, + group_payload_key: str, + ) -> list[dict]: + payloads = [] + for i, text in enumerate(texts): + if text is None: + raise ValueError( + "At least one of the texts is None. Please remove it before " + "calling .from_texts or .add_texts on Qdrant instance." + ) + metadata = metadatas[i] if metadatas is not None else None + payloads.append({content_payload_key: text, metadata_payload_key: metadata, group_payload_key: group_id}) + + return payloads + + def delete_by_metadata_field(self, key: str, value: str): + from qdrant_client.http import models + from qdrant_client.http.exceptions import UnexpectedResponse + + try: + filter = models.Filter( + must=[ + models.FieldCondition( + key=f"metadata.{key}", + match=models.MatchValue(value=value), + ), + ], + ) + + self._reload_if_needed() + + self._client.delete( + collection_name=self._collection_name, + points_selector=FilterSelector(filter=filter), + ) + except UnexpectedResponse as e: + # Collection does not exist, so return + if e.status_code == 404: + return + # Some other error occurred, so re-raise the exception + else: + raise e + + def delete(self): + from qdrant_client.http.exceptions import UnexpectedResponse + + try: + self._client.delete_collection(collection_name=self._collection_name) + except UnexpectedResponse as e: + # Collection does not exist, so return + if e.status_code == 404: + return + # Some other error occurred, so re-raise the exception + else: + raise e + + def delete_by_ids(self, ids: list[str]) -> None: + from qdrant_client.http import models + from qdrant_client.http.exceptions import UnexpectedResponse + + for node_id in ids: + try: + filter = models.Filter( + must=[ + models.FieldCondition( + key="metadata.doc_id", + match=models.MatchValue(value=node_id), + ), + ], + ) + self._client.delete( + collection_name=self._collection_name, + points_selector=FilterSelector(filter=filter), + ) + except UnexpectedResponse as e: + # Collection does not exist, so return + if e.status_code == 404: + return + # Some other error occurred, so re-raise the exception + else: + raise e + + def text_exists(self, id: str) -> bool: + all_collection_name = [] + collections_response = self._client.get_collections() + collection_list = collections_response.collections + for collection in collection_list: + all_collection_name.append(collection.name) + if self._collection_name not in all_collection_name: + return False + response = self._client.retrieve(collection_name=self._collection_name, ids=[id]) + + return len(response) > 0 + + def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: + from qdrant_client.http import models + + filter = models.Filter( + must=[ + models.FieldCondition( + key="group_id", + match=models.MatchValue(value=self._group_id), + ), + ], + ) + results = self._client.search( + collection_name=self._collection_name, + query_vector=query_vector, + query_filter=filter, + limit=kwargs.get("top_k", 4), + with_payload=True, + with_vectors=True, + score_threshold=kwargs.get("score_threshold", 0.0), + ) + docs = [] + for result in results: + metadata = result.payload.get(Field.METADATA_KEY.value) or {} + # duplicate check score threshold + score_threshold = kwargs.get("score_threshold") or 0.0 + if result.score > score_threshold: + metadata["score"] 
= result.score + doc = Document( + page_content=result.payload.get(Field.CONTENT_KEY.value), + metadata=metadata, + ) + docs.append(doc) + # Sort the documents by score in descending order + docs = sorted(docs, key=lambda x: x.metadata["score"], reverse=True) + return docs + + def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: + """Return docs most similar by bm25. + Returns: + List of documents most similar to the query text and distance for each. + """ + from qdrant_client.http import models + + scroll_filter = models.Filter( + must=[ + models.FieldCondition( + key="page_content", + match=models.MatchText(text=query), + ) + ] + ) + response = self._client.scroll( + collection_name=self._collection_name, + scroll_filter=scroll_filter, + limit=kwargs.get("top_k", 2), + with_payload=True, + with_vectors=True, + ) + results = response[0] + documents = [] + for result in results: + if result: + document = self._document_from_scored_point(result, Field.CONTENT_KEY.value, Field.METADATA_KEY.value) + document.metadata["vector"] = result.vector + documents.append(document) + + return documents + + def _reload_if_needed(self): + if isinstance(self._client, QdrantLocal): + self._client = cast(QdrantLocal, self._client) + self._client._load() + + @classmethod + def _document_from_scored_point( + cls, + scored_point: Any, + content_payload_key: str, + metadata_payload_key: str, + ) -> Document: + return Document( + page_content=scored_point.payload.get(content_payload_key), + metadata=scored_point.payload.get(metadata_payload_key) or {}, + ) + + +class TidbOnQdrantVectorFactory(AbstractVectorFactory): + def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> TidbOnQdrantVector: + tidb_auth_binding = ( + db.session.query(TidbAuthBinding).filter(TidbAuthBinding.tenant_id == dataset.tenant_id).one_or_none() + ) + if not tidb_auth_binding: + idle_tidb_auth_binding = ( + db.session.query(TidbAuthBinding) + .filter(TidbAuthBinding.active == False, TidbAuthBinding.status == "ACTIVE") + .limit(1) + .one_or_none() + ) + if idle_tidb_auth_binding: + idle_tidb_auth_binding.active = True + idle_tidb_auth_binding.tenant_id = dataset.tenant_id + db.session.commit() + TIDB_ON_QDRANT_API_KEY = f"{idle_tidb_auth_binding.account}:{idle_tidb_auth_binding.password}" + else: + with redis_client.lock("create_tidb_serverless_cluster_lock", timeout=900): + tidb_auth_binding = ( + db.session.query(TidbAuthBinding) + .filter(TidbAuthBinding.tenant_id == dataset.tenant_id) + .one_or_none() + ) + if tidb_auth_binding: + TIDB_ON_QDRANT_API_KEY = f"{tidb_auth_binding.account}:{tidb_auth_binding.password}" + + else: + new_cluster = TidbService.create_tidb_serverless_cluster( + dify_config.TIDB_PROJECT_ID, + dify_config.TIDB_API_URL, + dify_config.TIDB_IAM_API_URL, + dify_config.TIDB_PUBLIC_KEY, + dify_config.TIDB_PRIVATE_KEY, + dify_config.TIDB_REGION, + ) + new_tidb_auth_binding = TidbAuthBinding( + cluster_id=new_cluster["cluster_id"], + cluster_name=new_cluster["cluster_name"], + account=new_cluster["account"], + password=new_cluster["password"], + tenant_id=dataset.tenant_id, + active=True, + status="ACTIVE", + ) + db.session.add(new_tidb_auth_binding) + db.session.commit() + TIDB_ON_QDRANT_API_KEY = f"{new_tidb_auth_binding.account}:{new_tidb_auth_binding.password}" + + else: + TIDB_ON_QDRANT_API_KEY = f"{tidb_auth_binding.account}:{tidb_auth_binding.password}" + + if dataset.index_struct_dict: + class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"] 
+ collection_name = class_prefix + else: + dataset_id = dataset.id + collection_name = Dataset.gen_collection_name_by_id(dataset_id) + dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.TIDB_ON_QDRANT, collection_name)) + + config = current_app.config + + return TidbOnQdrantVector( + collection_name=collection_name, + group_id=dataset.id, + config=TidbOnQdrantConfig( + endpoint=dify_config.TIDB_ON_QDRANT_URL, + api_key=TIDB_ON_QDRANT_API_KEY, + root_path=config.root_path, + timeout=dify_config.TIDB_ON_QDRANT_CLIENT_TIMEOUT, + grpc_port=dify_config.TIDB_ON_QDRANT_GRPC_PORT, + prefer_grpc=dify_config.TIDB_ON_QDRANT_GRPC_ENABLED, + ), + ) + + def create_tidb_serverless_cluster(self, tidb_config: TidbConfig, display_name: str, region: str): + """ + Creates a new TiDB Serverless cluster. + :param tidb_config: The configuration for the TiDB Cloud API. + :param display_name: The user-friendly display name of the cluster (required). + :param region: The region where the cluster will be created (required). + + :return: The response from the API. + """ + region_object = { + "name": region, + } + + labels = { + "tidb.cloud/project": "1372813089454548012", + } + cluster_data = {"displayName": display_name, "region": region_object, "labels": labels} + + response = requests.post( + f"{tidb_config.api_url}/clusters", + json=cluster_data, + auth=HTTPDigestAuth(tidb_config.public_key, tidb_config.private_key), + ) + + if response.status_code == 200: + return response.json() + else: + response.raise_for_status() + + def change_tidb_serverless_root_password(self, tidb_config: TidbConfig, cluster_id: str, new_password: str): + """ + Changes the root password of a specific TiDB Serverless cluster. + + :param tidb_config: The configuration for the TiDB Cloud API. + :param cluster_id: The ID of the cluster for which the password is to be changed (required). + :param new_password: The new password for the root user (required). + :return: The response from the API. + """ + + body = {"password": new_password} + + response = requests.put( + f"{tidb_config.api_url}/clusters/{cluster_id}/password", + json=body, + auth=HTTPDigestAuth(tidb_config.public_key, tidb_config.private_key), + ) + + if response.status_code == 200: + return response.json() + else: + response.raise_for_status() diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py new file mode 100644 index 0000000000..a6f3ad7fef --- /dev/null +++ b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py @@ -0,0 +1,251 @@ +import time +import uuid + +import requests +from requests.auth import HTTPDigestAuth + +from configs import dify_config +from extensions.ext_database import db +from extensions.ext_redis import redis_client +from models.dataset import TidbAuthBinding + + +class TidbService: + @staticmethod + def create_tidb_serverless_cluster( + project_id: str, api_url: str, iam_url: str, public_key: str, private_key: str, region: str + ): + """ + Creates a new TiDB Serverless cluster. + :param project_id: The project ID of the TiDB Cloud project (required). + :param api_url: The URL of the TiDB Cloud API (required). + :param iam_url: The URL of the TiDB Cloud IAM API (required). + :param public_key: The public key for the API (required). + :param private_key: The private key for the API (required). + :param display_name: The user-friendly display name of the cluster (required). + :param region: The region where the cluster will be created (required). 
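+        (The display name and root password are generated internally from random UUIDs rather than passed in.)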
+
+        :return: The response from the API.
+        """
+
+        region_object = {
+            "name": region,
+        }
+
+        labels = {
+            "tidb.cloud/project": project_id,
+        }
+
+        spending_limit = {
+            "monthly": dify_config.TIDB_SPEND_LIMIT,
+        }
+        password = str(uuid.uuid4()).replace("-", "")[:16]
+        display_name = str(uuid.uuid4()).replace("-", "")[:16]
+        cluster_data = {
+            "displayName": display_name,
+            "region": region_object,
+            "labels": labels,
+            "spendingLimit": spending_limit,
+            "rootPassword": password,
+        }
+
+        response = requests.post(f"{api_url}/clusters", json=cluster_data, auth=HTTPDigestAuth(public_key, private_key))
+
+        if response.status_code == 200:
+            response_data = response.json()
+            cluster_id = response_data["clusterId"]
+            retry_count = 0
+            max_retries = 30
+            while retry_count < max_retries:
+                cluster_response = TidbService.get_tidb_serverless_cluster(api_url, public_key, private_key, cluster_id)
+                if cluster_response["state"] == "ACTIVE":
+                    user_prefix = cluster_response["userPrefix"]
+                    return {
+                        "cluster_id": cluster_id,
+                        "cluster_name": display_name,
+                        "account": f"{user_prefix}.root",
+                        "password": password,
+                    }
+                time.sleep(30)  # wait 30 seconds before retrying
+                retry_count += 1
+        else:
+            response.raise_for_status()
+
+    @staticmethod
+    def delete_tidb_serverless_cluster(api_url: str, public_key: str, private_key: str, cluster_id: str):
+        """
+        Deletes a specific TiDB Serverless cluster.
+
+        :param api_url: The URL of the TiDB Cloud API (required).
+        :param public_key: The public key for the API (required).
+        :param private_key: The private key for the API (required).
+        :param cluster_id: The ID of the cluster to be deleted (required).
+        :return: The response from the API.
+        """
+
+        response = requests.delete(f"{api_url}/clusters/{cluster_id}", auth=HTTPDigestAuth(public_key, private_key))
+
+        if response.status_code == 200:
+            return response.json()
+        else:
+            response.raise_for_status()
+
+    @staticmethod
+    def get_tidb_serverless_cluster(api_url: str, public_key: str, private_key: str, cluster_id: str):
+        """
+        Gets a specific TiDB Serverless cluster.
+
+        :param api_url: The URL of the TiDB Cloud API (required).
+        :param public_key: The public key for the API (required).
+        :param private_key: The private key for the API (required).
+        :param cluster_id: The ID of the cluster to be retrieved (required).
+        :return: The response from the API.
+        """
+
+        response = requests.get(f"{api_url}/clusters/{cluster_id}", auth=HTTPDigestAuth(public_key, private_key))
+
+        if response.status_code == 200:
+            return response.json()
+        else:
+            response.raise_for_status()
+
+    @staticmethod
+    def change_tidb_serverless_root_password(
+        api_url: str, public_key: str, private_key: str, cluster_id: str, account: str, new_password: str
+    ):
+        """
+        Changes the root password of a specific TiDB Serverless cluster.
+
+        :param api_url: The URL of the TiDB Cloud API (required).
+        :param public_key: The public key for the API (required).
+        :param private_key: The private key for the API (required).
+        :param cluster_id: The ID of the cluster for which the password is to be changed (required).
+        :param account: The account for which the password is to be changed (required).
+        :param new_password: The new password for the root user (required).
+        :return: The response from the API.
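+        (The account keeps the built-in role_admin role; any custom roles are cleared.)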
+ """ + + body = {"password": new_password, "builtinRole": "role_admin", "customRoles": []} + + response = requests.patch( + f"{api_url}/clusters/{cluster_id}/sqlUsers/{account}", + json=body, + auth=HTTPDigestAuth(public_key, private_key), + ) + + if response.status_code == 200: + return response.json() + else: + response.raise_for_status() + + @staticmethod + def batch_update_tidb_serverless_cluster_status( + tidb_serverless_list: list[TidbAuthBinding], + project_id: str, + api_url: str, + iam_url: str, + public_key: str, + private_key: str, + ) -> list[dict]: + """ + Update the status of a new TiDB Serverless cluster. + :param project_id: The project ID of the TiDB Cloud project (required). + :param api_url: The URL of the TiDB Cloud API (required). + :param iam_url: The URL of the TiDB Cloud IAM API (required). + :param public_key: The public key for the API (required). + :param private_key: The private key for the API (required). + :param display_name: The user-friendly display name of the cluster (required). + :param region: The region where the cluster will be created (required). + + :return: The response from the API. + """ + clusters = [] + tidb_serverless_list_map = {item.cluster_id: item for item in tidb_serverless_list} + cluster_ids = [item.cluster_id for item in tidb_serverless_list] + params = {"clusterIds": cluster_ids, "view": "FULL"} + response = requests.get( + f"{api_url}/clusters:batchGet", params=params, auth=HTTPDigestAuth(public_key, private_key) + ) + + if response.status_code == 200: + response_data = response.json() + cluster_infos = [] + for item in response_data["clusters"]: + state = item["state"] + userPrefix = item["userPrefix"] + if state == "ACTIVE" and len(userPrefix) > 0: + cluster_info = tidb_serverless_list_map[item["clusterId"]] + cluster_info.status = "ACTIVE" + cluster_info.account = f"{userPrefix}.root" + db.session.add(cluster_info) + db.session.commit() + else: + response.raise_for_status() + + @staticmethod + def batch_create_tidb_serverless_cluster( + batch_size: int, project_id: str, api_url: str, iam_url: str, public_key: str, private_key: str, region: str + ) -> list[dict]: + """ + Creates a new TiDB Serverless cluster. + :param project_id: The project ID of the TiDB Cloud project (required). + :param api_url: The URL of the TiDB Cloud API (required). + :param iam_url: The URL of the TiDB Cloud IAM API (required). + :param public_key: The public key for the API (required). + :param private_key: The private key for the API (required). + :param display_name: The user-friendly display name of the cluster (required). + :param region: The region where the cluster will be created (required). + + :return: The response from the API. 
+ """ + clusters = [] + for _ in range(batch_size): + region_object = { + "name": region, + } + + labels = { + "tidb.cloud/project": project_id, + } + + spending_limit = { + "monthly": dify_config.TIDB_SPEND_LIMIT, + } + password = str(uuid.uuid4()).replace("-", "")[:16] + display_name = str(uuid.uuid4()).replace("-", "") + cluster_data = { + "cluster": { + "displayName": display_name, + "region": region_object, + "labels": labels, + "spendingLimit": spending_limit, + "rootPassword": password, + } + } + cache_key = f"tidb_serverless_cluster_password:{display_name}" + redis_client.setex(cache_key, 3600, password) + clusters.append(cluster_data) + + request_body = {"requests": clusters} + response = requests.post( + f"{api_url}/clusters:batchCreate", json=request_body, auth=HTTPDigestAuth(public_key, private_key) + ) + + if response.status_code == 200: + response_data = response.json() + cluster_infos = [] + for item in response_data["clusters"]: + cache_key = f"tidb_serverless_cluster_password:{item['displayName']}" + password = redis_client.get(cache_key) + if not password: + continue + cluster_info = { + "cluster_id": item["clusterId"], + "cluster_name": item["displayName"], + "account": "root", + "password": password.decode("utf-8"), + } + cluster_infos.append(cluster_info) + return cluster_infos + else: + response.raise_for_status() diff --git a/api/core/rag/datasource/vdb/upstash/__init__.py b/api/core/rag/datasource/vdb/upstash/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/core/rag/datasource/vdb/upstash/upstash_vector.py b/api/core/rag/datasource/vdb/upstash/upstash_vector.py new file mode 100644 index 0000000000..df1b550b40 --- /dev/null +++ b/api/core/rag/datasource/vdb/upstash/upstash_vector.py @@ -0,0 +1,129 @@ +import json +from typing import Any +from uuid import uuid4 + +from pydantic import BaseModel, model_validator +from upstash_vector import Index, Vector + +from configs import dify_config +from core.rag.datasource.vdb.vector_base import BaseVector +from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory +from core.rag.datasource.vdb.vector_type import VectorType +from core.rag.embedding.embedding_base import Embeddings +from core.rag.models.document import Document +from models.dataset import Dataset + + +class UpstashVectorConfig(BaseModel): + url: str + token: str + + @model_validator(mode="before") + @classmethod + def validate_config(cls, values: dict) -> dict: + if not values["url"]: + raise ValueError("Upstash URL is required") + if not values["token"]: + raise ValueError("Upstash Token is required") + return values + + +class UpstashVector(BaseVector): + def __init__(self, collection_name: str, config: UpstashVectorConfig): + super().__init__(collection_name) + self._table_name = collection_name + self.index = Index(url=config.url, token=config.token) + + def _get_index_dimension(self) -> int: + index_info = self.index.info() + if index_info and index_info.dimension: + return index_info.dimension + else: + return 1536 + + def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs): + self.add_texts(texts, embeddings) + + def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): + vectors = [ + Vector( + id=str(uuid4()), + vector=embedding, + metadata=doc.metadata, + data=doc.page_content, + ) + for doc, embedding in zip(documents, embeddings) + ] + self.index.upsert(vectors=vectors) + + def text_exists(self, id: str) -> bool: + response = 
self.get_ids_by_metadata_field("doc_id", id)
+        return len(response) > 0
+
+    def delete_by_ids(self, ids: list[str]) -> None:
+        item_ids = []
+        for doc_id in ids:
+            matched_ids = self.get_ids_by_metadata_field("doc_id", doc_id)
+            if matched_ids:
+                item_ids += matched_ids
+        self._delete_by_ids(ids=item_ids)
+
+    def _delete_by_ids(self, ids: list[str]) -> None:
+        if ids:
+            self.index.delete(ids=ids)
+
+    def get_ids_by_metadata_field(self, key: str, value: str) -> list[str]:
+        # Upstash Vector offers no metadata-only scan, so issue a similarity
+        # query with a dummy vector and let the metadata filter select records.
+        query_result = self.index.query(
+            vector=[1.001 * i for i in range(self._get_index_dimension())],
+            include_metadata=True,
+            top_k=1000,
+            filter=f"{key} = '{value}'",
+        )
+        return [result.id for result in query_result]
+
+    def delete_by_metadata_field(self, key: str, value: str) -> None:
+        ids = self.get_ids_by_metadata_field(key, value)
+        if ids:
+            self._delete_by_ids(ids)
+
+    def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
+        top_k = kwargs.get("top_k", 4)
+        result = self.index.query(vector=query_vector, top_k=top_k, include_metadata=True, include_data=True)
+        docs = []
+        score_threshold = float(kwargs.get("score_threshold") or 0.0)
+        for record in result:
+            metadata = record.metadata
+            text = record.data
+            score = record.score
+            metadata["score"] = score
+            if score > score_threshold:
+                docs.append(Document(page_content=text, metadata=metadata))
+        return docs
+
+    def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
+        return []
+
+    def delete(self) -> None:
+        self.index.reset()
+
+    def get_type(self) -> str:
+        return VectorType.UPSTASH
+
+
+class UpstashVectorFactory(AbstractVectorFactory):
+    def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> UpstashVector:
+        if dataset.index_struct_dict:
+            class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"]
+            collection_name = class_prefix.lower()
+        else:
+            dataset_id = dataset.id
+            collection_name = Dataset.gen_collection_name_by_id(dataset_id).lower()
+            dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.UPSTASH, collection_name))
+
+        return UpstashVector(
+            collection_name=collection_name,
+            config=UpstashVectorConfig(
+                url=dify_config.UPSTASH_VECTOR_URL,
+                token=dify_config.UPSTASH_VECTOR_TOKEN,
+            ),
+        )
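Because Upstash Vector exposes no metadata-only listing API, the helper above runs a similarity query with a throwaway vector and lets the `filter` expression do the selection. A standalone sketch of the same pattern against the upstash_vector SDK; the URL, token, and 1536 dimension are placeholder assumptions:

    from upstash_vector import Index

    # Placeholder credentials; a real URL/token would come from configuration.
    index = Index(url="https://example-index.upstash.io", token="<UPSTASH_TOKEN>")

    # The query vector is a dummy; the metadata filter selects the records.
    hits = index.query(
        vector=[0.1] * 1536,
        top_k=1000,
        include_metadata=True,
        filter="doc_id = 'example-doc-id'",
    )
    matched_ids = [hit.id for hit in hits]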
diff --git a/api/core/rag/datasource/vdb/vector_factory.py b/api/core/rag/datasource/vdb/vector_factory.py
index fb956a16ed..c8cb007ae8 100644
--- a/api/core/rag/datasource/vdb/vector_factory.py
+++ b/api/core/rag/datasource/vdb/vector_factory.py
@@ -9,8 +9,9 @@ from core.rag.datasource.vdb.vector_type import VectorType
 from core.rag.embedding.cached_embedding import CacheEmbedding
 from core.rag.embedding.embedding_base import Embeddings
 from core.rag.models.document import Document
+from extensions.ext_database import db
 from extensions.ext_redis import redis_client
-from models.dataset import Dataset
+from models.dataset import Dataset, Whitelist
 
 
 class AbstractVectorFactory(ABC):
@@ -35,8 +36,18 @@ class Vector:
     def _init_vector(self) -> BaseVector:
         vector_type = dify_config.VECTOR_STORE
+
         if self._dataset.index_struct_dict:
             vector_type = self._dataset.index_struct_dict["type"]
+        else:
+            if dify_config.VECTOR_STORE_WHITELIST_ENABLE:
+                whitelist = (
+                    db.session.query(Whitelist)
+                    .filter(Whitelist.tenant_id == self._dataset.tenant_id, Whitelist.category == "vector_db")
+                    .one_or_none()
+                )
+                if whitelist:
+                    vector_type = VectorType.TIDB_ON_QDRANT
 
         if not vector_type:
             raise ValueError("Vector store must be specified.")
@@ -103,6 +114,10 @@ class Vector:
                 from core.rag.datasource.vdb.analyticdb.analyticdb_vector import AnalyticdbVectorFactory
 
                 return AnalyticdbVectorFactory
+            case VectorType.COUCHBASE:
+                from core.rag.datasource.vdb.couchbase.couchbase_vector import CouchbaseVectorFactory
+
+                return CouchbaseVectorFactory
             case VectorType.BAIDU:
                 from core.rag.datasource.vdb.baidu.baidu_vector import BaiduVectorFactory
@@ -111,6 +126,18 @@ class Vector:
                 from core.rag.datasource.vdb.vikingdb.vikingdb_vector import VikingDBVectorFactory
 
                 return VikingDBVectorFactory
+            case VectorType.UPSTASH:
+                from core.rag.datasource.vdb.upstash.upstash_vector import UpstashVectorFactory
+
+                return UpstashVectorFactory
+            case VectorType.TIDB_ON_QDRANT:
+                from core.rag.datasource.vdb.tidb_on_qdrant.tidb_on_qdrant_vector import TidbOnQdrantVectorFactory
+
+                return TidbOnQdrantVectorFactory
+            case VectorType.OCEANBASE:
+                from core.rag.datasource.vdb.oceanbase.oceanbase_vector import OceanBaseVectorFactory
+
+                return OceanBaseVectorFactory
             case _:
                 raise ValueError(f"Vector store {vector_type} is not supported.")
diff --git a/api/core/rag/datasource/vdb/vector_type.py b/api/core/rag/datasource/vdb/vector_type.py
index b4d604a080..e3b37ece88 100644
--- a/api/core/rag/datasource/vdb/vector_type.py
+++ b/api/core/rag/datasource/vdb/vector_type.py
@@ -16,5 +16,9 @@ class VectorType(str, Enum):
     TENCENT = "tencent"
     ORACLE = "oracle"
     ELASTICSEARCH = "elasticsearch"
+    COUCHBASE = "couchbase"
     BAIDU = "baidu"
     VIKINGDB = "vikingdb"
+    UPSTASH = "upstash"
+    TIDB_ON_QDRANT = "tidb_on_qdrant"
+    OCEANBASE = "oceanbase"
diff --git a/api/core/rag/extractor/extract_processor.py b/api/core/rag/extractor/extract_processor.py
index 706a42b735..a0b1aa4cef 100644
--- a/api/core/rag/extractor/extract_processor.py
+++ b/api/core/rag/extractor/extract_processor.py
@@ -105,7 +105,7 @@ class ExtractProcessor:
                     extractor = PdfExtractor(file_path)
                 elif file_extension in {".md", ".markdown"}:
                     extractor = (
-                        UnstructuredMarkdownExtractor(file_path, unstructured_api_url)
+                        UnstructuredMarkdownExtractor(file_path, unstructured_api_url, unstructured_api_key)
                         if is_automatic
                         else MarkdownExtractor(file_path, autodetect_encoding=True)
                     )
@@ -116,17 +116,19 @@ class ExtractProcessor:
                 elif file_extension == ".csv":
                     extractor = CSVExtractor(file_path, autodetect_encoding=True)
                 elif file_extension == ".msg":
-                    extractor = UnstructuredMsgExtractor(file_path, unstructured_api_url)
+                    extractor = UnstructuredMsgExtractor(file_path, unstructured_api_url, unstructured_api_key)
                 elif file_extension == ".eml":
-                    extractor = UnstructuredEmailExtractor(file_path, unstructured_api_url)
+                    extractor = UnstructuredEmailExtractor(file_path, unstructured_api_url, unstructured_api_key)
                 elif file_extension == ".ppt":
                     extractor = UnstructuredPPTExtractor(file_path, unstructured_api_url, unstructured_api_key)
+                    # The API key must be specified up front here,
+                    # because unstructured_api_key is required to parse .ppt documents.
                 elif file_extension == ".pptx":
-                    extractor = UnstructuredPPTXExtractor(file_path, unstructured_api_url)
+                    extractor = UnstructuredPPTXExtractor(file_path, unstructured_api_url, unstructured_api_key)
                 elif file_extension == ".xml":
-                    extractor = UnstructuredXmlExtractor(file_path, unstructured_api_url)
+                    extractor = UnstructuredXmlExtractor(file_path, unstructured_api_url, unstructured_api_key)
                 elif file_extension == ".epub":
-                    extractor = UnstructuredEpubExtractor(file_path, unstructured_api_url)
+                    extractor = UnstructuredEpubExtractor(file_path, unstructured_api_url, unstructured_api_key)
                 else:  # txt
                     extractor = (
diff
--git a/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py b/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py index 34c6811b67..bd669bbad3 100644 --- a/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py @@ -10,24 +10,26 @@ logger = logging.getLogger(__name__) class UnstructuredEmailExtractor(BaseExtractor): - """Load msg files. + """Load eml files. Args: file_path: Path to the file to load. """ - def __init__( - self, - file_path: str, - api_url: str, - ): + def __init__(self, file_path: str, api_url: str, api_key: str): """Initialize with file path.""" self._file_path = file_path self._api_url = api_url + self._api_key = api_key def extract(self) -> list[Document]: - from unstructured.partition.email import partition_email + if self._api_url: + from unstructured.partition.api import partition_via_api - elements = partition_email(filename=self._file_path) + elements = partition_via_api(filename=self._file_path, api_url=self._api_url, api_key=self._api_key) + else: + from unstructured.partition.email import partition_email + + elements = partition_email(filename=self._file_path) # noinspection PyBroadException try: diff --git a/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py b/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py index a41ed3a558..35220b558a 100644 --- a/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py @@ -19,15 +19,23 @@ class UnstructuredEpubExtractor(BaseExtractor): self, file_path: str, api_url: Optional[str] = None, + api_key: Optional[str] = None, ): """Initialize with file path.""" self._file_path = file_path self._api_url = api_url + self._api_key = api_key def extract(self) -> list[Document]: - from unstructured.partition.epub import partition_epub + if self._api_url: + from unstructured.partition.api import partition_via_api + + elements = partition_via_api(filename=self._file_path, api_url=self._api_url, api_key=self._api_key) + else: + from unstructured.partition.epub import partition_epub + + elements = partition_epub(filename=self._file_path, xml_keep_tags=True) - elements = partition_epub(filename=self._file_path, xml_keep_tags=True) from unstructured.chunking.title import chunk_by_title chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) diff --git a/api/core/rag/extractor/unstructured/unstructured_markdown_extractor.py b/api/core/rag/extractor/unstructured/unstructured_markdown_extractor.py index fc3ff10693..4173d4d122 100644 --- a/api/core/rag/extractor/unstructured/unstructured_markdown_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_markdown_extractor.py @@ -24,19 +24,21 @@ class UnstructuredMarkdownExtractor(BaseExtractor): if the specified encoding fails. 
""" - def __init__( - self, - file_path: str, - api_url: str, - ): + def __init__(self, file_path: str, api_url: str, api_key: str): """Initialize with file path.""" self._file_path = file_path self._api_url = api_url + self._api_key = api_key def extract(self) -> list[Document]: - from unstructured.partition.md import partition_md + if self._api_url: + from unstructured.partition.api import partition_via_api - elements = partition_md(filename=self._file_path) + elements = partition_via_api(filename=self._file_path, api_url=self._api_url, api_key=self._api_key) + else: + from unstructured.partition.md import partition_md + + elements = partition_md(filename=self._file_path) from unstructured.chunking.title import chunk_by_title chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) diff --git a/api/core/rag/extractor/unstructured/unstructured_msg_extractor.py b/api/core/rag/extractor/unstructured/unstructured_msg_extractor.py index 8091e83e85..57affb8d36 100644 --- a/api/core/rag/extractor/unstructured/unstructured_msg_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_msg_extractor.py @@ -14,15 +14,21 @@ class UnstructuredMsgExtractor(BaseExtractor): file_path: Path to the file to load. """ - def __init__(self, file_path: str, api_url: str): + def __init__(self, file_path: str, api_url: str, api_key: str): """Initialize with file path.""" self._file_path = file_path self._api_url = api_url + self._api_key = api_key def extract(self) -> list[Document]: - from unstructured.partition.msg import partition_msg + if self._api_url: + from unstructured.partition.api import partition_via_api - elements = partition_msg(filename=self._file_path) + elements = partition_via_api(filename=self._file_path, api_url=self._api_url, api_key=self._api_key) + else: + from unstructured.partition.msg import partition_msg + + elements = partition_msg(filename=self._file_path) from unstructured.chunking.title import chunk_by_title chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) diff --git a/api/core/rag/extractor/unstructured/unstructured_pdf_extractor.py b/api/core/rag/extractor/unstructured/unstructured_pdf_extractor.py new file mode 100644 index 0000000000..dd8a979e70 --- /dev/null +++ b/api/core/rag/extractor/unstructured/unstructured_pdf_extractor.py @@ -0,0 +1,47 @@ +import logging + +from core.rag.extractor.extractor_base import BaseExtractor +from core.rag.models.document import Document + +logger = logging.getLogger(__name__) + + +class UnstructuredPDFExtractor(BaseExtractor): + """Load pdf files. + + + Args: + file_path: Path to the file to load. 
+ + api_url: Unstructured API URL + + api_key: Unstructured API Key + """ + + def __init__(self, file_path: str, api_url: str, api_key: str): + """Initialize with file path.""" + self._file_path = file_path + self._api_url = api_url + self._api_key = api_key + + def extract(self) -> list[Document]: + if self._api_url: + from unstructured.partition.api import partition_via_api + + elements = partition_via_api( + filename=self._file_path, api_url=self._api_url, api_key=self._api_key, strategy="auto" + ) + else: + from unstructured.partition.pdf import partition_pdf + + elements = partition_pdf(filename=self._file_path, strategy="auto") + + from unstructured.chunking.title import chunk_by_title + + chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) + documents = [] + for chunk in chunks: + text = chunk.text.strip() + documents.append(Document(page_content=text)) + + return documents diff --git a/api/core/rag/extractor/unstructured/unstructured_ppt_extractor.py b/api/core/rag/extractor/unstructured/unstructured_ppt_extractor.py index b69394b3b1..0fdcd58b2e 100644 --- a/api/core/rag/extractor/unstructured/unstructured_ppt_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_ppt_extractor.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) class UnstructuredPPTExtractor(BaseExtractor): - """Load msg files. + """Load ppt files. Args: @@ -21,9 +21,12 @@ class UnstructuredPPTExtractor(BaseExtractor): self._api_key = api_key def extract(self) -> list[Document]: - from unstructured.partition.api import partition_via_api + if self._api_url: + from unstructured.partition.api import partition_via_api - elements = partition_via_api(filename=self._file_path, api_url=self._api_url, api_key=self._api_key) + elements = partition_via_api(filename=self._file_path, api_url=self._api_url, api_key=self._api_key) + else: + raise NotImplementedError("Unstructured API Url is not configured") text_by_page = {} for element in elements: page = element.metadata.page_number diff --git a/api/core/rag/extractor/unstructured/unstructured_pptx_extractor.py b/api/core/rag/extractor/unstructured/unstructured_pptx_extractor.py index 6ed4a0dfb3..ab41290fbc 100644 --- a/api/core/rag/extractor/unstructured/unstructured_pptx_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_pptx_extractor.py @@ -7,22 +7,28 @@ logger = logging.getLogger(__name__) class UnstructuredPPTXExtractor(BaseExtractor): - """Load msg files. + """Load pptx files. Args: file_path: Path to the file to load. 
""" - def __init__(self, file_path: str, api_url: str): + def __init__(self, file_path: str, api_url: str, api_key: str): """Initialize with file path.""" self._file_path = file_path self._api_url = api_url + self._api_key = api_key def extract(self) -> list[Document]: - from unstructured.partition.pptx import partition_pptx + if self._api_url: + from unstructured.partition.api import partition_via_api - elements = partition_pptx(filename=self._file_path) + elements = partition_via_api(filename=self._file_path, api_url=self._api_url, api_key=self._api_key) + else: + from unstructured.partition.pptx import partition_pptx + + elements = partition_pptx(filename=self._file_path) text_by_page = {} for element in elements: page = element.metadata.page_number diff --git a/api/core/rag/extractor/unstructured/unstructured_xml_extractor.py b/api/core/rag/extractor/unstructured/unstructured_xml_extractor.py index 3bffc01fbf..ef46ab0e70 100644 --- a/api/core/rag/extractor/unstructured/unstructured_xml_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_xml_extractor.py @@ -7,22 +7,29 @@ logger = logging.getLogger(__name__) class UnstructuredXmlExtractor(BaseExtractor): - """Load msg files. + """Load xml files. Args: file_path: Path to the file to load. """ - def __init__(self, file_path: str, api_url: str): + def __init__(self, file_path: str, api_url: str, api_key: str): """Initialize with file path.""" self._file_path = file_path self._api_url = api_url + self._api_key = api_key def extract(self) -> list[Document]: - from unstructured.partition.xml import partition_xml + if self._api_url: + from unstructured.partition.api import partition_via_api + + elements = partition_via_api(filename=self._file_path, api_url=self._api_url, api_key=self._api_key) + else: + from unstructured.partition.xml import partition_xml + + elements = partition_xml(filename=self._file_path, xml_keep_tags=True) - elements = partition_xml(filename=self._file_path, xml_keep_tags=True) from unstructured.chunking.title import chunk_by_title chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) diff --git a/api/core/rag/extractor/word_extractor.py b/api/core/rag/extractor/word_extractor.py index a5375991b4..ae3c25125c 100644 --- a/api/core/rag/extractor/word_extractor.py +++ b/api/core/rag/extractor/word_extractor.py @@ -234,7 +234,7 @@ class WordExtractor(BaseExtractor): def parse_paragraph(paragraph): paragraph_content = [] for run in paragraph.runs: - if hasattr(run.element, "tag") and isinstance(element.tag, str) and run.element.tag.endswith("r"): + if hasattr(run.element, "tag") and isinstance(run.element.tag, str) and run.element.tag.endswith("r"): drawing_elements = run.element.findall( ".//{http://schemas.openxmlformats.org/wordprocessingml/2006/main}drawing" ) diff --git a/api/core/rag/rerank/rerank_model.py b/api/core/rag/rerank/rerank_model.py index 40ebf0befd..fc82b2080b 100644 --- a/api/core/rag/rerank/rerank_model.py +++ b/api/core/rag/rerank/rerank_model.py @@ -27,18 +27,17 @@ class RerankModelRunner(BaseRerankRunner): :return: """ docs = [] - doc_id = [] + doc_id = set() unique_documents = [] - dify_documents = [item for item in documents if item.provider == "dify"] - external_documents = [item for item in documents if item.provider == "external"] - for document in dify_documents: - if document.metadata["doc_id"] not in doc_id: - doc_id.append(document.metadata["doc_id"]) + for document in documents: + if document.provider == "dify" and document.metadata["doc_id"] 
not in doc_id:
+                doc_id.add(document.metadata["doc_id"])
                 docs.append(document.page_content)
                 unique_documents.append(document)
-        for document in external_documents:
-            docs.append(document.page_content)
-            unique_documents.append(document)
+            elif document.provider == "external":
+                if document not in unique_documents:
+                    docs.append(document.page_content)
+                    unique_documents.append(document)
 
         documents = unique_documents
diff --git a/api/core/rag/rerank/rerank_type.py b/api/core/rag/rerank/rerank_type.py
index d4894e3cc6..d71eb2daa8 100644
--- a/api/core/rag/rerank/rerank_type.py
+++ b/api/core/rag/rerank/rerank_type.py
@@ -1,6 +1,6 @@
 from enum import Enum
 
 
-class RerankMode(Enum):
+class RerankMode(str, Enum):
     RERANKING_MODEL = "reranking_model"
     WEIGHTED_SCORE = "weighted_score"
diff --git a/api/core/rag/retrieval/dataset_retrieval.py b/api/core/rag/retrieval/dataset_retrieval.py
index f344bc1e3e..04c9244263 100644
--- a/api/core/rag/retrieval/dataset_retrieval.py
+++ b/api/core/rag/retrieval/dataset_retrieval.py
@@ -22,6 +22,7 @@ from core.rag.datasource.keyword.jieba.jieba_keyword_table_handler import JiebaK
 from core.rag.datasource.retrieval_service import RetrievalService
 from core.rag.entities.context_entities import DocumentContext
 from core.rag.models.document import Document
+from core.rag.rerank.rerank_type import RerankMode
 from core.rag.retrieval.retrieval_methods import RetrievalMethod
 from core.rag.retrieval.router.multi_dataset_function_call_router import FunctionCallMultiDatasetRouter
 from core.rag.retrieval.router.multi_dataset_react_route import ReactMultiDatasetRouter
@@ -359,10 +360,39 @@ class DatasetRetrieval:
         reranking_enable: bool = True,
         message_id: Optional[str] = None,
     ):
+        if not available_datasets:
+            return []
         threads = []
         all_documents = []
         dataset_ids = [dataset.id for dataset in available_datasets]
-        index_type = None
+        index_type_check = all(
+            item.indexing_technique == available_datasets[0].indexing_technique for item in available_datasets
+        )
+        if not index_type_check and (not reranking_enable or reranking_mode != RerankMode.RERANKING_MODEL):
+            raise ValueError(
+                "The configured knowledge bases use different indexing techniques; please set a reranking model."
+            )
+        index_type = available_datasets[0].indexing_technique
+        if index_type == "high_quality":
+            embedding_model_check = all(
+                item.embedding_model == available_datasets[0].embedding_model for item in available_datasets
+            )
+            embedding_model_provider_check = all(
+                item.embedding_model_provider == available_datasets[0].embedding_model_provider
+                for item in available_datasets
+            )
+            if (
+                reranking_enable
+                and reranking_mode == "weighted_score"
+                and (not embedding_model_check or not embedding_model_provider_check)
+            ):
+                raise ValueError(
+                    "The configured knowledge bases use different embedding models; please set a reranking model."
+                )
+            if reranking_enable and reranking_mode == RerankMode.WEIGHTED_SCORE:
+                weights["vector_setting"]["embedding_provider_name"] = available_datasets[0].embedding_model_provider
+                weights["vector_setting"]["embedding_model_name"] = available_datasets[0].embedding_model
+
         for dataset in available_datasets:
             index_type = dataset.indexing_technique
             retrieval_thread = threading.Thread(
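One consequence of `RerankMode` now subclassing `str` above is that enum members compare equal to their raw string values, which is what lets this module mix `RerankMode.RERANKING_MODEL` with plain strings such as "weighted_score". A small self-contained illustration of that Python semantics:

    from enum import Enum


    class RerankMode(str, Enum):
        RERANKING_MODEL = "reranking_model"
        WEIGHTED_SCORE = "weighted_score"


    # str-backed enum members compare equal to their raw strings:
    assert RerankMode.WEIGHTED_SCORE == "weighted_score"
    assert "reranking_model" == RerankMode.RERANKING_MODEL

    # A plain Enum would fail both checks, which is why reranking_mode values
    # read from configuration can be compared against RerankMode directly.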
diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py
index e88748b353..83a2e8ef0f 100644
--- a/api/core/tools/tool_manager.py
+++ b/api/core/tools/tool_manager.py
@@ -308,11 +308,15 @@ class ToolManager:
             parameters = tool_entity.get_merged_runtime_parameters()
             for parameter in parameters:
                 # check file types
-                if parameter.type in {
-                    ToolParameter.ToolParameterType.SYSTEM_FILES,
-                    ToolParameter.ToolParameterType.FILE,
-                    ToolParameter.ToolParameterType.FILES,
-                }:
+                if (
+                    parameter.type
+                    in {
+                        ToolParameter.ToolParameterType.SYSTEM_FILES,
+                        ToolParameter.ToolParameterType.FILE,
+                        ToolParameter.ToolParameterType.FILES,
+                    }
+                    and parameter.required
+                ):
                     raise ValueError(f"file type parameter {parameter.name} not supported in agent")
 
                 if parameter.form == ToolParameter.ToolParameterForm.FORM:
diff --git a/api/core/tools/utils/feishu_api_utils.py b/api/core/tools/utils/feishu_api_utils.py
index 245b296d18..722cf4b538 100644
--- a/api/core/tools/utils/feishu_api_utils.py
+++ b/api/core/tools/utils/feishu_api_utils.py
@@ -1,3 +1,4 @@
+import json
 from typing import Optional
 
 import httpx
@@ -17,6 +18,41 @@ def auth(credentials):
         raise ToolProviderCredentialValidationError(str(e))
 
 
+def convert_add_records(json_str):
+    try:
+        data = json.loads(json_str)
+        if not isinstance(data, list):
+            raise ValueError("Parsed data must be a list")
+        converted_data = [{"fields": json.dumps(item, ensure_ascii=False)} for item in data]
+        return converted_data
+    except json.JSONDecodeError:
+        raise ValueError("The input string is not valid JSON")
+    except Exception as e:
+        raise ValueError(f"An error occurred while processing the data: {e}")
+
+
+def convert_update_records(json_str):
+    try:
+        data = json.loads(json_str)
+        if not isinstance(data, list):
+            raise ValueError("Parsed data must be a list")
+
+        converted_data = [
+            {"fields": json.dumps(record["fields"], ensure_ascii=False), "record_id": record["record_id"]}
+            for record in data
+            if "fields" in record and "record_id" in record
+        ]
+
+        if len(converted_data) != len(data):
+            raise ValueError("Each record must contain 'fields' and 'record_id'")
+
+        return converted_data
+    except json.JSONDecodeError:
+        raise ValueError("The input string is not valid JSON")
+    except Exception as e:
+        raise ValueError(f"An error occurred while processing the data: {e}")
+
+
 class FeishuRequest:
     API_BASE_URL = "https://lark-plugin-api.solutionsuite.cn/lark-plugin"
 
@@ -517,3 +553,270 @@ class FeishuRequest:
         }
         res = self._send_request(url, method="GET", params=params)
         return res.get("data")
+
+    def create_base(
+        self,
+        name: str,
+        folder_token: str,
+    ) -> dict:
+        # Create a Base (multi-dimensional table)
+        url = f"{self.API_BASE_URL}/base/create_base"
+        payload = {
+            "name": name,
+            "folder_token": folder_token,
+        }
+        res = self._send_request(url, payload=payload)
+        return res.get("data")
+
+    def add_records(
+        self,
+        app_token: str,
+        table_id: str,
+        table_name: str,
+        records: str,
+        user_id_type: str = "open_id",
+    ) -> dict:
+        # Add multiple records
+        url = f"{self.API_BASE_URL}/base/add_records"
+        params = {
+            "app_token": app_token,
+            "table_id": table_id,
+            "table_name": table_name,
+            "user_id_type": user_id_type,
+        }
+        payload = {
+            "records": convert_add_records(records),
+        }
+        res = self._send_request(url, params=params, payload=payload)
+        return res.get("data")
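The `records` argument that `add_records` accepts is a JSON array encoded as a string; `convert_add_records` re-encodes each element as a `fields` payload for the Base API. A quick illustration with made-up field names, assuming the api package root is importable:

    from core.tools.utils.feishu_api_utils import convert_add_records

    records_json = '[{"Name": "Alice", "Score": 90}, {"Name": "Bob", "Score": 85}]'

    print(convert_add_records(records_json))
    # Each element becomes a JSON-encoded "fields" object:
    # [{'fields': '{"Name": "Alice", "Score": 90}'},
    #  {'fields': '{"Name": "Bob", "Score": 85}'}]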
+
+    def update_records(
+        self,
+        app_token: str,
+        table_id: str,
+        table_name: str,
+        records: str,
+        user_id_type: str,
+    ) -> dict:
+        # Update multiple records
+        url = f"{self.API_BASE_URL}/base/update_records"
+        params = {
+            "app_token": app_token,
+            "table_id": table_id,
+            "table_name": table_name,
+            "user_id_type": user_id_type,
+        }
+        payload = {
+            "records": convert_update_records(records),
+        }
+        res = self._send_request(url, params=params, payload=payload)
+        return res.get("data")
+
+    def delete_records(
+        self,
+        app_token: str,
+        table_id: str,
+        table_name: str,
+        record_ids: str,
+    ) -> dict:
+        # Delete multiple records
+        url = f"{self.API_BASE_URL}/base/delete_records"
+        params = {
+            "app_token": app_token,
+            "table_id": table_id,
+            "table_name": table_name,
+        }
+        if not record_ids:
+            record_id_list = []
+        else:
+            try:
+                record_id_list = json.loads(record_ids)
+            except json.JSONDecodeError:
+                raise ValueError("The input string is not valid JSON")
+        payload = {
+            "records": record_id_list,
+        }
+        res = self._send_request(url, params=params, payload=payload)
+        return res.get("data")
+
+    def search_record(
+        self,
+        app_token: str,
+        table_id: str,
+        table_name: str,
+        view_id: str,
+        field_names: str,
+        sort: str,
+        filters: str,
+        page_token: str,
+        automatic_fields: bool = False,
+        user_id_type: str = "open_id",
+        page_size: int = 20,
+    ) -> dict:
+        # Search records; a single call returns at most 500 rows.
+        url = f"{self.API_BASE_URL}/base/search_record"
+        params = {
+            "app_token": app_token,
+            "table_id": table_id,
+            "table_name": table_name,
+            "user_id_type": user_id_type,
+            "page_token": page_token,
+            "page_size": page_size,
+        }
+
+        if not field_names:
+            field_name_list = []
+        else:
+            try:
+                field_name_list = json.loads(field_names)
+            except json.JSONDecodeError:
+                raise ValueError("The input string is not valid JSON")
+
+        if not sort:
+            sort_list = []
+        else:
+            try:
+                sort_list = json.loads(sort)
+            except json.JSONDecodeError:
+                raise ValueError("The input string is not valid JSON")
+
+        if not filters:
+            filter_dict = {}
+        else:
+            try:
+                filter_dict = json.loads(filters)
+            except json.JSONDecodeError:
+                raise ValueError("The input string is not valid JSON")
+
+        payload = {}
+
+        if view_id:
+            payload["view_id"] = view_id
+        if field_names:
+            payload["field_names"] = field_name_list
+        if sort:
+            payload["sort"] = sort_list
+        if filters:
+            payload["filter"] = filter_dict
+        if automatic_fields:
+            payload["automatic_fields"] = automatic_fields
+        res = self._send_request(url, params=params, payload=payload)
+        return res.get("data")
+
+    def get_base_info(
+        self,
+        app_token: str,
+    ) -> dict:
+        # Get the metadata of a Base
+        url = f"{self.API_BASE_URL}/base/get_base_info"
+        params = {
+            "app_token": app_token,
+        }
+        res = self._send_request(url, method="GET", params=params)
+        return res.get("data")
+
+    def create_table(
+        self,
+        app_token: str,
+        table_name: str,
+        default_view_name: str,
+        fields: str,
+    ) -> dict:
+        # Create a new table
+        url = f"{self.API_BASE_URL}/base/create_table"
+        params = {
+            "app_token": app_token,
+        }
+        if not fields:
+            fields_list = []
+        else:
+            try:
+                fields_list = json.loads(fields)
+            except json.JSONDecodeError:
+                raise ValueError("The input string is not valid JSON")
+        payload = {
+            "name": table_name,
+            "fields": fields_list,
+        }
+        if default_view_name:
+            payload["default_view_name"] = default_view_name
+        res = self._send_request(url, params=params, payload=payload)
+        return res.get("data")
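Most of these endpoints take list- or object-valued parameters as JSON-encoded strings and decode them before building the payload. A hedged usage sketch for `search_record`; `client` stands for an already-initialized `FeishuRequest`, and the tokens, table ID, and field names below are invented placeholders:

    # All identifiers below are illustrative placeholders.
    data = client.search_record(
        app_token="bascnXXXXXXXX",
        table_id="tblXXXXXXXX",
        table_name="",
        view_id="",
        field_names='["Name", "Score"]',                 # JSON string, decoded inside search_record
        sort='[{"field_name": "Score", "desc": true}]',  # JSON string as well
        filters="",
        page_token="",
        page_size=50,
    )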
res.get("data") + + def delete_tables( + self, + app_token: str, + table_ids: str, + table_names: str, + ) -> dict: + # 删除多个数据表 + url = f"{self.API_BASE_URL}/base/delete_tables" + params = { + "app_token": app_token, + } + if not table_ids: + table_id_list = [] + else: + try: + table_id_list = json.loads(table_ids) + except json.JSONDecodeError: + raise ValueError("The input string is not valid JSON") + + if not table_names: + table_name_list = [] + else: + try: + table_name_list = json.loads(table_names) + except json.JSONDecodeError: + raise ValueError("The input string is not valid JSON") + + payload = { + "table_ids": table_id_list, + "table_names": table_name_list, + } + res = self._send_request(url, params=params, payload=payload) + return res.get("data") + + def list_tables( + self, + app_token: str, + page_token: str, + page_size: int = 20, + ) -> dict: + # 列出多维表格下的全部数据表 + url = f"{self.API_BASE_URL}/base/list_tables" + params = { + "app_token": app_token, + "page_token": page_token, + "page_size": page_size, + } + res = self._send_request(url, method="GET", params=params) + return res.get("data") + + def read_records( + self, + app_token: str, + table_id: str, + table_name: str, + record_ids: str, + user_id_type: str = "open_id", + ) -> dict: + url = f"{self.API_BASE_URL}/base/read_records" + params = { + "app_token": app_token, + "table_id": table_id, + "table_name": table_name, + } + if not record_ids: + record_id_list = [] + else: + try: + record_id_list = json.loads(record_ids) + except json.JSONDecodeError: + raise ValueError("The input string is not valid JSON") + payload = { + "record_ids": record_id_list, + "user_id_type": user_id_type, + } + res = self._send_request(url, method="GET", params=params, payload=payload) + return res.get("data") diff --git a/api/core/variables/segments.py b/api/core/variables/segments.py index 782798411e..b71882b043 100644 --- a/api/core/variables/segments.py +++ b/api/core/variables/segments.py @@ -56,15 +56,15 @@ class NoneSegment(Segment): @property def text(self) -> str: - return "null" + return "" @property def log(self) -> str: - return "null" + return "" @property def markdown(self) -> str: - return "null" + return "" class StringSegment(Segment): diff --git a/api/core/workflow/entities/variable_pool.py b/api/core/workflow/entities/variable_pool.py index f8968990d4..3dc3395da1 100644 --- a/api/core/workflow/entities/variable_pool.py +++ b/api/core/workflow/entities/variable_pool.py @@ -124,11 +124,15 @@ class VariablePool(BaseModel): if value is None: selector, attr = selector[:-1], selector[-1] + # Python support `attr in FileAttribute` after 3.12 + if attr not in {item.value for item in FileAttribute}: + return None value = self.get(selector) - if isinstance(value, FileSegment): - attr = FileAttribute(attr) - attr_value = file_manager.get_attr(file=value.value, attr=attr) - return variable_factory.build_segment(attr_value) + if not isinstance(value, FileSegment): + return None + attr = FileAttribute(attr) + attr_value = file_manager.get_attr(file=value.value, attr=attr) + return variable_factory.build_segment(attr_value) return value diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py index ada0b14ce4..8f58af00ef 100644 --- a/api/core/workflow/graph_engine/graph_engine.py +++ b/api/core/workflow/graph_engine/graph_engine.py @@ -130,15 +130,14 @@ class GraphEngine: yield GraphRunStartedEvent() try: - stream_processor_cls: type[AnswerStreamProcessor | EndStreamProcessor] if 
self.init_params.workflow_type == WorkflowType.CHAT: - stream_processor_cls = AnswerStreamProcessor + stream_processor = AnswerStreamProcessor( + graph=self.graph, variable_pool=self.graph_runtime_state.variable_pool + ) else: - stream_processor_cls = EndStreamProcessor - - stream_processor = stream_processor_cls( - graph=self.graph, variable_pool=self.graph_runtime_state.variable_pool - ) + stream_processor = EndStreamProcessor( + graph=self.graph, variable_pool=self.graph_runtime_state.variable_pool + ) # run graph generator = stream_processor.process(self._run(start_node_id=self.graph.root_node_id)) diff --git a/api/core/workflow/nodes/answer/answer_stream_generate_router.py b/api/core/workflow/nodes/answer/answer_stream_generate_router.py index bce28c5fcb..96e24a7db3 100644 --- a/api/core/workflow/nodes/answer/answer_stream_generate_router.py +++ b/api/core/workflow/nodes/answer/answer_stream_generate_router.py @@ -149,10 +149,11 @@ class AnswerStreamGeneratorRouter: source_node_id = edge.source_node_id source_node_type = node_id_config_mapping[source_node_id].get("data", {}).get("type") if source_node_type in { - NodeType.ANSWER.value, - NodeType.IF_ELSE.value, - NodeType.QUESTION_CLASSIFIER.value, - NodeType.ITERATION.value, + NodeType.ANSWER, + NodeType.IF_ELSE, + NodeType.QUESTION_CLASSIFIER, + NodeType.ITERATION, + NodeType.CONVERSATION_VARIABLE_ASSIGNER, }: answer_dependencies[answer_node_id].append(source_node_id) else: diff --git a/api/core/workflow/nodes/answer/answer_stream_processor.py b/api/core/workflow/nodes/answer/answer_stream_processor.py index e3889941ca..8a768088da 100644 --- a/api/core/workflow/nodes/answer/answer_stream_processor.py +++ b/api/core/workflow/nodes/answer/answer_stream_processor.py @@ -22,7 +22,7 @@ class AnswerStreamProcessor(StreamProcessor): super().__init__(graph, variable_pool) self.generate_routes = graph.answer_stream_generate_routes self.route_position = {} - for answer_node_id, route_chunks in self.generate_routes.answer_generate_route.items(): + for answer_node_id in self.generate_routes.answer_generate_route: self.route_position[answer_node_id] = 0 self.current_stream_chunk_generating_node_ids: dict[str, list[str]] = {} diff --git a/api/core/workflow/nodes/answer/entities.py b/api/core/workflow/nodes/answer/entities.py index e543d02dd7..a05cc44c99 100644 --- a/api/core/workflow/nodes/answer/entities.py +++ b/api/core/workflow/nodes/answer/entities.py @@ -1,3 +1,4 @@ +from collections.abc import Sequence from enum import Enum from pydantic import BaseModel, Field @@ -32,7 +33,7 @@ class VarGenerateRouteChunk(GenerateRouteChunk): type: GenerateRouteChunk.ChunkType = GenerateRouteChunk.ChunkType.VAR """generate route chunk type""" - value_selector: list[str] = Field(..., description="value selector") + value_selector: Sequence[str] = Field(..., description="value selector") class TextGenerateRouteChunk(GenerateRouteChunk): diff --git a/api/core/workflow/nodes/document_extractor/node.py b/api/core/workflow/nodes/document_extractor/node.py index 3efcc373b1..c2f51ad1e5 100644 --- a/api/core/workflow/nodes/document_extractor/node.py +++ b/api/core/workflow/nodes/document_extractor/node.py @@ -1,9 +1,11 @@ import csv import io +import json import docx import pandas as pd import pypdfium2 +import yaml from unstructured.partition.email import partition_email from unstructured.partition.epub import partition_epub from unstructured.partition.msg import partition_msg @@ -75,36 +77,66 @@ class DocumentExtractorNode(BaseNode[DocumentExtractorNodeData]): ) 
-def _extract_text(*, file_content: bytes, mime_type: str) -> str: +def _extract_text_by_mime_type(*, file_content: bytes, mime_type: str) -> str: """Extract text from a file based on its MIME type.""" - if mime_type.startswith("text/plain") or mime_type in {"text/html", "text/htm", "text/markdown", "text/xml"}: - return _extract_text_from_plain_text(file_content) - elif mime_type == "application/pdf": - return _extract_text_from_pdf(file_content) - elif mime_type in { - "application/vnd.openxmlformats-officedocument.wordprocessingml.document", - "application/msword", - }: - return _extract_text_from_doc(file_content) - elif mime_type == "text/csv": - return _extract_text_from_csv(file_content) - elif mime_type in { - "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", - "application/vnd.ms-excel", - }: - return _extract_text_from_excel(file_content) - elif mime_type == "application/vnd.ms-powerpoint": - return _extract_text_from_ppt(file_content) - elif mime_type == "application/vnd.openxmlformats-officedocument.presentationml.presentation": - return _extract_text_from_pptx(file_content) - elif mime_type == "application/epub+zip": - return _extract_text_from_epub(file_content) - elif mime_type == "message/rfc822": - return _extract_text_from_eml(file_content) - elif mime_type == "application/vnd.ms-outlook": - return _extract_text_from_msg(file_content) - else: - raise UnsupportedFileTypeError(f"Unsupported MIME type: {mime_type}") + match mime_type: + case "text/plain" | "text/html" | "text/htm" | "text/markdown" | "text/xml": + return _extract_text_from_plain_text(file_content) + case "application/pdf": + return _extract_text_from_pdf(file_content) + case "application/vnd.openxmlformats-officedocument.wordprocessingml.document" | "application/msword": + return _extract_text_from_doc(file_content) + case "text/csv": + return _extract_text_from_csv(file_content) + case "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" | "application/vnd.ms-excel": + return _extract_text_from_excel(file_content) + case "application/vnd.ms-powerpoint": + return _extract_text_from_ppt(file_content) + case "application/vnd.openxmlformats-officedocument.presentationml.presentation": + return _extract_text_from_pptx(file_content) + case "application/epub+zip": + return _extract_text_from_epub(file_content) + case "message/rfc822": + return _extract_text_from_eml(file_content) + case "application/vnd.ms-outlook": + return _extract_text_from_msg(file_content) + case "application/json": + return _extract_text_from_json(file_content) + case "application/x-yaml" | "text/yaml": + return _extract_text_from_yaml(file_content) + case _: + raise UnsupportedFileTypeError(f"Unsupported MIME type: {mime_type}") + + +def _extract_text_by_file_extension(*, file_content: bytes, file_extension: str) -> str: + """Extract text from a file based on its file extension.""" + match file_extension: + case ".txt" | ".markdown" | ".md" | ".html" | ".htm" | ".xml": + return _extract_text_from_plain_text(file_content) + case ".json": + return _extract_text_from_json(file_content) + case ".yaml" | ".yml": + return _extract_text_from_yaml(file_content) + case ".pdf": + return _extract_text_from_pdf(file_content) + case ".doc" | ".docx": + return _extract_text_from_doc(file_content) + case ".csv": + return _extract_text_from_csv(file_content) + case ".xls" | ".xlsx": + return _extract_text_from_excel(file_content) + case ".ppt": + return _extract_text_from_ppt(file_content) + case ".pptx": + return 
_extract_text_from_pptx(file_content) + case ".epub": + return _extract_text_from_epub(file_content) + case ".eml": + return _extract_text_from_eml(file_content) + case ".msg": + return _extract_text_from_msg(file_content) + case _: + raise UnsupportedFileTypeError(f"Unsupported Extension Type: {file_extension}") def _extract_text_from_plain_text(file_content: bytes) -> str: @@ -114,6 +146,23 @@ def _extract_text_from_plain_text(file_content: bytes) -> str: raise TextExtractionError("Failed to decode plain text file") from e +def _extract_text_from_json(file_content: bytes) -> str: + try: + json_data = json.loads(file_content.decode("utf-8")) + return json.dumps(json_data, indent=2, ensure_ascii=False) + except (UnicodeDecodeError, json.JSONDecodeError) as e: + raise TextExtractionError(f"Failed to decode or parse JSON file: {e}") from e + + +def _extract_text_from_yaml(file_content: bytes) -> str: + """Extract the content from yaml file""" + try: + yaml_data = yaml.safe_load_all(file_content.decode("utf-8")) + return yaml.dump_all(yaml_data, allow_unicode=True, sort_keys=False) + except (UnicodeDecodeError, yaml.YAMLError) as e: + raise TextExtractionError(f"Failed to decode or parse YAML file: {e}") from e + + def _extract_text_from_pdf(file_content: bytes) -> str: try: pdf_file = io.BytesIO(file_content) @@ -156,10 +205,13 @@ def _download_file_content(file: File) -> bytes: def _extract_text_from_file(file: File): - if file.mime_type is None: - raise UnsupportedFileTypeError("Unable to determine file type: MIME type is missing") file_content = _download_file_content(file) - extracted_text = _extract_text(file_content=file_content, mime_type=file.mime_type) + if file.extension: + extracted_text = _extract_text_by_file_extension(file_content=file_content, file_extension=file.extension) + elif file.mime_type: + extracted_text = _extract_text_by_mime_type(file_content=file_content, mime_type=file.mime_type) + else: + raise UnsupportedFileTypeError("Unable to determine file type: MIME type or file extension is missing") return extracted_text @@ -172,7 +224,7 @@ def _extract_text_from_csv(file_content: bytes) -> str: if not rows: return "" - # Create markdown table + # Create Markdown table markdown_table = "| " + " | ".join(rows[0]) + " |\n" markdown_table += "| " + " | ".join(["---"] * len(rows[0])) + " |\n" for row in rows[1:]: @@ -192,7 +244,7 @@ def _extract_text_from_excel(file_content: bytes) -> str: # Drop rows where all elements are NaN df.dropna(how="all", inplace=True) - # Convert DataFrame to markdown table + # Convert DataFrame to Markdown table markdown_table = df.to_markdown(index=False) return markdown_table except Exception as e: diff --git a/api/core/workflow/nodes/http_request/entities.py b/api/core/workflow/nodes/http_request/entities.py index dec76a277e..36ded104c1 100644 --- a/api/core/workflow/nodes/http_request/entities.py +++ b/api/core/workflow/nodes/http_request/entities.py @@ -94,7 +94,7 @@ class Response: @property def is_file(self): content_type = self.content_type - content_disposition = self.response.headers.get("Content-Disposition", "") + content_disposition = self.response.headers.get("content-disposition", "") return "attachment" in content_disposition or ( not any(non_file in content_type for non_file in NON_FILE_CONTENT_TYPES) @@ -103,7 +103,7 @@ class Response: @property def content_type(self) -> str: - return self.headers.get("Content-Type", "") + return self.headers.get("content-type", "") @property def text(self) -> str: diff --git 
a/api/core/workflow/nodes/http_request/executor.py b/api/core/workflow/nodes/http_request/executor.py index 0270d7e0fd..6872478299 100644 --- a/api/core/workflow/nodes/http_request/executor.py +++ b/api/core/workflow/nodes/http_request/executor.py @@ -33,7 +33,7 @@ class Executor: params: Mapping[str, str] | None content: str | bytes | None data: Mapping[str, Any] | None - files: Mapping[str, bytes] | None + files: Mapping[str, tuple[str | None, bytes, str]] | None json: Any headers: dict[str, str] auth: HttpRequestNodeAuthorization @@ -141,7 +141,11 @@ class Executor: files = {k: self.variable_pool.get_file(selector) for k, selector in file_selectors.items()} files = {k: v for k, v in files.items() if v is not None} files = {k: variable.value for k, variable in files.items()} - files = {k: file_manager.download(v) for k, v in files.items() if v.related_id is not None} + files = { + k: (v.filename, file_manager.download(v), v.mime_type or "application/octet-stream") + for k, v in files.items() + if v.related_id is not None + } self.data = form_data self.files = files diff --git a/api/core/workflow/nodes/http_request/node.py b/api/core/workflow/nodes/http_request/node.py index 483d0e2b7e..a037bee665 100644 --- a/api/core/workflow/nodes/http_request/node.py +++ b/api/core/workflow/nodes/http_request/node.py @@ -142,10 +142,11 @@ class HttpRequestNode(BaseNode[HttpRequestNodeData]): Extract files from response """ files = [] + is_file = response.is_file content_type = response.content_type content = response.content - if content_type: + if is_file and content_type: # extract filename from url filename = path.basename(url) # extract extension if possible diff --git a/api/core/workflow/nodes/llm/node.py b/api/core/workflow/nodes/llm/node.py index 94aa8c5eab..b4728e6abf 100644 --- a/api/core/workflow/nodes/llm/node.py +++ b/api/core/workflow/nodes/llm/node.py @@ -127,9 +127,10 @@ class LLMNode(BaseNode[LLMNodeData]): context=context, memory=memory, model_config=model_config, - vision_detail=self.node_data.vision.configs.detail, prompt_template=self.node_data.prompt_template, memory_config=self.node_data.memory, + vision_enabled=self.node_data.vision.enabled, + vision_detail=self.node_data.vision.configs.detail, ) process_data = { @@ -326,7 +327,7 @@ class LLMNode(BaseNode[LLMNodeData]): if variable is None: raise ValueError(f"Variable {variable_selector.variable} not found") if isinstance(variable, NoneSegment): - continue + inputs[variable_selector.variable] = "" inputs[variable_selector.variable] = variable.to_object() memory = node_data.memory @@ -348,13 +349,11 @@ class LLMNode(BaseNode[LLMNodeData]): variable = self.graph_runtime_state.variable_pool.get(selector) if variable is None: return [] - if isinstance(variable, FileSegment): + elif isinstance(variable, FileSegment): return [variable.value] - if isinstance(variable, ArrayFileSegment): + elif isinstance(variable, ArrayFileSegment): return variable.value - # FIXME: Temporary fix for empty array, - # all variables added to variable pool should be a Segment instance. 
- if isinstance(variable, ArrayAnySegment) and len(variable.value) == 0: + elif isinstance(variable, NoneSegment | ArrayAnySegment): return [] raise ValueError(f"Invalid variable type: {type(variable)}") @@ -518,6 +517,7 @@ class LLMNode(BaseNode[LLMNodeData]): model_config: ModelConfigWithCredentialsEntity, prompt_template: Sequence[LLMNodeChatModelMessage] | LLMNodeCompletionModelPromptTemplate, memory_config: MemoryConfig | None = None, + vision_enabled: bool = False, vision_detail: ImagePromptMessageContent.DETAIL, ) -> tuple[list[PromptMessage], Optional[list[str]]]: inputs = inputs or {} @@ -542,6 +542,10 @@ class LLMNode(BaseNode[LLMNodeData]): if not isinstance(prompt_message.content, str): prompt_message_content = [] for content_item in prompt_message.content or []: + # Skip image if vision is disabled + if not vision_enabled and content_item.type == PromptMessageContentType.IMAGE: + continue + if isinstance(content_item, ImagePromptMessageContent): # Override vision config if LLM node has vision config, # cuz vision detail is related to the configuration from FileUpload feature. diff --git a/api/core/workflow/nodes/question_classifier/question_classifier_node.py b/api/core/workflow/nodes/question_classifier/question_classifier_node.py index e6af453dcf..ee160e7c69 100644 --- a/api/core/workflow/nodes/question_classifier/question_classifier_node.py +++ b/api/core/workflow/nodes/question_classifier/question_classifier_node.py @@ -88,6 +88,7 @@ class QuestionClassifierNode(LLMNode): memory=memory, model_config=model_config, files=files, + vision_enabled=node_data.vision.enabled, vision_detail=node_data.vision.configs.detail, ) diff --git a/api/core/workflow/utils/condition/entities.py b/api/core/workflow/utils/condition/entities.py index 1d96743879..799c735f54 100644 --- a/api/core/workflow/utils/condition/entities.py +++ b/api/core/workflow/utils/condition/entities.py @@ -25,6 +25,9 @@ SupportedComparisonOperator = Literal[ "≤", "null", "not null", + # for file + "exists", + "not exists", ] diff --git a/api/core/workflow/utils/condition/processor.py b/api/core/workflow/utils/condition/processor.py index f4a80fa5e1..19473f39d2 100644 --- a/api/core/workflow/utils/condition/processor.py +++ b/api/core/workflow/utils/condition/processor.py @@ -2,7 +2,7 @@ from collections.abc import Sequence from typing import Any, Literal from core.file import FileAttribute, file_manager -from core.variables.segments import ArrayFileSegment +from core.variables import ArrayFileSegment from core.workflow.entities.variable_pool import VariablePool from .entities import Condition, SubCondition, SupportedComparisonOperator @@ -21,6 +21,8 @@ class ConditionProcessor: for condition in conditions: variable = variable_pool.get(condition.variable_selector) + if variable is None: + raise ValueError(f"Variable {condition.variable_selector} not found") if isinstance(variable, ArrayFileSegment) and condition.comparison_operator in { "contains", @@ -35,6 +37,15 @@ class ConditionProcessor: sub_conditions=condition.sub_variable_condition.conditions, operator=condition.sub_variable_condition.logical_operator, ) + elif condition.comparison_operator in { + "exists", + "not exists", + }: + result = _evaluate_condition( + value=variable.value, + operator=condition.comparison_operator, + expected=None, + ) else: actual_value = variable.value if variable else None expected_value = condition.value @@ -103,6 +114,10 @@ def _evaluate_condition( return _assert_not_in(value=value, expected=expected) case "all of" if 
isinstance(expected, list): return _assert_all_of(value=value, expected=expected) + case "exists": + return _assert_exists(value=value) + case "not exists": + return _assert_not_exists(value=value) case _: raise ValueError(f"Unsupported operator: {operator}") @@ -338,6 +353,14 @@ def _assert_all_of(*, value: Any, expected: Sequence[str]) -> bool: return True +def _assert_exists(*, value: Any) -> bool: + return value is not None + + +def _assert_not_exists(*, value: Any) -> bool: + return value is None + + def _process_sub_conditions( variable: ArrayFileSegment, sub_conditions: Sequence[SubCondition], diff --git a/api/extensions/ext_celery.py b/api/extensions/ext_celery.py index b9b019373d..504899c276 100644 --- a/api/extensions/ext_celery.py +++ b/api/extensions/ext_celery.py @@ -1,6 +1,7 @@ from datetime import timedelta from celery import Celery, Task +from celery.schedules import crontab from flask import Flask from configs import dify_config @@ -55,6 +56,8 @@ def init_app(app: Flask) -> Celery: imports = [ "schedule.clean_embedding_cache_task", "schedule.clean_unused_datasets_task", + "schedule.create_tidb_serverless_task", + "schedule.update_tidb_serverless_status_task", ] day = dify_config.CELERY_BEAT_SCHEDULER_TIME beat_schedule = { @@ -66,6 +69,14 @@ def init_app(app: Flask) -> Celery: "task": "schedule.clean_unused_datasets_task.clean_unused_datasets_task", "schedule": timedelta(days=day), }, + "create_tidb_serverless_task": { + "task": "schedule.create_tidb_serverless_task.create_tidb_serverless_task", + "schedule": crontab(minute="0", hour="*"), + }, + "update_tidb_serverless_status_task": { + "task": "schedule.update_tidb_serverless_status_task.update_tidb_serverless_status_task", + "schedule": crontab(minute="30", hour="*"), + }, } celery_app.conf.update(beat_schedule=beat_schedule, imports=imports) diff --git a/api/extensions/ext_logging.py b/api/extensions/ext_logging.py index 9e1a241b67..56b1d6bd28 100644 --- a/api/extensions/ext_logging.py +++ b/api/extensions/ext_logging.py @@ -17,8 +17,8 @@ def init_app(app: Flask): log_handlers = [ RotatingFileHandler( filename=log_file, - maxBytes=1024 * 1024 * 1024, - backupCount=5, + maxBytes=dify_config.LOG_FILE_MAX_SIZE * 1024 * 1024, + backupCount=dify_config.LOG_FILE_BACKUP_COUNT, ), logging.StreamHandler(sys.stdout), ] diff --git a/api/extensions/storage/aliyun_oss_storage.py b/api/extensions/storage/aliyun_oss_storage.py index 01c1000e50..67635b129e 100644 --- a/api/extensions/storage/aliyun_oss_storage.py +++ b/api/extensions/storage/aliyun_oss_storage.py @@ -36,12 +36,9 @@ class AliyunOssStorage(BaseStorage): return data def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - obj = self.client.get_object(self.__wrapper_folder_filename(filename)) - while chunk := obj.read(4096): - yield chunk - - return generate() + obj = self.client.get_object(self.__wrapper_folder_filename(filename)) + while chunk := obj.read(4096): + yield chunk def download(self, filename, target_filepath): self.client.get_object_to_file(self.__wrapper_folder_filename(filename), target_filepath) diff --git a/api/extensions/storage/aws_s3_storage.py b/api/extensions/storage/aws_s3_storage.py index cb67313bb2..ab2d0fba3b 100644 --- a/api/extensions/storage/aws_s3_storage.py +++ b/api/extensions/storage/aws_s3_storage.py @@ -62,17 +62,14 @@ class AwsS3Storage(BaseStorage): return data def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - try: - response = 
self.client.get_object(Bucket=self.bucket_name, Key=filename) - yield from response["Body"].iter_chunks() - except ClientError as ex: - if ex.response["Error"]["Code"] == "NoSuchKey": - raise FileNotFoundError("File not found") - else: - raise - - return generate() + try: + response = self.client.get_object(Bucket=self.bucket_name, Key=filename) + yield from response["Body"].iter_chunks() + except ClientError as ex: + if ex.response["Error"]["Code"] == "NoSuchKey": + raise FileNotFoundError("File not found") + else: + raise def download(self, filename, target_filepath): self.client.download_file(self.bucket_name, filename, target_filepath) diff --git a/api/extensions/storage/azure_blob_storage.py b/api/extensions/storage/azure_blob_storage.py index 477507feda..11a7544274 100644 --- a/api/extensions/storage/azure_blob_storage.py +++ b/api/extensions/storage/azure_blob_storage.py @@ -32,13 +32,9 @@ class AzureBlobStorage(BaseStorage): def load_stream(self, filename: str) -> Generator: client = self._sync_client() - - def generate(filename: str = filename) -> Generator: - blob = client.get_blob_client(container=self.bucket_name, blob=filename) - blob_data = blob.download_blob() - yield from blob_data.chunks() - - return generate(filename) + blob = client.get_blob_client(container=self.bucket_name, blob=filename) + blob_data = blob.download_blob() + yield from blob_data.chunks() def download(self, filename, target_filepath): client = self._sync_client() diff --git a/api/extensions/storage/baidu_obs_storage.py b/api/extensions/storage/baidu_obs_storage.py index cd69439749..e0d2140e91 100644 --- a/api/extensions/storage/baidu_obs_storage.py +++ b/api/extensions/storage/baidu_obs_storage.py @@ -39,12 +39,9 @@ class BaiduObsStorage(BaseStorage): return response.data.read() def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - response = self.client.get_object(bucket_name=self.bucket_name, key=filename).data - while chunk := response.read(4096): - yield chunk - - return generate() + response = self.client.get_object(bucket_name=self.bucket_name, key=filename).data + while chunk := response.read(4096): + yield chunk def download(self, filename, target_filepath): self.client.get_object_to_file(bucket_name=self.bucket_name, key=filename, file_name=target_filepath) diff --git a/api/extensions/storage/google_cloud_storage.py b/api/extensions/storage/google_cloud_storage.py index e90392a6ba..26b662d2f0 100644 --- a/api/extensions/storage/google_cloud_storage.py +++ b/api/extensions/storage/google_cloud_storage.py @@ -39,14 +39,11 @@ class GoogleCloudStorage(BaseStorage): return data def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - bucket = self.client.get_bucket(self.bucket_name) - blob = bucket.get_blob(filename) - with blob.open(mode="rb") as blob_stream: - while chunk := blob_stream.read(4096): - yield chunk - - return generate() + bucket = self.client.get_bucket(self.bucket_name) + blob = bucket.get_blob(filename) + with blob.open(mode="rb") as blob_stream: + while chunk := blob_stream.read(4096): + yield chunk def download(self, filename, target_filepath): bucket = self.client.get_bucket(self.bucket_name) diff --git a/api/extensions/storage/huawei_obs_storage.py b/api/extensions/storage/huawei_obs_storage.py index 3c443d87ac..20be70ef83 100644 --- a/api/extensions/storage/huawei_obs_storage.py +++ b/api/extensions/storage/huawei_obs_storage.py @@ -27,12 +27,9 @@ class 
HuaweiObsStorage(BaseStorage): return data def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - response = self.client.getObject(bucketName=self.bucket_name, objectKey=filename)["body"].response - while chunk := response.read(4096): - yield chunk - - return generate() + response = self.client.getObject(bucketName=self.bucket_name, objectKey=filename)["body"].response + while chunk := response.read(4096): + yield chunk def download(self, filename, target_filepath): self.client.getObject(bucketName=self.bucket_name, objectKey=filename, downloadPath=target_filepath) diff --git a/api/extensions/storage/local_fs_storage.py b/api/extensions/storage/local_fs_storage.py index e458b3ce8a..5a495ca4d4 100644 --- a/api/extensions/storage/local_fs_storage.py +++ b/api/extensions/storage/local_fs_storage.py @@ -19,68 +19,44 @@ class LocalFsStorage(BaseStorage): folder = os.path.join(current_app.root_path, folder) self.folder = folder - def save(self, filename, data): + def _build_filepath(self, filename: str) -> str: + """Build the full file path based on the folder and filename.""" if not self.folder or self.folder.endswith("/"): - filename = self.folder + filename + return self.folder + filename else: - filename = self.folder + "/" + filename + return self.folder + "/" + filename - folder = os.path.dirname(filename) + def save(self, filename, data): + filepath = self._build_filepath(filename) + folder = os.path.dirname(filepath) os.makedirs(folder, exist_ok=True) - - Path(os.path.join(os.getcwd(), filename)).write_bytes(data) + Path(os.path.join(os.getcwd(), filepath)).write_bytes(data) def load_once(self, filename: str) -> bytes: - if not self.folder or self.folder.endswith("/"): - filename = self.folder + filename - else: - filename = self.folder + "/" + filename - - if not os.path.exists(filename): + filepath = self._build_filepath(filename) + if not os.path.exists(filepath): raise FileNotFoundError("File not found") - - data = Path(filename).read_bytes() - return data + return Path(filepath).read_bytes() def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - if not self.folder or self.folder.endswith("/"): - filename = self.folder + filename - else: - filename = self.folder + "/" + filename - - if not os.path.exists(filename): - raise FileNotFoundError("File not found") - - with open(filename, "rb") as f: - while chunk := f.read(4096): # Read in chunks of 4KB - yield chunk - - return generate() + filepath = self._build_filepath(filename) + if not os.path.exists(filepath): + raise FileNotFoundError("File not found") + with open(filepath, "rb") as f: + while chunk := f.read(4096): # Read in chunks of 4KB + yield chunk def download(self, filename, target_filepath): - if not self.folder or self.folder.endswith("/"): - filename = self.folder + filename - else: - filename = self.folder + "/" + filename - - if not os.path.exists(filename): + filepath = self._build_filepath(filename) + if not os.path.exists(filepath): raise FileNotFoundError("File not found") - - shutil.copyfile(filename, target_filepath) + shutil.copyfile(filepath, target_filepath) def exists(self, filename): - if not self.folder or self.folder.endswith("/"): - filename = self.folder + filename - else: - filename = self.folder + "/" + filename - - return os.path.exists(filename) + filepath = self._build_filepath(filename) + return os.path.exists(filepath) def delete(self, filename): - if not self.folder or 
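
# A quick standalone check (not part of this patch) of the `_build_filepath`
# helper extracted in the LocalFsStorage hunk above. It deliberately keeps
# the original string-concatenation semantics rather than os.path.join, so a
# folder that is empty or already ends with "/" is not double-slashed:
def _build_filepath(folder: str, filename: str) -> str:
    if not folder or folder.endswith("/"):
        return folder + filename
    return folder + "/" + filename


assert _build_filepath("", "a.txt") == "a.txt"
assert _build_filepath("storage/", "a.txt") == "storage/a.txt"
assert _build_filepath("storage", "a.txt") == "storage/a.txt"
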
self.folder.endswith("/"): - filename = self.folder + filename - else: - filename = self.folder + "/" + filename - if os.path.exists(filename): - os.remove(filename) + filepath = self._build_filepath(filename) + if os.path.exists(filepath): + os.remove(filepath) diff --git a/api/extensions/storage/oracle_oci_storage.py b/api/extensions/storage/oracle_oci_storage.py index e4f50b34e9..b59f83b8de 100644 --- a/api/extensions/storage/oracle_oci_storage.py +++ b/api/extensions/storage/oracle_oci_storage.py @@ -36,17 +36,14 @@ class OracleOCIStorage(BaseStorage): return data def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - try: - response = self.client.get_object(Bucket=self.bucket_name, Key=filename) - yield from response["Body"].iter_chunks() - except ClientError as ex: - if ex.response["Error"]["Code"] == "NoSuchKey": - raise FileNotFoundError("File not found") - else: - raise - - return generate() + try: + response = self.client.get_object(Bucket=self.bucket_name, Key=filename) + yield from response["Body"].iter_chunks() + except ClientError as ex: + if ex.response["Error"]["Code"] == "NoSuchKey": + raise FileNotFoundError("File not found") + else: + raise def download(self, filename, target_filepath): self.client.download_file(self.bucket_name, filename, target_filepath) diff --git a/api/extensions/storage/supabase_storage.py b/api/extensions/storage/supabase_storage.py index 1119244574..9f7c69a9ae 100644 --- a/api/extensions/storage/supabase_storage.py +++ b/api/extensions/storage/supabase_storage.py @@ -36,17 +36,14 @@ class SupabaseStorage(BaseStorage): return content def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - result = self.client.storage.from_(self.bucket_name).download(filename) - byte_stream = io.BytesIO(result) - while chunk := byte_stream.read(4096): # Read in chunks of 4KB - yield chunk - - return generate() + result = self.client.storage.from_(self.bucket_name).download(filename) + byte_stream = io.BytesIO(result) + while chunk := byte_stream.read(4096): # Read in chunks of 4KB + yield chunk def download(self, filename, target_filepath): result = self.client.storage.from_(self.bucket_name).download(filename) - Path(result).write_bytes(result) + Path(target_filepath).write_bytes(result) def exists(self, filename): result = self.client.storage.from_(self.bucket_name).list(filename) diff --git a/api/extensions/storage/tencent_cos_storage.py b/api/extensions/storage/tencent_cos_storage.py index 8fd8e703a1..13a6c9239c 100644 --- a/api/extensions/storage/tencent_cos_storage.py +++ b/api/extensions/storage/tencent_cos_storage.py @@ -29,11 +29,8 @@ class TencentCosStorage(BaseStorage): return data def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - response = self.client.get_object(Bucket=self.bucket_name, Key=filename) - yield from response["Body"].get_stream(chunk_size=4096) - - return generate() + response = self.client.get_object(Bucket=self.bucket_name, Key=filename) + yield from response["Body"].get_stream(chunk_size=4096) def download(self, filename, target_filepath): response = self.client.get_object(Bucket=self.bucket_name, Key=filename) diff --git a/api/extensions/storage/volcengine_tos_storage.py b/api/extensions/storage/volcengine_tos_storage.py index 389c5630e3..de82be04ea 100644 --- a/api/extensions/storage/volcengine_tos_storage.py +++ b/api/extensions/storage/volcengine_tos_storage.py @@ -27,12 
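
# A tiny illustration (not part of this patch) of the Supabase `download`
# fix above: the SDK's download() returns the file *bytes*, and the old code
# passed those bytes to Path() as if they were the destination path. Writing
# the bytes to the caller-supplied target path is the correct pattern; the
# values below are hypothetical placeholders:
from pathlib import Path

result: bytes = b"...bytes returned by client.storage.from_(bucket).download(name)..."
target_filepath = "/tmp/downloaded.bin"  # hypothetical destination
Path(target_filepath).write_bytes(result)
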
+27,9 @@ class VolcengineTosStorage(BaseStorage): return data def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - response = self.client.get_object(bucket=self.bucket_name, key=filename) - while chunk := response.read(4096): - yield chunk - - return generate() + response = self.client.get_object(bucket=self.bucket_name, key=filename) + while chunk := response.read(4096): + yield chunk def download(self, filename, target_filepath): self.client.get_object_to_file(bucket=self.bucket_name, key=filename, file_path=target_filepath) diff --git a/api/factories/file_factory.py b/api/factories/file_factory.py index fa88e2b4fe..1066dc8862 100644 --- a/api/factories/file_factory.py +++ b/api/factories/file_factory.py @@ -160,7 +160,7 @@ def _build_from_local_file( tenant_id=tenant_id, type=file_type, transfer_method=transfer_method, - remote_url=None, + remote_url=row.source_url, related_id=mapping.get("upload_file_id"), _extra_config=config, size=row.size, @@ -179,27 +179,19 @@ def _build_from_remote_url( if not url: raise ValueError("Invalid file url") + mime_type = mimetypes.guess_type(url)[0] or "" + file_size = -1 + filename = url.split("/")[-1].split("?")[0] or "unknown_file" + resp = ssrf_proxy.head(url, follow_redirects=True) if resp.status_code == httpx.codes.OK: - # Try to extract filename from response headers or URL - content_disposition = resp.headers.get("Content-Disposition") - if content_disposition: + if content_disposition := resp.headers.get("Content-Disposition"): filename = content_disposition.split("filename=")[-1].strip('"') - else: - filename = url.split("/")[-1].split("?")[0] - # Create the File object - file_size = int(resp.headers.get("Content-Length", -1)) - mime_type = str(resp.headers.get("Content-Type", "")) - else: - filename = "" - file_size = -1 - mime_type = "" + file_size = int(resp.headers.get("Content-Length", file_size)) + mime_type = mime_type or str(resp.headers.get("Content-Type", "")) - # If filename is empty, set a default one - if not filename: - filename = "unknown_file" # Determine file extension - extension = "." + filename.split(".")[-1] if "." in filename else ".bin" + extension = mimetypes.guess_extension(mime_type) or ("." + filename.split(".")[-1] if "." 
in filename else ".bin") if not mime_type: mime_type, _ = mimetypes.guess_type(url) diff --git a/api/factories/variable_factory.py b/api/factories/variable_factory.py index a758f9981f..d0c8c7e84f 100644 --- a/api/factories/variable_factory.py +++ b/api/factories/variable_factory.py @@ -10,6 +10,7 @@ from core.variables import ( ArrayNumberVariable, ArrayObjectSegment, ArrayObjectVariable, + ArraySegment, ArrayStringSegment, ArrayStringVariable, FileSegment, @@ -79,7 +80,7 @@ def build_segment(value: Any, /) -> Segment: if isinstance(value, list): items = [build_segment(item) for item in value] types = {item.value_type for item in items} - if len(types) != 1: + if len(types) != 1 or all(isinstance(item, ArraySegment) for item in items): return ArrayAnySegment(value=value) match types.pop(): case SegmentType.STRING: diff --git a/api/fields/conversation_fields.py b/api/fields/conversation_fields.py index bf1c491a05..2eb19c2667 100644 --- a/api/fields/conversation_fields.py +++ b/api/fields/conversation_fields.py @@ -121,6 +121,7 @@ conversation_fields = { "from_account_name": fields.String, "read_at": TimestampField, "created_at": TimestampField, + "updated_at": TimestampField, "annotation": fields.Nested(annotation_fields, allow_null=True), "model_config": fields.Nested(simple_model_config_fields), "user_feedback_stats": fields.Nested(feedback_stat_fields), @@ -182,6 +183,7 @@ conversation_detail_fields = { "from_end_user_id": fields.String, "from_account_id": fields.String, "created_at": TimestampField, + "updated_at": TimestampField, "annotated": fields.Boolean, "introduction": fields.String, "model_config": fields.Nested(model_config_fields), @@ -197,6 +199,7 @@ simple_conversation_fields = { "status": fields.String, "introduction": fields.String, "created_at": TimestampField, + "updated_at": TimestampField, } conversation_infinite_scroll_pagination_fields = { diff --git a/api/fields/file_fields.py b/api/fields/file_fields.py index 4ce7644e9d..1cddc24b2c 100644 --- a/api/fields/file_fields.py +++ b/api/fields/file_fields.py @@ -6,6 +6,8 @@ upload_config_fields = { "file_size_limit": fields.Integer, "batch_count_limit": fields.Integer, "image_file_size_limit": fields.Integer, + "video_file_size_limit": fields.Integer, + "audio_file_size_limit": fields.Integer, } file_fields = { @@ -22,3 +24,15 @@ remote_file_info_fields = { "file_type": fields.String(attribute="file_type"), "file_length": fields.Integer(attribute="file_length"), } + + +file_fields_with_signed_url = { + "id": fields.String, + "name": fields.String, + "size": fields.Integer, + "extension": fields.String, + "url": fields.String, + "mime_type": fields.String, + "created_by": fields.String, + "created_at": TimestampField, +} diff --git a/api/migrations/versions/04c602f5dc9b_update_appmodelconfig_and_add_table_.py b/api/migrations/versions/04c602f5dc9b_update_appmodelconfig_and_add_table_.py index 6a7402b16a..153861a71a 100644 --- a/api/migrations/versions/04c602f5dc9b_update_appmodelconfig_and_add_table_.py +++ b/api/migrations/versions/04c602f5dc9b_update_appmodelconfig_and_add_table_.py @@ -28,16 +28,12 @@ def upgrade(): sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey') ) - with op.batch_alter_table('tracing_app_configs', schema=None) as batch_op: - batch_op.create_index('tracing_app_config_app_id_idx', ['app_id'], unique=False) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please 
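
# A simplified sketch (not part of this patch) of the `build_segment` rule
# changed in the variable_factory hunk above: mixed-type lists already fell
# back to ArrayAnySegment, and the new `all(isinstance(item, ArraySegment))`
# clause sends homogeneous lists-of-lists there too, since there is no
# dedicated nested-array segment type. The names below are stand-ins for the
# real classes in core.variables:
def classify(value: list) -> str:
    kinds = {("array" if isinstance(v, list) else type(v).__name__) for v in value}
    if len(kinds) != 1 or kinds == {"array"}:
        return "ArrayAnySegment"
    return f"Array{kinds.pop().capitalize()}Segment"


assert classify(["a", 1]) == "ArrayAnySegment"       # mixed element types
assert classify([[1], [2, 3]]) == "ArrayAnySegment"  # nested arrays
assert classify(["a", "b"]) == "ArrayStrSegment"     # homogeneous scalars
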
adjust! ## - with op.batch_alter_table('tracing_app_configs', schema=None) as batch_op: - batch_op.drop_index('tracing_app_config_app_id_idx') - op.drop_table('tracing_app_configs') + # ### end Alembic commands ### diff --git a/api/migrations/versions/2024_08_15_0956-0251a1c768cc_add_tidb_auth_binding.py b/api/migrations/versions/2024_08_15_0956-0251a1c768cc_add_tidb_auth_binding.py new file mode 100644 index 0000000000..ca2e410442 --- /dev/null +++ b/api/migrations/versions/2024_08_15_0956-0251a1c768cc_add_tidb_auth_binding.py @@ -0,0 +1,51 @@ +"""add-tidb-auth-binding + +Revision ID: 0251a1c768cc +Revises: 63a83fcf12ba +Create Date: 2024-08-15 09:56:59.012490 + +""" +import sqlalchemy as sa +from alembic import op + +import models as models + +# revision identifiers, used by Alembic. +revision = '0251a1c768cc' +down_revision = 'bbadea11becb' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('tidb_auth_bindings', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=True), + sa.Column('cluster_id', sa.String(length=255), nullable=False), + sa.Column('cluster_name', sa.String(length=255), nullable=False), + sa.Column('active', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('status', sa.String(length=255), server_default=sa.text("'CREATING'::character varying"), nullable=False), + sa.Column('account', sa.String(length=255), nullable=False), + sa.Column('password', sa.String(length=255), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='tidb_auth_bindings_pkey') + ) + with op.batch_alter_table('tidb_auth_bindings', schema=None) as batch_op: + batch_op.create_index('tidb_auth_bindings_active_idx', ['active'], unique=False) + batch_op.create_index('tidb_auth_bindings_status_idx', ['status'], unique=False) + batch_op.create_index('tidb_auth_bindings_created_at_idx', ['created_at'], unique=False) + batch_op.create_index('tidb_auth_bindings_tenant_idx', ['tenant_id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('tidb_auth_bindings', schema=None) as batch_op: + batch_op.drop_index('tidb_auth_bindings_tenant_idx') + batch_op.drop_index('tidb_auth_bindings_created_at_idx') + batch_op.drop_index('tidb_auth_bindings_active_idx') + batch_op.drop_index('tidb_auth_bindings_status_idx') + op.drop_table('tidb_auth_bindings') + # ### end Alembic commands ### diff --git a/api/migrations/versions/2024_10_22_0959-43fa78bc3b7d_add_white_list.py b/api/migrations/versions/2024_10_22_0959-43fa78bc3b7d_add_white_list.py new file mode 100644 index 0000000000..9daf148bc4 --- /dev/null +++ b/api/migrations/versions/2024_10_22_0959-43fa78bc3b7d_add_white_list.py @@ -0,0 +1,42 @@ +"""add_white_list + +Revision ID: 43fa78bc3b7d +Revises: 0251a1c768cc +Create Date: 2024-10-22 09:59:23.713716 + +""" +from alembic import op +import models as models +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = '43fa78bc3b7d' +down_revision = '0251a1c768cc' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('whitelists', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=True), + sa.Column('category', sa.String(length=255), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='whitelists_pkey') + ) + with op.batch_alter_table('whitelists', schema=None) as batch_op: + batch_op.create_index('whitelists_tenant_idx', ['tenant_id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + + with op.batch_alter_table('whitelists', schema=None) as batch_op: + batch_op.drop_index('whitelists_tenant_idx') + + op.drop_table('whitelists') + # ### end Alembic commands ### diff --git a/api/migrations/versions/2024_11_01_0434-d3f6769a94a3_add_upload_files_source_url.py b/api/migrations/versions/2024_11_01_0434-d3f6769a94a3_add_upload_files_source_url.py new file mode 100644 index 0000000000..a749c8bddf --- /dev/null +++ b/api/migrations/versions/2024_11_01_0434-d3f6769a94a3_add_upload_files_source_url.py @@ -0,0 +1,31 @@ +"""Add upload_files.source_url + +Revision ID: d3f6769a94a3 +Revises: 43fa78bc3b7d +Create Date: 2024-11-01 04:34:23.816198 + +""" +from alembic import op +import models as models +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = 'd3f6769a94a3' +down_revision = '43fa78bc3b7d' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('upload_files', schema=None) as batch_op: + batch_op.add_column(sa.Column('source_url', sa.String(length=255), server_default='', nullable=False)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('upload_files', schema=None) as batch_op: + batch_op.drop_column('source_url') + # ### end Alembic commands ### diff --git a/api/migrations/versions/2024_11_01_0449-93ad8c19c40b_rename_conversation_variables_index_name.py b/api/migrations/versions/2024_11_01_0449-93ad8c19c40b_rename_conversation_variables_index_name.py new file mode 100644 index 0000000000..81a7978f73 --- /dev/null +++ b/api/migrations/versions/2024_11_01_0449-93ad8c19c40b_rename_conversation_variables_index_name.py @@ -0,0 +1,52 @@ +"""rename conversation variables index name + +Revision ID: 93ad8c19c40b +Revises: d3f6769a94a3 +Create Date: 2024-11-01 04:49:53.100250 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = '93ad8c19c40b' +down_revision = 'd3f6769a94a3' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + conn = op.get_bind() + if conn.dialect.name == 'postgresql': + # Rename indexes for PostgreSQL + op.execute('ALTER INDEX workflow__conversation_variables_app_id_idx RENAME TO workflow_conversation_variables_app_id_idx') + op.execute('ALTER INDEX workflow__conversation_variables_created_at_idx RENAME TO workflow_conversation_variables_created_at_idx') + else: + # For other databases, use the original drop and create method + with op.batch_alter_table('workflow_conversation_variables', schema=None) as batch_op: + batch_op.drop_index('workflow__conversation_variables_app_id_idx') + batch_op.drop_index('workflow__conversation_variables_created_at_idx') + batch_op.create_index(batch_op.f('workflow_conversation_variables_app_id_idx'), ['app_id'], unique=False) + batch_op.create_index(batch_op.f('workflow_conversation_variables_created_at_idx'), ['created_at'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + conn = op.get_bind() + if conn.dialect.name == 'postgresql': + # Rename indexes back for PostgreSQL + op.execute('ALTER INDEX workflow_conversation_variables_app_id_idx RENAME TO workflow__conversation_variables_app_id_idx') + op.execute('ALTER INDEX workflow_conversation_variables_created_at_idx RENAME TO workflow__conversation_variables_created_at_idx') + else: + # For other databases, use the original drop and create method + with op.batch_alter_table('workflow_conversation_variables', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('workflow_conversation_variables_created_at_idx')) + batch_op.drop_index(batch_op.f('workflow_conversation_variables_app_id_idx')) + batch_op.create_index('workflow__conversation_variables_created_at_idx', ['created_at'], unique=False) + batch_op.create_index('workflow__conversation_variables_app_id_idx', ['app_id'], unique=False) + + # ### end Alembic commands ### diff --git a/api/migrations/versions/2024_11_01_0540-f4d7ce70a7ca_update_upload_files_source_url.py b/api/migrations/versions/2024_11_01_0540-f4d7ce70a7ca_update_upload_files_source_url.py new file mode 100644 index 0000000000..222379a490 --- /dev/null +++ b/api/migrations/versions/2024_11_01_0540-f4d7ce70a7ca_update_upload_files_source_url.py @@ -0,0 +1,41 @@ +"""update upload_files.source_url + +Revision ID: f4d7ce70a7ca +Revises: 93ad8c19c40b +Create Date: 2024-11-01 05:40:03.531751 + +""" +from alembic import op +import models as models +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = 'f4d7ce70a7ca' +down_revision = '93ad8c19c40b' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('upload_files', schema=None) as batch_op: + batch_op.alter_column('source_url', + existing_type=sa.VARCHAR(length=255), + type_=sa.TEXT(), + existing_nullable=False, + existing_server_default=sa.text("''::character varying")) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
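
# Why the index-rename migration above branches on the dialect (a note, not
# part of this patch): on PostgreSQL, ALTER INDEX ... RENAME TO is a
# catalog-only change, so the index is never rebuilt and stays usable, while
# the drop-and-create fallback for other databases re-scans the table to
# rebuild it. A minimal standalone equivalent of the fast path:
from alembic import op


def upgrade():
    op.execute(
        "ALTER INDEX workflow__conversation_variables_app_id_idx "
        "RENAME TO workflow_conversation_variables_app_id_idx"
    )
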
### + with op.batch_alter_table('upload_files', schema=None) as batch_op: + batch_op.alter_column('source_url', + existing_type=sa.TEXT(), + type_=sa.VARCHAR(length=255), + existing_nullable=False, + existing_server_default=sa.text("''::character varying")) + + # ### end Alembic commands ### diff --git a/api/migrations/versions/2024_11_01_0622-d07474999927_update_type_of_custom_disclaimer_to_text.py b/api/migrations/versions/2024_11_01_0622-d07474999927_update_type_of_custom_disclaimer_to_text.py new file mode 100644 index 0000000000..9a4ccf352d --- /dev/null +++ b/api/migrations/versions/2024_11_01_0622-d07474999927_update_type_of_custom_disclaimer_to_text.py @@ -0,0 +1,67 @@ +"""update type of custom_disclaimer to TEXT + +Revision ID: d07474999927 +Revises: f4d7ce70a7ca +Create Date: 2024-11-01 06:22:27.981398 + +""" +from alembic import op +import models as models +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = 'd07474999927' +down_revision = 'f4d7ce70a7ca' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.execute("UPDATE recommended_apps SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL") + op.execute("UPDATE sites SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL") + op.execute("UPDATE tool_api_providers SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL") + + with op.batch_alter_table('recommended_apps', schema=None) as batch_op: + batch_op.alter_column('custom_disclaimer', + existing_type=sa.VARCHAR(length=255), + type_=sa.TEXT(), + nullable=False) + + with op.batch_alter_table('sites', schema=None) as batch_op: + batch_op.alter_column('custom_disclaimer', + existing_type=sa.VARCHAR(length=255), + type_=sa.TEXT(), + nullable=False) + + with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: + batch_op.alter_column('custom_disclaimer', + existing_type=sa.VARCHAR(length=255), + type_=sa.TEXT(), + nullable=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: + batch_op.alter_column('custom_disclaimer', + existing_type=sa.TEXT(), + type_=sa.VARCHAR(length=255), + nullable=True) + + with op.batch_alter_table('sites', schema=None) as batch_op: + batch_op.alter_column('custom_disclaimer', + existing_type=sa.TEXT(), + type_=sa.VARCHAR(length=255), + nullable=True) + + with op.batch_alter_table('recommended_apps', schema=None) as batch_op: + batch_op.alter_column('custom_disclaimer', + existing_type=sa.TEXT(), + type_=sa.VARCHAR(length=255), + nullable=True) + + # ### end Alembic commands ### diff --git a/api/migrations/versions/2024_11_01_0623-09a8d1878d9b_update_workflows_graph_features_and_.py b/api/migrations/versions/2024_11_01_0623-09a8d1878d9b_update_workflows_graph_features_and_.py new file mode 100644 index 0000000000..117a7351cd --- /dev/null +++ b/api/migrations/versions/2024_11_01_0623-09a8d1878d9b_update_workflows_graph_features_and_.py @@ -0,0 +1,73 @@ +"""update workflows graph, features and updated_at + +Revision ID: 09a8d1878d9b +Revises: d07474999927 +Create Date: 2024-11-01 06:23:59.579186 + +""" +from alembic import op +import models as models +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision = '09a8d1878d9b' +down_revision = 'd07474999927' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('conversations', schema=None) as batch_op: + batch_op.alter_column('inputs', + existing_type=postgresql.JSON(astext_type=sa.Text()), + nullable=False) + + with op.batch_alter_table('messages', schema=None) as batch_op: + batch_op.alter_column('inputs', + existing_type=postgresql.JSON(astext_type=sa.Text()), + nullable=False) + + op.execute("UPDATE workflows SET updated_at = created_at WHERE updated_at IS NULL") + op.execute("UPDATE workflows SET graph = '' WHERE graph IS NULL") + op.execute("UPDATE workflows SET features = '' WHERE features IS NULL") + + with op.batch_alter_table('workflows', schema=None) as batch_op: + batch_op.alter_column('graph', + existing_type=sa.TEXT(), + nullable=False) + batch_op.alter_column('features', + existing_type=sa.TEXT(), + nullable=False) + batch_op.alter_column('updated_at', + existing_type=postgresql.TIMESTAMP(), + nullable=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('workflows', schema=None) as batch_op: + batch_op.alter_column('updated_at', + existing_type=postgresql.TIMESTAMP(), + nullable=True) + batch_op.alter_column('features', + existing_type=sa.TEXT(), + nullable=True) + batch_op.alter_column('graph', + existing_type=sa.TEXT(), + nullable=True) + + with op.batch_alter_table('messages', schema=None) as batch_op: + batch_op.alter_column('inputs', + existing_type=postgresql.JSON(astext_type=sa.Text()), + nullable=True) + + with op.batch_alter_table('conversations', schema=None) as batch_op: + batch_op.alter_column('inputs', + existing_type=postgresql.JSON(astext_type=sa.Text()), + nullable=True) + + # ### end Alembic commands ### diff --git a/api/migrations/versions/2a3aebbbf4bb_add_app_tracing.py b/api/migrations/versions/2a3aebbbf4bb_add_app_tracing.py index 09ef5e186c..99b7010612 100644 --- a/api/migrations/versions/2a3aebbbf4bb_add_app_tracing.py +++ b/api/migrations/versions/2a3aebbbf4bb_add_app_tracing.py @@ -22,17 +22,11 @@ def upgrade(): with op.batch_alter_table('apps', schema=None) as batch_op: batch_op.add_column(sa.Column('tracing', sa.Text(), nullable=True)) - with op.batch_alter_table('trace_app_config', schema=None) as batch_op: - batch_op.create_index('tracing_app_config_app_id_idx', ['app_id'], unique=False) - # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - with op.batch_alter_table('trace_app_config', schema=None) as batch_op: - batch_op.drop_index('tracing_app_config_app_id_idx') - with op.batch_alter_table('apps', schema=None) as batch_op: batch_op.drop_column('tracing') diff --git a/api/migrations/versions/c031d46af369_remove_app_model_config_trace_config_.py b/api/migrations/versions/c031d46af369_remove_app_model_config_trace_config_.py index 469c04338a..f87819c367 100644 --- a/api/migrations/versions/c031d46af369_remove_app_model_config_trace_config_.py +++ b/api/migrations/versions/c031d46af369_remove_app_model_config_trace_config_.py @@ -30,30 +30,15 @@ def upgrade(): sa.Column('is_active', sa.Boolean(), server_default=sa.text('true'), nullable=False), sa.PrimaryKeyConstraint('id', name='trace_app_config_pkey') ) + with op.batch_alter_table('trace_app_config', schema=None) as batch_op: batch_op.create_index('trace_app_config_app_id_idx', ['app_id'], unique=False) - with op.batch_alter_table('tracing_app_configs', schema=None) as batch_op: - batch_op.drop_index('tracing_app_config_app_id_idx') # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_table('tracing_app_configs', - sa.Column('id', sa.UUID(), server_default=sa.text('uuid_generate_v4()'), autoincrement=False, nullable=False), - sa.Column('app_id', sa.UUID(), autoincrement=False, nullable=False), - sa.Column('tracing_provider', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('tracing_config', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('now()'), autoincrement=False, nullable=False), - sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('now()'), autoincrement=False, nullable=False), - sa.PrimaryKeyConstraint('id', name='trace_app_config_pkey') - ) - with op.batch_alter_table('tracing_app_configs', schema=None) as batch_op: - batch_op.create_index('trace_app_config_app_id_idx', ['app_id'], unique=False) - - with op.batch_alter_table('trace_app_config', schema=None) as batch_op: - batch_op.drop_index('trace_app_config_app_id_idx') - op.drop_table('trace_app_config') + # ### end Alembic commands ### diff --git a/api/migrations/versions/fecff1c3da27_remove_extra_tracing_app_config_table .py b/api/migrations/versions/fecff1c3da27_remove_extra_tracing_app_config_table .py index 271b2490de..6f76a361d9 100644 --- a/api/migrations/versions/fecff1c3da27_remove_extra_tracing_app_config_table .py +++ b/api/migrations/versions/fecff1c3da27_remove_extra_tracing_app_config_table .py @@ -20,12 +20,10 @@ def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### op.drop_table('tracing_app_configs') - with op.batch_alter_table('trace_app_config', schema=None) as batch_op: - batch_op.drop_index('tracing_app_config_app_id_idx') - # idx_dataset_permissions_tenant_id with op.batch_alter_table('dataset_permissions', schema=None) as batch_op: batch_op.create_index('idx_dataset_permissions_tenant_id', ['tenant_id']) + # ### end Alembic commands ### @@ -46,9 +44,7 @@ def downgrade(): sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey') ) - with op.batch_alter_table('trace_app_config', schema=None) as batch_op: - batch_op.create_index('tracing_app_config_app_id_idx', ['app_id']) - with op.batch_alter_table('dataset_permissions', schema=None) as batch_op: batch_op.drop_index('idx_dataset_permissions_tenant_id') + # ### end Alembic commands ### diff --git a/api/models/dataset.py b/api/models/dataset.py index 4e2ccab7e8..a1a626d7e4 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -560,10 +560,28 @@ class DocumentSegment(db.Model): ) def get_sign_content(self): - pattern = r"/files/([a-f0-9\-]+)/file-preview" - text = self.content - matches = re.finditer(pattern, text) signed_urls = [] + text = self.content + + # For data before v0.10.0 + pattern = r"/files/([a-f0-9\-]+)/image-preview" + matches = re.finditer(pattern, text) + for match in matches: + upload_file_id = match.group(1) + nonce = os.urandom(16).hex() + timestamp = str(int(time.time())) + data_to_sign = f"image-preview|{upload_file_id}|{timestamp}|{nonce}" + secret_key = dify_config.SECRET_KEY.encode() if dify_config.SECRET_KEY else b"" + sign = hmac.new(secret_key, data_to_sign.encode(), hashlib.sha256).digest() + encoded_sign = base64.urlsafe_b64encode(sign).decode() + + params = f"timestamp={timestamp}&nonce={nonce}&sign={encoded_sign}" + signed_url = f"{match.group(0)}?{params}" + signed_urls.append((match.start(), match.end(), signed_url)) + + # For data after v0.10.0 + pattern = r"/files/([a-f0-9\-]+)/file-preview" + matches = re.finditer(pattern, text) for match in matches: upload_file_id = match.group(1) nonce = os.urandom(16).hex() @@ -704,6 +722,38 @@ class DatasetCollectionBinding(db.Model): created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) +class TidbAuthBinding(db.Model): + __tablename__ = "tidb_auth_bindings" + __table_args__ = ( + db.PrimaryKeyConstraint("id", name="tidb_auth_bindings_pkey"), + db.Index("tidb_auth_bindings_tenant_idx", "tenant_id"), + db.Index("tidb_auth_bindings_active_idx", "active"), + db.Index("tidb_auth_bindings_created_at_idx", "created_at"), + db.Index("tidb_auth_bindings_status_idx", "status"), + ) + id = db.Column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()")) + tenant_id = db.Column(StringUUID, nullable=True) + cluster_id = db.Column(db.String(255), nullable=False) + cluster_name = db.Column(db.String(255), nullable=False) + active = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) + status = db.Column(db.String(255), nullable=False, server_default=db.text("CREATING")) + account = db.Column(db.String(255), nullable=False) + password = db.Column(db.String(255), nullable=False) + created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + + +class Whitelist(db.Model): + __tablename__ = "whitelists" + __table_args__ = ( + db.PrimaryKeyConstraint("id", name="whitelists_pkey"), + db.Index("whitelists_tenant_idx", "tenant_id"), + ) + id = db.Column(StringUUID, primary_key=True, 
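
# A minimal verifier sketch (not part of this patch) for the signed preview
# URLs produced by `get_sign_content` above. The signature covers
# "<prefix>|<upload_file_id>|<timestamp>|<nonce>" (prefix is "image-preview"
# for pre-v0.10.0 data, "file-preview" after) under HMAC-SHA256 with the app
# secret. SECRET_KEY and the 5-minute expiry window here are illustrative
# assumptions, not values taken from this patch:
import base64
import hashlib
import hmac
import time

SECRET_KEY = b"change-me"  # hypothetical; the app reads dify_config.SECRET_KEY


def verify_sign(prefix: str, upload_file_id: str, timestamp: str, nonce: str, sign: str) -> bool:
    data_to_sign = f"{prefix}|{upload_file_id}|{timestamp}|{nonce}"
    expected = base64.urlsafe_b64encode(
        hmac.new(SECRET_KEY, data_to_sign.encode(), hashlib.sha256).digest()
    ).decode()
    fresh = int(time.time()) - int(timestamp) < 300  # assumed 5-minute window
    return hmac.compare_digest(expected, sign) and fresh
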
server_default=db.text("uuid_generate_v4()")) + tenant_id = db.Column(StringUUID, nullable=True) + category = db.Column(db.String(255), nullable=False) + created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + + class DatasetPermission(db.Model): __tablename__ = "dataset_permissions" __table_args__ = ( diff --git a/api/models/model.py b/api/models/model.py index 843c6d4e2c..8a619d3f30 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -11,6 +11,7 @@ if TYPE_CHECKING: from typing import Any, Literal +import sqlalchemy as sa from flask import request from flask_login import UserMixin from pydantic import BaseModel, Field @@ -406,7 +407,7 @@ class AppModelConfig(Base): "file_upload": self.file_upload_dict, } - def from_model_config_dict(self, model_config: dict): + def from_model_config_dict(self, model_config: Mapping[str, Any]): self.opening_statement = model_config.get("opening_statement") self.suggested_questions = ( json.dumps(model_config["suggested_questions"]) if model_config.get("suggested_questions") else None @@ -493,7 +494,7 @@ class RecommendedApp(Base): description = db.Column(db.JSON, nullable=False) copyright = db.Column(db.String(255), nullable=False) privacy_policy = db.Column(db.String(255), nullable=False) - custom_disclaimer = db.Column(db.String(255), nullable=True) + custom_disclaimer: Mapped[str] = mapped_column(sa.TEXT, default="") category = db.Column(db.String(255), nullable=False) position = db.Column(db.Integer, nullable=False, default=0) is_listed = db.Column(db.Boolean, nullable=False, default=True) @@ -990,6 +991,9 @@ class Message(Base): config=FileExtraConfig(), ) elif message_file.transfer_method == "tool_file": + if message_file.upload_file_id is None: + assert message_file.url is not None + message_file.upload_file_id = message_file.url.split("/")[-1].split(".")[0] mapping = { "id": message_file.id, "type": message_file.type, @@ -1014,6 +1018,7 @@ class Message(Base): for (file, message_file) in zip(files, message_files) ] + db.session.commit() return result @property @@ -1122,7 +1127,7 @@ class MessageFile(Base): self.url = url self.belongs_to = belongs_to self.upload_file_id = upload_file_id - self.created_by_role = created_by_role + self.created_by_role = created_by_role.value self.created_by = created_by id: Mapped[str] = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) @@ -1315,7 +1320,7 @@ class Site(Base): privacy_policy = db.Column(db.String(255)) show_workflow_steps = db.Column(db.Boolean, nullable=False, server_default=db.text("true")) use_icon_as_answer_icon = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - custom_disclaimer = db.Column(db.String(255), nullable=True) + custom_disclaimer: Mapped[str] = mapped_column(sa.TEXT, default="") customize_domain = db.Column(db.String(255)) customize_token_strategy = db.Column(db.String(255), nullable=False) prompt_public = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) @@ -1393,6 +1398,7 @@ class UploadFile(Base): used_by: Mapped[str | None] = db.Column(StringUUID, nullable=True) used_at: Mapped[datetime | None] = db.Column(db.DateTime, nullable=True) hash: Mapped[str | None] = db.Column(db.String(255), nullable=True) + source_url: Mapped[str] = mapped_column(sa.TEXT, default="") def __init__( self, @@ -1411,7 +1417,8 @@ class UploadFile(Base): used_by: str | None = None, used_at: datetime | None = None, hash: str | None = None, - ) -> None: + source_url: str = "", + ): 
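
# A tiny sketch (not part of this patch) of the backfill added in the
# Message hunk above for tool-file attachments: when upload_file_id is
# missing, it is recovered from the file URL, which is assumed to end in
# "/<file_id>.<extension>". The URL below is hypothetical:
def upload_file_id_from_url(url: str) -> str:
    return url.split("/")[-1].split(".")[0]


assert upload_file_id_from_url("https://example.com/files/tools/abc123.png") == "abc123"
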
self.tenant_id = tenant_id self.storage_type = storage_type self.key = key @@ -1426,6 +1433,7 @@ class UploadFile(Base): self.used_by = used_by self.used_at = used_at self.hash = hash + self.source_url = source_url class ApiRequest(Base): diff --git a/api/models/tools.py b/api/models/tools.py index 869dd0201f..248e28e0b9 100644 --- a/api/models/tools.py +++ b/api/models/tools.py @@ -2,6 +2,7 @@ import json from datetime import datetime from typing import Optional +import sqlalchemy as sa from deprecated import deprecated from sqlalchemy import ForeignKey from sqlalchemy.orm import Mapped, mapped_column @@ -82,7 +83,7 @@ class ApiToolProvider(Base): # privacy policy privacy_policy = db.Column(db.String(255), nullable=True) # custom_disclaimer - custom_disclaimer = db.Column(db.String(255), nullable=True) + custom_disclaimer: Mapped[str] = mapped_column(sa.TEXT, default="") created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) diff --git a/api/models/workflow.py b/api/models/workflow.py index da3152ec75..bc4434ae5a 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -2,11 +2,12 @@ import json from collections.abc import Mapping, Sequence from datetime import datetime from enum import Enum -from typing import TYPE_CHECKING, Any, Union +from typing import TYPE_CHECKING, Any, Optional, Union if TYPE_CHECKING: from models.model import AppMode +import sqlalchemy as sa from sqlalchemy import Index, PrimaryKeyConstraint, func from sqlalchemy.orm import Mapped, mapped_column @@ -103,14 +104,14 @@ class Workflow(Base): app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) type: Mapped[str] = mapped_column(db.String(255), nullable=False) version: Mapped[str] = mapped_column(db.String(255), nullable=False) - graph: Mapped[str] = mapped_column(db.Text) - _features: Mapped[str] = mapped_column("features") + graph: Mapped[str] = mapped_column(sa.Text) + _features: Mapped[str] = mapped_column("features", sa.TEXT) created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) created_at: Mapped[datetime] = mapped_column( db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)") ) - updated_by: Mapped[str] = mapped_column(StringUUID) - updated_at: Mapped[datetime] = mapped_column(db.DateTime) + updated_by: Mapped[Optional[str]] = mapped_column(StringUUID) + updated_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False) _environment_variables: Mapped[str] = mapped_column( "environment_variables", db.Text, nullable=False, server_default="{}" ) diff --git a/api/poetry.lock b/api/poetry.lock index 6d9ff3eb5c..d7af124794 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -553,13 +553,13 @@ cryptography = "*" [[package]] name = "azure-ai-inference" -version = "1.0.0b4" +version = "1.0.0b5" description = "Microsoft Azure Ai Inference Client Library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "azure-ai-inference-1.0.0b4.tar.gz", hash = "sha256:5464404bef337338d4af6eefde3af903400ddb8e5c9e6820f902303542fa0f72"}, - {file = "azure_ai_inference-1.0.0b4-py3-none-any.whl", hash = "sha256:e2c949f91845a8cd96cb9a61ffd432b5b0f4ce236b9be8c29d10f38e0a327412"}, + {file = "azure_ai_inference-1.0.0b5-py3-none-any.whl", hash = "sha256:0147653088033f1fd059d5f4bd0fedac82529fdcc7a0d2183d9508b3f80cf549"}, + {file = "azure_ai_inference-1.0.0b5.tar.gz", hash = 
"sha256:c95b490bcd670ccdeb1048dc2b45e0f8252a4d69a348ca15d4510d327b64dd0d"}, ] [package.dependencies] @@ -567,6 +567,9 @@ azure-core = ">=1.30.0" isodate = ">=0.6.1" typing-extensions = ">=4.6.0" +[package.extras] +opentelemetry = ["azure-core-tracing-opentelemetry"] + [[package]] name = "azure-ai-ml" version = "1.20.0" @@ -844,13 +847,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.40" +version = "1.35.47" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.40-py3-none-any.whl", hash = "sha256:072cc47f29cb1de4fa77ce6632e4f0480af29b70816973ff415fbaa3f50bd1db"}, - {file = "botocore-1.35.40.tar.gz", hash = "sha256:547e0a983856c7d7aeaa30fca2a283873c57c07366cd806d2d639856341b3c31"}, + {file = "botocore-1.35.47-py3-none-any.whl", hash = "sha256:05f4493119a96799ff84d43e78691efac3177e1aec8840cca99511de940e342a"}, + {file = "botocore-1.35.47.tar.gz", hash = "sha256:f8f703463d3cd8b6abe2bedc443a7ab29f0e2ff1588a2e83164b108748645547"}, ] [package.dependencies] @@ -863,47 +866,47 @@ crt = ["awscrt (==0.22.0)"] [[package]] name = "bottleneck" -version = "1.4.1" +version = "1.4.2" description = "Fast NumPy array functions written in C" optional = false -python-versions = "*" +python-versions = ">=3.9" files = [ - {file = "Bottleneck-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5be5fc34f03216d85f14d01ca12c857ee68f72d7c17dccd22743326200ba3b9f"}, - {file = "Bottleneck-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f44cc6ad1a44d3427009fa2c2298ef0b346b7024e30dc7fc9778f5b78f8c10c"}, - {file = "Bottleneck-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e5babe835350e9f8710b3f8ffb9d5d202b4b13d77bad0e0f9a395af16a55f36"}, - {file = "Bottleneck-1.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d22a9b4d9cef8bb218df15a13d4aa213042c434c595d5c732d7f4ad287dbe565"}, - {file = "Bottleneck-1.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f47cb74ad6675dc7a54ef9f6fa9e649134e98ba71e6c98b8a34054e48014c941"}, - {file = "Bottleneck-1.4.1-cp310-cp310-win32.whl", hash = "sha256:3a4acf90714de7783f4706eb19d42f4d32ac842082c7b42d6956530ef0884b19"}, - {file = "Bottleneck-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:54d60a44bf439c06d35c6c3abdb39aca3de330064c48922a6bad0a96b1f096d5"}, - {file = "Bottleneck-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d491dad4757e9bed4204b3b823dfe9996733e11fbd9d3abaec52329a30a02bcc"}, - {file = "Bottleneck-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9186982d57db422641f30a30201fc7b158166887ca803c2af975cab6c9febb7"}, - {file = "Bottleneck-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59e5bdff742067f10c7fda48e981e4b50ba7c7f39f8627e8c9a1c7224457badd"}, - {file = "Bottleneck-1.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ab6086ced21414288a5b79a00fe359d463a3189f82b5a71adce931655ba93c2"}, - {file = "Bottleneck-1.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:359d3a08e7c1a0938b07d6117cab8b38d4140df6712fbe56491ad44f46878d2f"}, - {file = "Bottleneck-1.4.1-cp311-cp311-win32.whl", hash = "sha256:bb642a652e656a20ea239d241aa2c39e0e3d3882aacea7d3f8412d9ec7a5f185"}, - {file = "Bottleneck-1.4.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:bd89930c0375799aaf381a2a2924ca53a17ef84734d9e1a5763da7eb4499e53d"}, - {file = "Bottleneck-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:32a0e516a66a0d048e993bc926aa878208e8db549d30e39bef7b933a81dcad26"}, - {file = "Bottleneck-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1be7a1589a553ce6575d77a0ae3404b4a0a69c647e1cda85e41f388a61c1210d"}, - {file = "Bottleneck-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:845e2975704c1e40cd27a3d1d3eb45af031e7f25edf826577dbedab33bf1674e"}, - {file = "Bottleneck-1.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fea946c0fed796b62ddd812ea6533d2733d7a070383236a0d9ba585e66619c30"}, - {file = "Bottleneck-1.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:23e16efc311df996321fde53537815acca26dca731395d9e6da36e743fc0d27f"}, - {file = "Bottleneck-1.4.1-cp312-cp312-win32.whl", hash = "sha256:75ab6c9daed8528f9f0f37efb9f15d0b9759b8902325abcfd38cb79126c76db4"}, - {file = "Bottleneck-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:7ce544479354ef3893c04b8f135b454c126cc236907879f552422855de5df35d"}, - {file = "Bottleneck-1.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:681e8889170d349bed1a7c30a4b32ba25f6988e1432217a91967fd7a5a963d7d"}, - {file = "Bottleneck-1.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d774d69ed3ba968b9b2e5271b14d013f85b820612ec4dbd8f0a32f23a07c6d77"}, - {file = "Bottleneck-1.4.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d0a0be22fa01c2698ff746a4e1e27238acd18409a353e2c7172b0e50b2f04a9"}, - {file = "Bottleneck-1.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:eb39b78f99304f174338c486a015d490707374a0c349167ba6ade7b6d3484304"}, - {file = "Bottleneck-1.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:052fcc86f5713a3192f3ce0a4f950c2ff451df8c43c9d0291e7c4bd624ae1292"}, - {file = "Bottleneck-1.4.1-cp313-cp313-win32.whl", hash = "sha256:cbcc5e36ba50a50d5d8084e2772008b33b9507d06c2c1642c8e2299709439881"}, - {file = "Bottleneck-1.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:3e36a7ab42bc0f1e2508f1c33308fb08c8454edc4694164ee8fdd46918adea03"}, - {file = "Bottleneck-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f12ab7f52881065323ac4ac1f8fd424cf5ecde0d9c0746945fb6e8b1a5cbfcac"}, - {file = "Bottleneck-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a76155466718066fcfb77329b29be71d1bce679b1a7daf693a7a08fbf42577b8"}, - {file = "Bottleneck-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cde4981d889f41f175a054ad5c3434b81ee8749f31b6e7b9ec92776ec4f7292f"}, - {file = "Bottleneck-1.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:351ddfedc51c2575c90d43a03e350bdc00020e219dfa877480df2b69dbc31eab"}, - {file = "Bottleneck-1.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:98032a0efb2a4f46223645600f53cb9f7c50dbec8c146e2869585dc9f81fa0ea"}, - {file = "Bottleneck-1.4.1-cp39-cp39-win32.whl", hash = "sha256:ddc2218dc8e8b626d1645e57596bc353559facb99c362c9ba5eb6d5b4a12d5fc"}, - {file = "Bottleneck-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:d6f275a727a59c27897e3e29dabde4d8af299fcfc667f13a9b00fd2a1ab72178"}, - {file = "bottleneck-1.4.1.tar.gz", hash = 
"sha256:58c66619db62291c9ca3b497b05f40f7b1ff9ac010b88bff1925f3101dae2c89"}, + {file = "Bottleneck-1.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:125436df93751a226eab1732783aa8f6125e88e779587aa61be071fb66e41f9d"}, + {file = "Bottleneck-1.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c6df9a60ec6ab88fec934ca864266ba95edd89c490af71dc9cd8afb2a54ebd9"}, + {file = "Bottleneck-1.4.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2fe327dc2d0564e295a5857a252755103f8c6e05b07d3ff80a69afaa9f5065"}, + {file = "Bottleneck-1.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6b7790ca8658cd69e3cc0d0e4ff0e9829d60849bf7945fbd7344fbce05b2bbb8"}, + {file = "Bottleneck-1.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6282fa925ac3768f66e3547f89a512376d3f9de7ef53bdd37aa29232fd864054"}, + {file = "Bottleneck-1.4.2-cp310-cp310-win32.whl", hash = "sha256:e56a206fbf48e3b8054a964398bf1ed843e9625d3c6bdbeb7898cb48bf97441b"}, + {file = "Bottleneck-1.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:eb0c611d15b0fd8f511d288e8964e4725b4b3b0d9d310880cf0ff6b8dd03c859"}, + {file = "Bottleneck-1.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b6902ebf3e85315b481bc084f10c5770f8240275ad1e039ac69c7c8d2013b040"}, + {file = "Bottleneck-1.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c2fd34b9b490204f95288f0dd35d37042486a95029617246c88c0f94a0ab49fe"}, + {file = "Bottleneck-1.4.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:122845e3106c85465551d4a9a3777841347cfedfbebb3aa985cca110e07030b1"}, + {file = "Bottleneck-1.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1f61658ebdf5a178298544336b65020730bf86cc092dab5f6579a99a86bd888b"}, + {file = "Bottleneck-1.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7c7d29c044a3511b36fd744503c3e697e279c273a8477a6d91a2831d04fd19e0"}, + {file = "Bottleneck-1.4.2-cp311-cp311-win32.whl", hash = "sha256:c663cbba8f52011fd82ee08c6a85c93b34b19e0e7ebba322d2d67809f34e0597"}, + {file = "Bottleneck-1.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:89651ef18c06616850203bf8875c958c5d316ea48d8ba60d9b450199d39ae391"}, + {file = "Bottleneck-1.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a74ddd0417f42eeaba37375f0fc065b28451e0fba45cb2f99e88880b10b3fa43"}, + {file = "Bottleneck-1.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:070d22f2f62ab81297380a89492cca931e4d9443fa4b84c2baeb52db09c3b1b4"}, + {file = "Bottleneck-1.4.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fc4e7645bd425c05e05acd5541e9e09cb4179e71164e862f082561bf4509eac"}, + {file = "Bottleneck-1.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:037315c56605128a39f77d19af6a6019dc8c21a63694a4bfef3c026ed963be2e"}, + {file = "Bottleneck-1.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:99778329331d5fae8df19772a019e8b73ba4d9d1650f110cd995ab7657114db0"}, + {file = "Bottleneck-1.4.2-cp312-cp312-win32.whl", hash = "sha256:7363b3c8ce6ca433779cd7e96bcb94c0e516dcacadff0011adcbf0b3ac86bc9d"}, + {file = "Bottleneck-1.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:48c6b9d9287c4102b803fcb01ae66ae7ef6b310b711b4b7b7e23bf952894dc05"}, + {file = "Bottleneck-1.4.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:c1c885ad02a6a8fa1f7ee9099f29b9d4c03eb1da2c7ab25839482d5cce739021"}, + {file = "Bottleneck-1.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7a1b023de1de3d84b18826462718fba548fed41870df44354f9ab6a414ea82f"}, + {file = "Bottleneck-1.4.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c9dbaf737b605b30c81611f2c1d197c2fd2e46c33f605876c1d332d3360c4fc"}, + {file = "Bottleneck-1.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7ebbcbe5d4062e37507b9a81e2aacdb1fcccc6193f7feff124ef2b5a6a5eb740"}, + {file = "Bottleneck-1.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:964f6ac4118ddab3bbbac79d4f726b093459be751baba73ee0aa364666e8068e"}, + {file = "Bottleneck-1.4.2-cp313-cp313-win32.whl", hash = "sha256:2db287f6ecdbb1c998085eca9b717fec2bfc48a4ab6ae070a9820ba8ab59c90b"}, + {file = "Bottleneck-1.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:26b5f0531f7044befaad95c20365dd666372e66bdacbfaf009ff65d60285534d"}, + {file = "Bottleneck-1.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:72d6aa95cdd782833d2589f81434fd865ba004b8938e07920b6ef02796ce8918"}, + {file = "Bottleneck-1.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b33e83665e7daf7f513fe1f7b04b13944d44b6635c45d5a9c89c9e5ed11811b6"}, + {file = "Bottleneck-1.4.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52248f3e0fead78c17912fb086a585c86f567019247d21c69e87645241b97b02"}, + {file = "Bottleneck-1.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:dce1a3c5ff89a56fb2678c9bda17b89f60f710d6002ab7cd72b7661bc3fae64d"}, + {file = "Bottleneck-1.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:48d2e101d99a9d72aa86da1a048d2094f4e1db0cf77519d1c33239f9d62da162"}, + {file = "Bottleneck-1.4.2-cp39-cp39-win32.whl", hash = "sha256:9d7b12936516f944e3d981a64038f99acb21f0e99f92fad16d9a468248c2b231"}, + {file = "Bottleneck-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:7b459d08f1f3e2da85db0a9e2d3e6e3541105f5866e9026dbca32dafc5106f2b"}, + {file = "bottleneck-1.4.2.tar.gz", hash = "sha256:fa8e8e1799dea5483ce6669462660f9d9a95649f6f98a80d315b84ec89f449f4"}, ] [package.dependencies] @@ -1798,6 +1801,46 @@ requests = ">=2.8" six = "*" xmltodict = "*" +[[package]] +name = "couchbase" +version = "4.3.3" +description = "Python Client for Couchbase" +optional = false +python-versions = ">=3.7" +files = [ + {file = "couchbase-4.3.3-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:d8069e4f01332859d56cca597874645c914699162b3979d1b432f0dfc186b124"}, + {file = "couchbase-4.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1caa6cfef49c785b35b1702102f718227f351df87bba2694b9334520c41e9eb5"}, + {file = "couchbase-4.3.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f4a9a65c44935249fa078fb90a3c28ea71da9d2d5889fcd514b12d0538010ae0"}, + {file = "couchbase-4.3.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4f144b8c482c18283d8e419b844630d41f3249b07d43d40b5e3535444e57d0fb"}, + {file = "couchbase-4.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1c534fba6fdc7cf47eed9dee8a57d1e9eb867bf008574e321fa380a77cebf32f"}, + {file = "couchbase-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b841be06e0e4370b69ebef6bca3409c378186f7d6e964cd645ba18e97216c022"}, + {file = "couchbase-4.3.3-cp311-cp311-macosx_10_15_x86_64.whl", hash = 
"sha256:eee7a73b3acbdc78ae314fddf7f975b3c9e05df07df255f4dcc878939a2abae0"}, + {file = "couchbase-4.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:53417cafcf90ff4e2fd81ebba2a08b7ad56f17160d1c5019ad3b09c758aeb363"}, + {file = "couchbase-4.3.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0cefd13bea8b0f150f1b9d27fd7614f971f77419b31817781d26ba315ed658bb"}, + {file = "couchbase-4.3.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:78fa1054d7740e2fe38fce0a2aab4e9a2d30263d894e0615ee5df297f02f59a3"}, + {file = "couchbase-4.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb093899cfad5a7472258a9b6a57775dbf23a6e0180241507ba89ce3ab241e41"}, + {file = "couchbase-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:f7cfbdc699af5715f49365ffbb05a6a7366a534c0d7161edf270ad3e735a6c5d"}, + {file = "couchbase-4.3.3-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:58352cae9b8affdaa2ac012e0a03c8c2632ee6297a878232888b4e0360d0d5df"}, + {file = "couchbase-4.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:728e7e3b5e1682706cb9d63993d289226d02a25089527b8ecb4e3889dabc38cf"}, + {file = "couchbase-4.3.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:73014bf098cf14187a39cc13453e0d859c1d54568df28f69cc308a9a5f24feb2"}, + {file = "couchbase-4.3.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a743375804068ae01b73c916bfca738764c8c12f381bb399ef04e784935856a1"}, + {file = "couchbase-4.3.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:394c122cfe02a76a99e7d5178e64129f6da49843225e78d8629abcab556c24af"}, + {file = "couchbase-4.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:bf85d7a5cda548d9801614651206068b4445fa37972e62b14d7521a958198693"}, + {file = "couchbase-4.3.3-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:92d23c9cedd571631070791f2afee0e3d7d8c9ce1bf2ea6e9a4f2fdbc37a0f1e"}, + {file = "couchbase-4.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:38c42eb29a73cce2998ae5df45bd61b16dce9765d3bff968ec5cf6a622faa291"}, + {file = "couchbase-4.3.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:afed137bf0edc642d7b201b6ab7b1e7117bb4c8eac6b2f253cc6e106f334a2a1"}, + {file = "couchbase-4.3.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:954d991377d47883aaf903934c5d0f19577680a2abf80d3ce5bb9b3c80991fc7"}, + {file = "couchbase-4.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5552b9fa684630698dc98d6f3b1082540634c1b7ad5bf53b843b5da57b0169c"}, + {file = "couchbase-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:f88f2b7e0c894f7237d9f3fb5c46abc44b8151a97b3ca8e75f57d23ebf59f9da"}, + {file = "couchbase-4.3.3-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:769e1e2367ea1d4de181fcd4b4e353e9abef97d15b581a6c5aea49ece3dc7d59"}, + {file = "couchbase-4.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:47f59a0b35ffce060583fd11f98f049f3b70701cf14aab9ac092594aca486aeb"}, + {file = "couchbase-4.3.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:440bb93d611827ba0ea2403c6f204fe931467a6cb5811f0e03bf1779204ef843"}, + {file = "couchbase-4.3.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cdb4dde62e1d41c0b8707121ab68fa78b7a1508541bd48fc850be396f91bc8d9"}, + {file = "couchbase-4.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7f8cf45f317b39cc19db5c67b565662f08d6c90305b3aa14e04bc22707258213"}, + {file = "couchbase-4.3.3-cp39-cp39-win_amd64.whl", hash = 
"sha256:c97d48ad486c8f201b4482d5594258f949369cb44792ed148d5159a3d12ae21b"}, + {file = "couchbase-4.3.3.tar.gz", hash = "sha256:27808500551564b39b46943cf3daab572694889c1eb638425d363edb48b20da7"}, +] + [[package]] name = "coverage" version = "7.2.7" @@ -1882,38 +1925,38 @@ files = [ [[package]] name = "cryptography" -version = "43.0.1" +version = "43.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, - {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, - {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, - {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, - {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, - {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, - {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, - {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, - {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, - {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, - {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, + {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, + {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, + {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, + {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, + {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, + {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, + {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, ] [package.dependencies] @@ -1926,7 +1969,7 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -2214,18 +2257,18 @@ files = [ [[package]] name = "duckduckgo-search" -version = "6.3.0" +version = "6.3.2" description = "Search for words, documents, images, news, maps and text translation using the DuckDuckGo.com search engine." optional = false python-versions = ">=3.8" files = [ - {file = "duckduckgo_search-6.3.0-py3-none-any.whl", hash = "sha256:9a231a7b325226811cf7d35a240f3f501e718ae10a1aa0a638cabc80e129dfe7"}, - {file = "duckduckgo_search-6.3.0.tar.gz", hash = "sha256:e9f56955569325a7d9cacda2488ca78bf6629a459e74415892bee560b664f5eb"}, + {file = "duckduckgo_search-6.3.2-py3-none-any.whl", hash = "sha256:cd631275292460d590d1d496995d002bf2fe6db9752713fab17b9e95924ced98"}, + {file = "duckduckgo_search-6.3.2.tar.gz", hash = "sha256:53dbf45f8749bfc67483eb9f281f2e722a5fe644d61c54ed9e551d26cb6bcbf2"}, ] [package.dependencies] click = ">=8.1.7" -primp = ">=0.6.3" +primp = ">=0.6.4" [package.extras] dev = ["mypy (>=1.11.1)", "pytest (>=8.3.1)", "pytest-asyncio (>=0.23.8)", "ruff (>=0.6.1)"] @@ -2339,6 +2382,20 @@ files = [ {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, ] +[[package]] +name = "eval-type-backport" +version = "0.2.0" +description = "Like `typing._eval_type`, but lets older Python versions use newer typing features." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "eval_type_backport-0.2.0-py3-none-any.whl", hash = "sha256:ac2f73d30d40c5a30a80b8739a789d6bb5e49fdffa66d7912667e2015d9c9933"}, + {file = "eval_type_backport-0.2.0.tar.gz", hash = "sha256:68796cfbc7371ebf923f03bdf7bef415f3ec098aeced24e054b253a0e78f7b37"}, +] + +[package.extras] +tests = ["pytest"] + [[package]] name = "exceptiongroup" version = "1.2.2" @@ -2355,18 +2412,18 @@ test = ["pytest (>=6)"] [[package]] name = "fastapi" -version = "0.115.2" +version = "0.115.3" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.115.2-py3-none-any.whl", hash = "sha256:61704c71286579cc5a598763905928f24ee98bfcc07aabe84cfefb98812bbc86"}, - {file = "fastapi-0.115.2.tar.gz", hash = "sha256:3995739e0b09fa12f984bce8fa9ae197b35d433750d3d312422d846e283697ee"}, + {file = "fastapi-0.115.3-py3-none-any.whl", hash = "sha256:8035e8f9a2b0aa89cea03b6c77721178ed5358e1aea4cd8570d9466895c0638c"}, + {file = "fastapi-0.115.3.tar.gz", hash = "sha256:c091c6a35599c036d676fa24bd4a6e19fa30058d93d950216cdc672881f6f7db"}, ] [package.dependencies] pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.37.2,<0.41.0" +starlette = ">=0.40.0,<0.42.0" typing-extensions = ">=4.8.0" [package.extras] @@ -2761,99 +2818,114 @@ files = [ [[package]] name = "frozenlist" -version = "1.4.1" +version = "1.5.0" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false python-versions = ">=3.8" files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = 
"frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, - {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, - {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, - {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, - {file = 
"frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, - {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, - {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, - {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, - {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5"}, + {file = "frozenlist-1.5.0-cp310-cp310-win32.whl", hash = "sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb"}, + {file = "frozenlist-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf"}, + {file = "frozenlist-1.5.0-cp311-cp311-win32.whl", hash = "sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942"}, + {file = "frozenlist-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f"}, + {file = "frozenlist-1.5.0-cp312-cp312-win32.whl", hash = "sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8"}, + {file = "frozenlist-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f"}, + {file = 
"frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03"}, + {file = "frozenlist-1.5.0-cp313-cp313-win32.whl", hash = "sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c"}, + {file = "frozenlist-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9"}, + 
{file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e"}, + {file = "frozenlist-1.5.0-cp38-cp38-win32.whl", hash = "sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723"}, + {file = "frozenlist-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c"}, + {file = "frozenlist-1.5.0-cp39-cp39-win32.whl", hash = "sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3"}, + {file = "frozenlist-1.5.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0"}, + {file = "frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3"}, + {file = "frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817"}, ] [[package]] name = "fsspec" -version = "2024.9.0" +version = "2024.10.0" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2024.9.0-py3-none-any.whl", hash = "sha256:a0947d552d8a6efa72cc2c730b12c41d043509156966cca4fb157b0f2a0c574b"}, - {file = "fsspec-2024.9.0.tar.gz", hash = "sha256:4b0afb90c2f21832df142f292649035d80b421f60a9e1c027802e5a0da2b04e8"}, + {file = "fsspec-2024.10.0-py3-none-any.whl", hash = "sha256:03b9a6785766a4de40368b88906366755e2819e758b83705c88cd7cb5fe81871"}, + {file = "fsspec-2024.10.0.tar.gz", hash = "sha256:eda2d8a4116d4f2429db8550f2457da57279247dd930bb12f821b58391359493"}, ] [package.extras] @@ -3391,13 +3463,13 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "gotrue" -version = "2.9.2" +version = "2.9.3" description = "Python Client Library for Supabase Auth" optional = false -python-versions = "<4.0,>=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "gotrue-2.9.2-py3-none-any.whl", hash = "sha256:fcd5279e8f1cc630f3ac35af5485fe39f8030b23906776920d2c32a4e308cff4"}, - {file = "gotrue-2.9.2.tar.gz", hash = "sha256:57b3245e916c5efbf19a21b1181011a903c1276bb1df2d847558f2f24f29abb2"}, + {file = "gotrue-2.9.3-py3-none-any.whl", hash = "sha256:9d2e9c74405d879f4828e0a7b94daf167a6e109c10ae6e5c59a0e21446f6e423"}, + {file = "gotrue-2.9.3.tar.gz", hash = "sha256:051551d80e642bdd2ab42cac78207745d89a2a08f429a1512d82624e675d8255"}, ] [package.dependencies] @@ -3508,70 +3580,70 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4 [[package]] name = "grpcio" -version = "1.66.2" +version = "1.67.0" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" files = [ - {file = "grpcio-1.66.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:fe96281713168a3270878255983d2cb1a97e034325c8c2c25169a69289d3ecfa"}, - {file = "grpcio-1.66.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:73fc8f8b9b5c4a03e802b3cd0c18b2b06b410d3c1dcbef989fdeb943bd44aff7"}, - {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:03b0b307ba26fae695e067b94cbb014e27390f8bc5ac7a3a39b7723fed085604"}, - {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d69ce1f324dc2d71e40c9261d3fdbe7d4c9d60f332069ff9b2a4d8a257c7b2b"}, - {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05bc2ceadc2529ab0b227b1310d249d95d9001cd106aa4d31e8871ad3c428d73"}, - {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ac475e8da31484efa25abb774674d837b343afb78bb3bcdef10f81a93e3d6bf"}, - {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0be4e0490c28da5377283861bed2941d1d20ec017ca397a5df4394d1c31a9b50"}, - {file = "grpcio-1.66.2-cp310-cp310-win32.whl", hash = "sha256:4e504572433f4e72b12394977679161d495c4c9581ba34a88d843eaf0f2fbd39"}, - {file = "grpcio-1.66.2-cp310-cp310-win_amd64.whl", hash = "sha256:2018b053aa15782db2541ca01a7edb56a0bf18c77efed975392583725974b249"}, - {file = "grpcio-1.66.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:2335c58560a9e92ac58ff2bc5649952f9b37d0735608242973c7a8b94a6437d8"}, - {file = 
"grpcio-1.66.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45a3d462826f4868b442a6b8fdbe8b87b45eb4f5b5308168c156b21eca43f61c"}, - {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a9539f01cb04950fd4b5ab458e64a15f84c2acc273670072abe49a3f29bbad54"}, - {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce89f5876662f146d4c1f695dda29d4433a5d01c8681fbd2539afff535da14d4"}, - {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25a14af966438cddf498b2e338f88d1c9706f3493b1d73b93f695c99c5f0e2a"}, - {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6001e575b8bbd89eee11960bb640b6da6ae110cf08113a075f1e2051cc596cae"}, - {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ea1d062c9230278793820146c95d038dc0f468cbdd172eec3363e42ff1c7d01"}, - {file = "grpcio-1.66.2-cp311-cp311-win32.whl", hash = "sha256:38b68498ff579a3b1ee8f93a05eb48dc2595795f2f62716e797dc24774c1aaa8"}, - {file = "grpcio-1.66.2-cp311-cp311-win_amd64.whl", hash = "sha256:6851de821249340bdb100df5eacfecfc4e6075fa85c6df7ee0eb213170ec8e5d"}, - {file = "grpcio-1.66.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:802d84fd3d50614170649853d121baaaa305de7b65b3e01759247e768d691ddf"}, - {file = "grpcio-1.66.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:80fd702ba7e432994df208f27514280b4b5c6843e12a48759c9255679ad38db8"}, - {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:12fda97ffae55e6526825daf25ad0fa37483685952b5d0f910d6405c87e3adb6"}, - {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:950da58d7d80abd0ea68757769c9db0a95b31163e53e5bb60438d263f4bed7b7"}, - {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e636ce23273683b00410f1971d209bf3689238cf5538d960adc3cdfe80dd0dbd"}, - {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a917d26e0fe980b0ac7bfcc1a3c4ad6a9a4612c911d33efb55ed7833c749b0ee"}, - {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49f0ca7ae850f59f828a723a9064cadbed90f1ece179d375966546499b8a2c9c"}, - {file = "grpcio-1.66.2-cp312-cp312-win32.whl", hash = "sha256:31fd163105464797a72d901a06472860845ac157389e10f12631025b3e4d0453"}, - {file = "grpcio-1.66.2-cp312-cp312-win_amd64.whl", hash = "sha256:ff1f7882e56c40b0d33c4922c15dfa30612f05fb785074a012f7cda74d1c3679"}, - {file = "grpcio-1.66.2-cp313-cp313-linux_armv7l.whl", hash = "sha256:3b00efc473b20d8bf83e0e1ae661b98951ca56111feb9b9611df8efc4fe5d55d"}, - {file = "grpcio-1.66.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1caa38fb22a8578ab8393da99d4b8641e3a80abc8fd52646f1ecc92bcb8dee34"}, - {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:c408f5ef75cfffa113cacd8b0c0e3611cbfd47701ca3cdc090594109b9fcbaed"}, - {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c806852deaedee9ce8280fe98955c9103f62912a5b2d5ee7e3eaa284a6d8d8e7"}, - {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f145cc21836c332c67baa6fc81099d1d27e266401565bf481948010d6ea32d46"}, - {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:73e3b425c1e155730273f73e419de3074aa5c5e936771ee0e4af0814631fb30a"}, - {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:9c509a4f78114cbc5f0740eb3d7a74985fd2eff022971bc9bc31f8bc93e66a3b"}, - {file = "grpcio-1.66.2-cp313-cp313-win32.whl", hash = "sha256:20657d6b8cfed7db5e11b62ff7dfe2e12064ea78e93f1434d61888834bc86d75"}, - {file = "grpcio-1.66.2-cp313-cp313-win_amd64.whl", hash = "sha256:fb70487c95786e345af5e854ffec8cb8cc781bcc5df7930c4fbb7feaa72e1cdf"}, - {file = "grpcio-1.66.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:a18e20d8321c6400185b4263e27982488cb5cdd62da69147087a76a24ef4e7e3"}, - {file = "grpcio-1.66.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:02697eb4a5cbe5a9639f57323b4c37bcb3ab2d48cec5da3dc2f13334d72790dd"}, - {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:99a641995a6bc4287a6315989ee591ff58507aa1cbe4c2e70d88411c4dcc0839"}, - {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ed71e81782966ffead60268bbda31ea3f725ebf8aa73634d5dda44f2cf3fb9c"}, - {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbd27c24a4cc5e195a7f56cfd9312e366d5d61b86e36d46bbe538457ea6eb8dd"}, - {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9a9724a156c8ec6a379869b23ba3323b7ea3600851c91489b871e375f710bc8"}, - {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d8d4732cc5052e92cea2f78b233c2e2a52998ac40cd651f40e398893ad0d06ec"}, - {file = "grpcio-1.66.2-cp38-cp38-win32.whl", hash = "sha256:7b2c86457145ce14c38e5bf6bdc19ef88e66c5fee2c3d83285c5aef026ba93b3"}, - {file = "grpcio-1.66.2-cp38-cp38-win_amd64.whl", hash = "sha256:e88264caad6d8d00e7913996030bac8ad5f26b7411495848cc218bd3a9040b6c"}, - {file = "grpcio-1.66.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:c400ba5675b67025c8a9f48aa846f12a39cf0c44df5cd060e23fda5b30e9359d"}, - {file = "grpcio-1.66.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:66a0cd8ba6512b401d7ed46bb03f4ee455839957f28b8d61e7708056a806ba6a"}, - {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:06de8ec0bd71be123eec15b0e0d457474931c2c407869b6c349bd9bed4adbac3"}, - {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb57870449dfcfac428afbb5a877829fcb0d6db9d9baa1148705739e9083880e"}, - {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b672abf90a964bfde2d0ecbce30f2329a47498ba75ce6f4da35a2f4532b7acbc"}, - {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ad2efdbe90c73b0434cbe64ed372e12414ad03c06262279b104a029d1889d13e"}, - {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c3a99c519f4638e700e9e3f83952e27e2ea10873eecd7935823dab0c1c9250e"}, - {file = "grpcio-1.66.2-cp39-cp39-win32.whl", hash = "sha256:78fa51ebc2d9242c0fc5db0feecc57a9943303b46664ad89921f5079e2e4ada7"}, - {file = "grpcio-1.66.2-cp39-cp39-win_amd64.whl", hash = "sha256:728bdf36a186e7f51da73be7f8d09457a03061be848718d0edf000e709418987"}, - {file = "grpcio-1.66.2.tar.gz", hash = "sha256:563588c587b75c34b928bc428548e5b00ea38c46972181a4d8b75ba7e3f24231"}, + {file = "grpcio-1.67.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:bd79929b3bb96b54df1296cd3bf4d2b770bd1df6c2bdf549b49bab286b925cdc"}, + {file = "grpcio-1.67.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:16724ffc956ea42967f5758c2f043faef43cb7e48a51948ab593570570d1e68b"}, + {file = "grpcio-1.67.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:2b7183c80b602b0ad816315d66f2fb7887614ead950416d60913a9a71c12560d"}, + {file = 
"grpcio-1.67.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:efe32b45dd6d118f5ea2e5deaed417d8a14976325c93812dd831908522b402c9"}, + {file = "grpcio-1.67.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe89295219b9c9e47780a0f1c75ca44211e706d1c598242249fe717af3385ec8"}, + {file = "grpcio-1.67.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa8d025fae1595a207b4e47c2e087cb88d47008494db258ac561c00877d4c8f8"}, + {file = "grpcio-1.67.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f95e15db43e75a534420e04822df91f645664bf4ad21dfaad7d51773c80e6bb4"}, + {file = "grpcio-1.67.0-cp310-cp310-win32.whl", hash = "sha256:a6b9a5c18863fd4b6624a42e2712103fb0f57799a3b29651c0e5b8119a519d65"}, + {file = "grpcio-1.67.0-cp310-cp310-win_amd64.whl", hash = "sha256:b6eb68493a05d38b426604e1dc93bfc0137c4157f7ab4fac5771fd9a104bbaa6"}, + {file = "grpcio-1.67.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:e91d154689639932305b6ea6f45c6e46bb51ecc8ea77c10ef25aa77f75443ad4"}, + {file = "grpcio-1.67.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cb204a742997277da678611a809a8409657b1398aaeebf73b3d9563b7d154c13"}, + {file = "grpcio-1.67.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:ae6de510f670137e755eb2a74b04d1041e7210af2444103c8c95f193340d17ee"}, + {file = "grpcio-1.67.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74b900566bdf68241118f2918d312d3bf554b2ce0b12b90178091ea7d0a17b3d"}, + {file = "grpcio-1.67.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4e95e43447a02aa603abcc6b5e727d093d161a869c83b073f50b9390ecf0fa8"}, + {file = "grpcio-1.67.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0bb94e66cd8f0baf29bd3184b6aa09aeb1a660f9ec3d85da615c5003154bc2bf"}, + {file = "grpcio-1.67.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:82e5bd4b67b17c8c597273663794a6a46a45e44165b960517fe6d8a2f7f16d23"}, + {file = "grpcio-1.67.0-cp311-cp311-win32.whl", hash = "sha256:7fc1d2b9fd549264ae585026b266ac2db53735510a207381be509c315b4af4e8"}, + {file = "grpcio-1.67.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac11ecb34a86b831239cc38245403a8de25037b448464f95c3315819e7519772"}, + {file = "grpcio-1.67.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:227316b5631260e0bef8a3ce04fa7db4cc81756fea1258b007950b6efc90c05d"}, + {file = "grpcio-1.67.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d90cfdafcf4b45a7a076e3e2a58e7bc3d59c698c4f6470b0bb13a4d869cf2273"}, + {file = "grpcio-1.67.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:77196216d5dd6f99af1c51e235af2dd339159f657280e65ce7e12c1a8feffd1d"}, + {file = "grpcio-1.67.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15c05a26a0f7047f720da41dc49406b395c1470eef44ff7e2c506a47ac2c0591"}, + {file = "grpcio-1.67.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3840994689cc8cbb73d60485c594424ad8adb56c71a30d8948d6453083624b52"}, + {file = "grpcio-1.67.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5a1e03c3102b6451028d5dc9f8591131d6ab3c8a0e023d94c28cb930ed4b5f81"}, + {file = "grpcio-1.67.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:682968427a63d898759474e3b3178d42546e878fdce034fd7474ef75143b64e3"}, + {file = "grpcio-1.67.0-cp312-cp312-win32.whl", hash = "sha256:d01793653248f49cf47e5695e0a79805b1d9d4eacef85b310118ba1dfcd1b955"}, + {file = "grpcio-1.67.0-cp312-cp312-win_amd64.whl", hash = "sha256:985b2686f786f3e20326c4367eebdaed3e7aa65848260ff0c6644f817042cb15"}, + {file 
= "grpcio-1.67.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:8c9a35b8bc50db35ab8e3e02a4f2a35cfba46c8705c3911c34ce343bd777813a"}, + {file = "grpcio-1.67.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:42199e704095b62688998c2d84c89e59a26a7d5d32eed86d43dc90e7a3bd04aa"}, + {file = "grpcio-1.67.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:c4c425f440fb81f8d0237c07b9322fc0fb6ee2b29fbef5f62a322ff8fcce240d"}, + {file = "grpcio-1.67.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:323741b6699cd2b04a71cb38f502db98f90532e8a40cb675393d248126a268af"}, + {file = "grpcio-1.67.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:662c8e105c5e5cee0317d500eb186ed7a93229586e431c1bf0c9236c2407352c"}, + {file = "grpcio-1.67.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f6bd2ab135c64a4d1e9e44679a616c9bc944547357c830fafea5c3caa3de5153"}, + {file = "grpcio-1.67.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:2f55c1e0e2ae9bdd23b3c63459ee4c06d223b68aeb1961d83c48fb63dc29bc03"}, + {file = "grpcio-1.67.0-cp313-cp313-win32.whl", hash = "sha256:fd6bc27861e460fe28e94226e3673d46e294ca4673d46b224428d197c5935e69"}, + {file = "grpcio-1.67.0-cp313-cp313-win_amd64.whl", hash = "sha256:cf51d28063338608cd8d3cd64677e922134837902b70ce00dad7f116e3998210"}, + {file = "grpcio-1.67.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:7f200aca719c1c5dc72ab68be3479b9dafccdf03df530d137632c534bb6f1ee3"}, + {file = "grpcio-1.67.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0892dd200ece4822d72dd0952f7112c542a487fc48fe77568deaaa399c1e717d"}, + {file = "grpcio-1.67.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:f4d613fbf868b2e2444f490d18af472ccb47660ea3df52f068c9c8801e1f3e85"}, + {file = "grpcio-1.67.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c69bf11894cad9da00047f46584d5758d6ebc9b5950c0dc96fec7e0bce5cde9"}, + {file = "grpcio-1.67.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9bca3ca0c5e74dea44bf57d27e15a3a3996ce7e5780d61b7c72386356d231db"}, + {file = "grpcio-1.67.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:014dfc020e28a0d9be7e93a91f85ff9f4a87158b7df9952fe23cc42d29d31e1e"}, + {file = "grpcio-1.67.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d4ea4509d42c6797539e9ec7496c15473177ce9abc89bc5c71e7abe50fc25737"}, + {file = "grpcio-1.67.0-cp38-cp38-win32.whl", hash = "sha256:9d75641a2fca9ae1ae86454fd25d4c298ea8cc195dbc962852234d54a07060ad"}, + {file = "grpcio-1.67.0-cp38-cp38-win_amd64.whl", hash = "sha256:cff8e54d6a463883cda2fab94d2062aad2f5edd7f06ae3ed030f2a74756db365"}, + {file = "grpcio-1.67.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:62492bd534979e6d7127b8a6b29093161a742dee3875873e01964049d5250a74"}, + {file = "grpcio-1.67.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eef1dce9d1a46119fd09f9a992cf6ab9d9178b696382439446ca5f399d7b96fe"}, + {file = "grpcio-1.67.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f623c57a5321461c84498a99dddf9d13dac0e40ee056d884d6ec4ebcab647a78"}, + {file = "grpcio-1.67.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54d16383044e681f8beb50f905249e4e7261dd169d4aaf6e52eab67b01cbbbe2"}, + {file = "grpcio-1.67.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2a44e572fb762c668e4812156b81835f7aba8a721b027e2d4bb29fb50ff4d33"}, + {file = "grpcio-1.67.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:391df8b0faac84d42f5b8dfc65f5152c48ed914e13c522fd05f2aca211f8bfad"}, + {file = 
"grpcio-1.67.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfd9306511fdfc623a1ba1dc3bc07fbd24e6cfbe3c28b4d1e05177baa2f99617"}, + {file = "grpcio-1.67.0-cp39-cp39-win32.whl", hash = "sha256:30d47dbacfd20cbd0c8be9bfa52fdb833b395d4ec32fe5cff7220afc05d08571"}, + {file = "grpcio-1.67.0-cp39-cp39-win_amd64.whl", hash = "sha256:f55f077685f61f0fbd06ea355142b71e47e4a26d2d678b3ba27248abfe67163a"}, + {file = "grpcio-1.67.0.tar.gz", hash = "sha256:e090b2553e0da1c875449c8e75073dd4415dd71c9bde6a406240fdf4c0ee467c"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.66.2)"] +protobuf = ["grpcio-tools (>=1.67.0)"] [[package]] name = "grpcio-status" @@ -3870,54 +3942,54 @@ pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0 [[package]] name = "httptools" -version = "0.6.2" +version = "0.6.4" description = "A collection of framework independent HTTP protocol utils." optional = false python-versions = ">=3.8.0" files = [ - {file = "httptools-0.6.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0238f07780782c018e9801d8f5f5aea3a4680a1af132034b444f677718c6fe88"}, - {file = "httptools-0.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:10d28e5597d4349390c640232c9366ddc15568114f56724fe30a53de9686b6ab"}, - {file = "httptools-0.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ddaf99e362ae4169f6a8b3508f3487264e0a1b1e58c0b07b86407bc9ecee831"}, - {file = "httptools-0.6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc9d039b6b8a36b182bc60774bb5d456b8ff9ec44cf97719f2f38bb1dcdd546"}, - {file = "httptools-0.6.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b57cb8a4a8a8ffdaf0395326ef3b9c1aba36e58a421438fc04c002a1f511db63"}, - {file = "httptools-0.6.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b73cda1326738eab5d60640ca0b87ac4e4db09a099423c41b59a5681917e8d1d"}, - {file = "httptools-0.6.2-cp310-cp310-win_amd64.whl", hash = "sha256:352a496244360deb1c1d108391d76cd6f3dd9f53ccf975a082e74c6761af30c9"}, - {file = "httptools-0.6.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2e9d225b178a6cc700c23cf2f5daf85a10f93f1db7c34e9ee4ee0bbc29ad458a"}, - {file = "httptools-0.6.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49b14fcc9b12a52da8667587efa124a18e1a3eb63bbbcabf9882f4008d171d6"}, - {file = "httptools-0.6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d5c33d98b2311ddbe06e92b12b14de334dcfbe64ebcbb2c7a34b5c6036db512"}, - {file = "httptools-0.6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53cd2d776700bf0ed0e6fb203d716b041712ea4906479031cc5ac5421ecaa7d2"}, - {file = "httptools-0.6.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7da016a0dab1fcced89dfff8537033c5dc200015e14023368f3f4a69e39b8716"}, - {file = "httptools-0.6.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d6e0ba155a1b3159551ac6b4551eb20028617e2e4bb71f2c61efed0756e6825"}, - {file = "httptools-0.6.2-cp311-cp311-win_amd64.whl", hash = "sha256:ad44569b0f508e046ffe85b4a547d5b68d1548fd90767df69449cc28021ee709"}, - {file = "httptools-0.6.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c92d2b7c1a914ab2f66454961eeaf904f4fe7529b93ff537619d22c18b82d070"}, - {file = "httptools-0.6.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f920a75c1dbcb5a48a495f384d73ceb41e437a966c318eb7e56f1c1ad1df3e"}, - {file = 
"httptools-0.6.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56bcd9ba0adf16edb4e3e45b8b9346f5b3b2372402e953d54c84b345d0f691e0"}, - {file = "httptools-0.6.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e350a887adb38ac65c93c2f395b60cf482baca61fd396ed8d6fd313dbcce6fac"}, - {file = "httptools-0.6.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ddc328c2a2daf2cf4bdc7bbc8a458dc4c840637223d4b8e01bce2168cc79fd23"}, - {file = "httptools-0.6.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ddaf38943dbb32333a182c894b6092a68b56c5e36d0c54ba3761d28119b15447"}, - {file = "httptools-0.6.2-cp312-cp312-win_amd64.whl", hash = "sha256:052f7f50e4a38f069478143878371ed17937f268349bcd68f6f7a9de9fcfce21"}, - {file = "httptools-0.6.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:406f7dc5d9db68cd9ac638d14c74d077085f76b45f704d3ec38d43b842b3cb44"}, - {file = "httptools-0.6.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:77e22c33123ce11231ff2773d8905e20b45d77a69459def7481283b72a583955"}, - {file = "httptools-0.6.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41965586b02715c3d83dd9153001f654e5b621de0c5255f5ef0635485212d0c0"}, - {file = "httptools-0.6.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93b1839d54b80a06a51a31b90d024a1770e250d00de57e7ae069bafba932f398"}, - {file = "httptools-0.6.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8fdb4634040d1dbde7e0b373e19668cdb61c0ee8690d3b4064ac748d85365bca"}, - {file = "httptools-0.6.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c30902f9b9da0d74668b6f71d7b57081a4879d9a5ea93d5922dbe15b15b3b24a"}, - {file = "httptools-0.6.2-cp313-cp313-win_amd64.whl", hash = "sha256:cf61238811a75335751b4b17f8b221a35f93f2d57489296742adf98412d2a568"}, - {file = "httptools-0.6.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8d80878cb40ebf88a48839ff7206ceb62e4b54327e0c2f9f15ee12edbd8b907e"}, - {file = "httptools-0.6.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5141ccc9dbd8cdc59d1e93e318d405477a940dc6ebadcb8d9f8da17d2812d353"}, - {file = "httptools-0.6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb67d47f045f56e9a5da4deccf710bdde21212e4b1f4776b7a542449f6a7682"}, - {file = "httptools-0.6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76dcb8f5c866f1537ccbaad01ebb3611890d281ef8d25e050d1cc3d90fba6b3d"}, - {file = "httptools-0.6.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:1b7bc59362143dc2d02896dde94004ef54ff1989ceedf4b389ad3b530f312364"}, - {file = "httptools-0.6.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c7a5715b1f46e9852442f496c0df2f8c393cc8f293f5396d2c8d95cac852fb51"}, - {file = "httptools-0.6.2-cp38-cp38-win_amd64.whl", hash = "sha256:3f0246ca7f78fa8e3902ddb985b9f55509d417a862f4634a8fa63a7a496266c8"}, - {file = "httptools-0.6.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1099f73952e18c718ccaaf7a97ae58c94a91839c3d247c6184326f85a2eda7b4"}, - {file = "httptools-0.6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3e45d004531330030f7d07abe4865bc17963b9989bc1941cebbf7224010fb82"}, - {file = "httptools-0.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f2fea370361a90cb9330610a95303587eda9d1e69930dbbee9978eac1d5946"}, - {file = 
"httptools-0.6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0481154c91725f7e7b729a535190388be6c7cbae3bbf0e793343ca386282312"}, - {file = "httptools-0.6.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d25f8fdbc6cc6561353c7a384d76295e6a85a4945115b8bc347855db150e8c77"}, - {file = "httptools-0.6.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:054bdee08e4f7c15c186f6e7dbc8f0cf974b8dd1832b5f17f988faf8b12815c9"}, - {file = "httptools-0.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:4502620722b453c2c6306fad392c515dcb804dfa9c6d3b90d8926a07a7a01109"}, - {file = "httptools-0.6.2.tar.gz", hash = "sha256:ae694efefcb61317c79b2fa1caebc122060992408e389bb00889567e463a47f1"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"}, + {file = "httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"}, + {file = "httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1"}, + {file = 
"httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}, + {file = "httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0"}, + {file = "httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d3f0d369e7ffbe59c4b6116a44d6a8eb4783aae027f2c0b366cf0aa964185dba"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:94978a49b8f4569ad607cd4946b759d90b285e39c0d4640c6b36ca7a3ddf2efc"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40dc6a8e399e15ea525305a2ddba998b0af5caa2566bcd79dcbe8948181eeaff"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab9ba8dcf59de5181f6be44a77458e45a578fc99c31510b8c65b7d5acc3cf490"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc411e1c0a7dcd2f902c7c48cf079947a7e65b5485dea9decb82b9105ca71a43"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d54efd20338ac52ba31e7da78e4a72570cf729fac82bc31ff9199bedf1dc7440"}, + {file = "httptools-0.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:df959752a0c2748a65ab5387d08287abf6779ae9165916fe053e68ae1fbdc47f"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9"}, + {file = 
"httptools-0.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd"}, + {file = "httptools-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6"}, + {file = "httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c"}, ] [package.extras] @@ -4261,6 +4333,17 @@ files = [ [package.dependencies] ply = "*" +[[package]] +name = "jsonpath-python" +version = "1.0.6" +description = "A more powerful JSONPath implementation in modern python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "jsonpath-python-1.0.6.tar.gz", hash = "sha256:dd5be4a72d8a2995c3f583cf82bf3cd1a9544cfdabf2d22595b67aff07349666"}, + {file = "jsonpath_python-1.0.6-py3-none-any.whl", hash = "sha256:1e3b78df579f5efc23565293612decee04214609208a2335884b3ee3f786b575"}, +] + [[package]] name = "jsonschema" version = "4.23.0" @@ -4535,13 +4618,13 @@ openai = ["openai (>=0.27.8)"] [[package]] name = "langsmith" -version = "0.1.135" +version = "0.1.137" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.135-py3-none-any.whl", hash = "sha256:b1d1ca3bad483a4239745c57e9b9157b4d099fbf3149be21e3d112c94ede06ac"}, - {file = "langsmith-0.1.135.tar.gz", hash = "sha256:7abed7e141386af99a2177f0b3600b124ae3ad1b482879ba0724ce92ef998a11"}, + {file = "langsmith-0.1.137-py3-none-any.whl", hash = "sha256:4256d5c61133749890f7b5c88321dbb133ce0f440c621ea28e76513285859b81"}, + {file = "langsmith-0.1.137.tar.gz", hash = "sha256:56cdfcc6c74cb20a3f437d5bd144feb5bf93f54c5a2918d1e568cbd084a372d4"}, ] [package.dependencies] @@ -4825,13 +4908,13 @@ urllib3 = ">=1.23" [[package]] name = "mako" -version = "1.3.5" +version = "1.3.6" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." optional = false python-versions = ">=3.8" files = [ - {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, - {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, + {file = "Mako-1.3.6-py3-none-any.whl", hash = "sha256:a91198468092a2f1a0de86ca92690fb0cfc43ca90ee17e15d93662b4c04b241a"}, + {file = "mako-1.3.6.tar.gz", hash = "sha256:9ec3a1583713479fae654f83ed9fa8c9a4c16b7bb0daba0e6bbebff50c0d983d"}, ] [package.dependencies] @@ -4883,92 +4966,92 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "3.0.1" +version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" files = [ - {file = "MarkupSafe-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-win32.whl", hash = "sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-win32.whl", hash = "sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5"}, - {file = 
"MarkupSafe-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-win32.whl", hash = "sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-win32.whl", hash = "sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453"}, - {file = 
"MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-win32.whl", hash = "sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-win32.whl", hash = "sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b"}, - {file = "markupsafe-3.0.1.tar.gz", hash = "sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] name = "marshmallow" -version = "3.22.0" +version = "3.23.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "marshmallow-3.22.0-py3-none-any.whl", hash = "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9"}, - {file = "marshmallow-3.22.0.tar.gz", hash = "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e"}, + {file = "marshmallow-3.23.0-py3-none-any.whl", hash = "sha256:82f20a2397834fe6d9611b241f2f7e7b680ed89c49f84728a1ad937be6b4bdf4"}, + {file = "marshmallow-3.23.0.tar.gz", hash = "sha256:98d8827a9f10c03d44ead298d2e99c6aea8197df18ccfad360dae7f89a50da2e"}, ] [package.dependencies] packaging = ">=17.0" [package.extras] -dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.0.2)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] -tests = ["pytest", "pytz", "simplejson"] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"] +docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.1.3)", "sphinx-issues (==5.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "simplejson"] [[package]] name = "matplotlib" @@ -5247,23 +5330,6 @@ files = [ msal = ">=1.29,<2" portalocker = ">=1.4,<3" -[[package]] -name = "msg-parser" -version = "1.2.0" -description = "This module enables reading, parsing and converting Microsoft Outlook MSG E-Mail files." 
-optional = false -python-versions = ">=3.4" -files = [ - {file = "msg_parser-1.2.0-py2.py3-none-any.whl", hash = "sha256:d47a2f0b2a359cb189fad83cc991b63ea781ecc70d91410324273fbf93e95375"}, - {file = "msg_parser-1.2.0.tar.gz", hash = "sha256:0de858d4fcebb6c8f6f028da83a17a20fe01cdce67c490779cf43b3b0162aa66"}, -] - -[package.dependencies] -olefile = ">=0.46" - -[package.extras] -rtf = ["compressed-rtf (>=1.0.5)"] - [[package]] name = "msrest" version = "0.7.1" @@ -5439,6 +5505,17 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "nest-asyncio" +version = "1.6.0" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +files = [ + {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, +] + [[package]] name = "newspaper3k" version = "0.2.8" @@ -5467,13 +5544,13 @@ tldextract = ">=2.0.1" [[package]] name = "nltk" -version = "3.8.1" +version = "3.9.1" description = "Natural Language Toolkit" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"}, - {file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"}, + {file = "nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1"}, + {file = "nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868"}, ] [package.dependencies] @@ -5762,13 +5839,13 @@ sympy = "*" [[package]] name = "openai" -version = "1.52.0" +version = "1.52.2" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.52.0-py3-none-any.whl", hash = "sha256:0c249f20920183b0a2ca4f7dba7b0452df3ecd0fa7985eb1d91ad884bc3ced9c"}, - {file = "openai-1.52.0.tar.gz", hash = "sha256:95c65a5f77559641ab8f3e4c3a050804f7b51d278870e2ec1f7444080bfe565a"}, + {file = "openai-1.52.2-py3-none-any.whl", hash = "sha256:57e9e37bc407f39bb6ec3a27d7e8fb9728b2779936daa1fcf95df17d3edfaccc"}, + {file = "openai-1.52.2.tar.gz", hash = "sha256:87b7d0f69d85f5641678d414b7ee3082363647a5c66a462ed7f3ccb59582da0d"}, ] [package.dependencies] @@ -6089,68 +6166,69 @@ cryptography = ">=3.2.1" [[package]] name = "orjson" -version = "3.10.7" +version = "3.10.10" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" files = [ - {file = "orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12"}, - {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac"}, - {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7"}, - {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c"}, - {file = 
"orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9"}, - {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91"}, - {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250"}, - {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84"}, - {file = "orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175"}, - {file = "orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c"}, - {file = "orjson-3.10.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7db8539039698ddfb9a524b4dd19508256107568cdad24f3682d5773e60504a2"}, - {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:480f455222cb7a1dea35c57a67578848537d2602b46c464472c995297117fa09"}, - {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a9c9b168b3a19e37fe2778c0003359f07822c90fdff8f98d9d2a91b3144d8e0"}, - {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8de062de550f63185e4c1c54151bdddfc5625e37daf0aa1e75d2a1293e3b7d9a"}, - {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b0dd04483499d1de9c8f6203f8975caf17a6000b9c0c54630cef02e44ee624e"}, - {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b58d3795dafa334fc8fd46f7c5dc013e6ad06fd5b9a4cc98cb1456e7d3558bd6"}, - {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33cfb96c24034a878d83d1a9415799a73dc77480e6c40417e5dda0710d559ee6"}, - {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e724cebe1fadc2b23c6f7415bad5ee6239e00a69f30ee423f319c6af70e2a5c0"}, - {file = "orjson-3.10.7-cp311-none-win32.whl", hash = "sha256:82763b46053727a7168d29c772ed5c870fdae2f61aa8a25994c7984a19b1021f"}, - {file = "orjson-3.10.7-cp311-none-win_amd64.whl", hash = "sha256:eb8d384a24778abf29afb8e41d68fdd9a156cf6e5390c04cc07bbc24b89e98b5"}, - {file = "orjson-3.10.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44a96f2d4c3af51bfac6bc4ef7b182aa33f2f054fd7f34cc0ee9a320d051d41f"}, - {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ac14cd57df0572453543f8f2575e2d01ae9e790c21f57627803f5e79b0d3c3"}, - {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bdbb61dcc365dd9be94e8f7df91975edc9364d6a78c8f7adb69c1cdff318ec93"}, - {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b48b3db6bb6e0a08fa8c83b47bc169623f801e5cc4f24442ab2b6617da3b5313"}, - {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23820a1563a1d386414fef15c249040042b8e5d07b40ab3fe3efbfbbcbcb8864"}, - {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0c6a008e91d10a2564edbb6ee5069a9e66df3fbe11c9a005cb411f441fd2c09"}, - {file = 
"orjson-3.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d352ee8ac1926d6193f602cbe36b1643bbd1bbcb25e3c1a657a4390f3000c9a5"}, - {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d2d9f990623f15c0ae7ac608103c33dfe1486d2ed974ac3f40b693bad1a22a7b"}, - {file = "orjson-3.10.7-cp312-none-win32.whl", hash = "sha256:7c4c17f8157bd520cdb7195f75ddbd31671997cbe10aee559c2d613592e7d7eb"}, - {file = "orjson-3.10.7-cp312-none-win_amd64.whl", hash = "sha256:1d9c0e733e02ada3ed6098a10a8ee0052dd55774de3d9110d29868d24b17faa1"}, - {file = "orjson-3.10.7-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:77d325ed866876c0fa6492598ec01fe30e803272a6e8b10e992288b009cbe149"}, - {file = "orjson-3.10.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ea2c232deedcb605e853ae1db2cc94f7390ac776743b699b50b071b02bea6fe"}, - {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3dcfbede6737fdbef3ce9c37af3fb6142e8e1ebc10336daa05872bfb1d87839c"}, - {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11748c135f281203f4ee695b7f80bb1358a82a63905f9f0b794769483ea854ad"}, - {file = "orjson-3.10.7-cp313-none-win32.whl", hash = "sha256:a7e19150d215c7a13f39eb787d84db274298d3f83d85463e61d277bbd7f401d2"}, - {file = "orjson-3.10.7-cp313-none-win_amd64.whl", hash = "sha256:eef44224729e9525d5261cc8d28d6b11cafc90e6bd0be2157bde69a52ec83024"}, - {file = "orjson-3.10.7-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6ea2b2258eff652c82652d5e0f02bd5e0463a6a52abb78e49ac288827aaa1469"}, - {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430ee4d85841e1483d487e7b81401785a5dfd69db5de01314538f31f8fbf7ee1"}, - {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4b6146e439af4c2472c56f8540d799a67a81226e11992008cb47e1267a9b3225"}, - {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:084e537806b458911137f76097e53ce7bf5806dda33ddf6aaa66a028f8d43a23"}, - {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829cf2195838e3f93b70fd3b4292156fc5e097aac3739859ac0dcc722b27ac0"}, - {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1193b2416cbad1a769f868b1749535d5da47626ac29445803dae7cc64b3f5c98"}, - {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4e6c3da13e5a57e4b3dca2de059f243ebec705857522f188f0180ae88badd354"}, - {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c31008598424dfbe52ce8c5b47e0752dca918a4fdc4a2a32004efd9fab41d866"}, - {file = "orjson-3.10.7-cp38-none-win32.whl", hash = "sha256:7122a99831f9e7fe977dc45784d3b2edc821c172d545e6420c375e5a935f5a1c"}, - {file = "orjson-3.10.7-cp38-none-win_amd64.whl", hash = "sha256:a763bc0e58504cc803739e7df040685816145a6f3c8a589787084b54ebc9f16e"}, - {file = "orjson-3.10.7-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e76be12658a6fa376fcd331b1ea4e58f5a06fd0220653450f0d415b8fd0fbe20"}, - {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed350d6978d28b92939bfeb1a0570c523f6170efc3f0a0ef1f1df287cd4f4960"}, - {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:144888c76f8520e39bfa121b31fd637e18d4cc2f115727865fdf9fa325b10412"}, - {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09b2d92fd95ad2402188cf51573acde57eb269eddabaa60f69ea0d733e789fe9"}, - {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b24a579123fa884f3a3caadaed7b75eb5715ee2b17ab5c66ac97d29b18fe57f"}, - {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591bcfe7512353bd609875ab38050efe3d55e18934e2f18950c108334b4ff"}, - {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f4db56635b58cd1a200b0a23744ff44206ee6aa428185e2b6c4a65b3197abdcd"}, - {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0fa5886854673222618638c6df7718ea7fe2f3f2384c452c9ccedc70b4a510a5"}, - {file = "orjson-3.10.7-cp39-none-win32.whl", hash = "sha256:8272527d08450ab16eb405f47e0f4ef0e5ff5981c3d82afe0efd25dcbef2bcd2"}, - {file = "orjson-3.10.7-cp39-none-win_amd64.whl", hash = "sha256:974683d4618c0c7dbf4f69c95a979734bf183d0658611760017f6e70a145af58"}, - {file = "orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3"}, + {file = "orjson-3.10.10-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b788a579b113acf1c57e0a68e558be71d5d09aa67f62ca1f68e01117e550a998"}, + {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:804b18e2b88022c8905bb79bd2cbe59c0cd014b9328f43da8d3b28441995cda4"}, + {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9972572a1d042ec9ee421b6da69f7cc823da5962237563fa548ab17f152f0b9b"}, + {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc6993ab1c2ae7dd0711161e303f1db69062955ac2668181bfdf2dd410e65258"}, + {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d78e4cacced5781b01d9bc0f0cd8b70b906a0e109825cb41c1b03f9c41e4ce86"}, + {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6eb2598df518281ba0cbc30d24c5b06124ccf7e19169e883c14e0831217a0bc"}, + {file = "orjson-3.10.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23776265c5215ec532de6238a52707048401a568f0fa0d938008e92a147fe2c7"}, + {file = "orjson-3.10.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8cc2a654c08755cef90b468ff17c102e2def0edd62898b2486767204a7f5cc9c"}, + {file = "orjson-3.10.10-cp310-none-win32.whl", hash = "sha256:081b3fc6a86d72efeb67c13d0ea7c030017bd95f9868b1e329a376edc456153b"}, + {file = "orjson-3.10.10-cp310-none-win_amd64.whl", hash = "sha256:ff38c5fb749347768a603be1fb8a31856458af839f31f064c5aa74aca5be9efe"}, + {file = "orjson-3.10.10-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:879e99486c0fbb256266c7c6a67ff84f46035e4f8749ac6317cc83dacd7f993a"}, + {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:019481fa9ea5ff13b5d5d95e6fd5ab25ded0810c80b150c2c7b1cc8660b662a7"}, + {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0dd57eff09894938b4c86d4b871a479260f9e156fa7f12f8cad4b39ea8028bb5"}, + {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:dbde6d70cd95ab4d11ea8ac5e738e30764e510fc54d777336eec09bb93b8576c"}, + {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2625cb37b8fb42e2147404e5ff7ef08712099197a9cd38895006d7053e69d6"}, + {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbf3c20c6a7db69df58672a0d5815647ecf78c8e62a4d9bd284e8621c1fe5ccb"}, + {file = "orjson-3.10.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:75c38f5647e02d423807d252ce4528bf6a95bd776af999cb1fb48867ed01d1f6"}, + {file = "orjson-3.10.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:23458d31fa50ec18e0ec4b0b4343730928296b11111df5f547c75913714116b2"}, + {file = "orjson-3.10.10-cp311-none-win32.whl", hash = "sha256:2787cd9dedc591c989f3facd7e3e86508eafdc9536a26ec277699c0aa63c685b"}, + {file = "orjson-3.10.10-cp311-none-win_amd64.whl", hash = "sha256:6514449d2c202a75183f807bc755167713297c69f1db57a89a1ef4a0170ee269"}, + {file = "orjson-3.10.10-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8564f48f3620861f5ef1e080ce7cd122ee89d7d6dacf25fcae675ff63b4d6e05"}, + {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5bf161a32b479034098c5b81f2608f09167ad2fa1c06abd4e527ea6bf4837a9"}, + {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68b65c93617bcafa7f04b74ae8bc2cc214bd5cb45168a953256ff83015c6747d"}, + {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8e28406f97fc2ea0c6150f4c1b6e8261453318930b334abc419214c82314f85"}, + {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4d0d9fe174cc7a5bdce2e6c378bcdb4c49b2bf522a8f996aa586020e1b96cee"}, + {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3be81c42f1242cbed03cbb3973501fcaa2675a0af638f8be494eaf37143d999"}, + {file = "orjson-3.10.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:65f9886d3bae65be026219c0a5f32dbbe91a9e6272f56d092ab22561ad0ea33b"}, + {file = "orjson-3.10.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:730ed5350147db7beb23ddaf072f490329e90a1d059711d364b49fe352ec987b"}, + {file = "orjson-3.10.10-cp312-none-win32.whl", hash = "sha256:a8f4bf5f1c85bea2170800020d53a8877812892697f9c2de73d576c9307a8a5f"}, + {file = "orjson-3.10.10-cp312-none-win_amd64.whl", hash = "sha256:384cd13579a1b4cd689d218e329f459eb9ddc504fa48c5a83ef4889db7fd7a4f"}, + {file = "orjson-3.10.10-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44bffae68c291f94ff5a9b4149fe9d1bdd4cd0ff0fb575bcea8351d48db629a1"}, + {file = "orjson-3.10.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e27b4c6437315df3024f0835887127dac2a0a3ff643500ec27088d2588fa5ae1"}, + {file = "orjson-3.10.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca84df16d6b49325a4084fd8b2fe2229cb415e15c46c529f868c3387bb1339d"}, + {file = "orjson-3.10.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c14ce70e8f39bd71f9f80423801b5d10bf93d1dceffdecd04df0f64d2c69bc01"}, + {file = "orjson-3.10.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:24ac62336da9bda1bd93c0491eff0613003b48d3cb5d01470842e7b52a40d5b4"}, + {file = "orjson-3.10.10-cp313-none-win32.whl", hash = "sha256:eb0a42831372ec2b05acc9ee45af77bcaccbd91257345f93780a8e654efc75db"}, + {file = 
"orjson-3.10.10-cp313-none-win_amd64.whl", hash = "sha256:f0c4f37f8bf3f1075c6cc8dd8a9f843689a4b618628f8812d0a71e6968b95ffd"}, + {file = "orjson-3.10.10-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:829700cc18503efc0cf502d630f612884258020d98a317679cd2054af0259568"}, + {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0ceb5e0e8c4f010ac787d29ae6299846935044686509e2f0f06ed441c1ca949"}, + {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0c25908eb86968613216f3db4d3003f1c45d78eb9046b71056ca327ff92bdbd4"}, + {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:218cb0bc03340144b6328a9ff78f0932e642199ac184dd74b01ad691f42f93ff"}, + {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2277ec2cea3775640dc81ab5195bb5b2ada2fe0ea6eee4677474edc75ea6785"}, + {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:848ea3b55ab5ccc9d7bbd420d69432628b691fba3ca8ae3148c35156cbd282aa"}, + {file = "orjson-3.10.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e3e67b537ac0c835b25b5f7d40d83816abd2d3f4c0b0866ee981a045287a54f3"}, + {file = "orjson-3.10.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:7948cfb909353fce2135dcdbe4521a5e7e1159484e0bb024c1722f272488f2b8"}, + {file = "orjson-3.10.10-cp38-none-win32.whl", hash = "sha256:78bee66a988f1a333dc0b6257503d63553b1957889c17b2c4ed72385cd1b96ae"}, + {file = "orjson-3.10.10-cp38-none-win_amd64.whl", hash = "sha256:f1d647ca8d62afeb774340a343c7fc023efacfd3a39f70c798991063f0c681dd"}, + {file = "orjson-3.10.10-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5a059afddbaa6dd733b5a2d76a90dbc8af790b993b1b5cb97a1176ca713b5df8"}, + {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f9b5c59f7e2a1a410f971c5ebc68f1995822837cd10905ee255f96074537ee6"}, + {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d5ef198bafdef4aa9d49a4165ba53ffdc0a9e1c7b6f76178572ab33118afea25"}, + {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf29ce0bb5d3320824ec3d1508652421000ba466abd63bdd52c64bcce9eb1fa"}, + {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dddd5516bcc93e723d029c1633ae79c4417477b4f57dad9bfeeb6bc0315e654a"}, + {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12f2003695b10817f0fa8b8fca982ed7f5761dcb0d93cff4f2f9f6709903fd7"}, + {file = "orjson-3.10.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:672f9874a8a8fb9bb1b771331d31ba27f57702c8106cdbadad8bda5d10bc1019"}, + {file = "orjson-3.10.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1dcbb0ca5fafb2b378b2c74419480ab2486326974826bbf6588f4dc62137570a"}, + {file = "orjson-3.10.10-cp39-none-win32.whl", hash = "sha256:d9bbd3a4b92256875cb058c3381b782649b9a3c68a4aa9a2fff020c2f9cfc1be"}, + {file = "orjson-3.10.10-cp39-none-win_amd64.whl", hash = "sha256:766f21487a53aee8524b97ca9582d5c6541b03ab6210fbaf10142ae2f3ced2aa"}, + {file = "orjson-3.10.10.tar.gz", hash = "sha256:37949383c4df7b4337ce82ee35b6d7471e55195efa7dcb45ab8226ceadb0fe3b"}, ] [[package]] @@ -6307,12 +6385,12 @@ ppft = ">=1.7.6.9" [[package]] name = "peewee" -version = "3.17.6" +version = "3.17.7" 
description = "a little orm" optional = false python-versions = "*" files = [ - {file = "peewee-3.17.6.tar.gz", hash = "sha256:cea5592c6f4da1592b7cff8eaf655be6648a1f5857469e30037bf920c03fb8fb"}, + {file = "peewee-3.17.7.tar.gz", hash = "sha256:6aefc700bd530fc6ac23fa19c9c5b47041751d92985b799169c8e318e97eabaa"}, ] [[package]] @@ -6352,95 +6430,90 @@ numpy = "*" [[package]] name = "pillow" -version = "10.4.0" +version = "11.0.0" description = "Python Imaging Library (Fork)" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, - {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, - {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, - {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, - {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, - {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, - {file = 
"pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, - {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, - {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, - {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, - {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, - {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, - {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, - {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, - {file = 
"pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, - {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, - {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, - {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, - {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, - {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, - {file 
= "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, - {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, + {file = "pillow-11.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:6619654954dc4936fcff82db8eb6401d3159ec6be81e33c6000dfd76ae189947"}, + {file = "pillow-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b3c5ac4bed7519088103d9450a1107f76308ecf91d6dabc8a33a2fcfb18d0fba"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a65149d8ada1055029fcb665452b2814fe7d7082fcb0c5bed6db851cb69b2086"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88a58d8ac0cc0e7f3a014509f0455248a76629ca9b604eca7dc5927cc593c5e9"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c26845094b1af3c91852745ae78e3ea47abf3dbcd1cf962f16b9a5fbe3ee8488"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:1a61b54f87ab5786b8479f81c4b11f4d61702830354520837f8cc791ebba0f5f"}, + {file = "pillow-11.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:674629ff60030d144b7bca2b8330225a9b11c482ed408813924619c6f302fdbb"}, + {file = "pillow-11.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:598b4e238f13276e0008299bd2482003f48158e2b11826862b1eb2ad7c768b97"}, + {file = "pillow-11.0.0-cp310-cp310-win32.whl", hash = "sha256:9a0f748eaa434a41fccf8e1ee7a3eed68af1b690e75328fd7a60af123c193b50"}, + {file = "pillow-11.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5629742881bcbc1f42e840af185fd4d83a5edeb96475a575f4da50d6ede337c"}, + {file = "pillow-11.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:ee217c198f2e41f184f3869f3e485557296d505b5195c513b2bfe0062dc537f1"}, + {file = "pillow-11.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = 
"sha256:1c1d72714f429a521d8d2d018badc42414c3077eb187a59579f28e4270b4b0fc"}, + {file = "pillow-11.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:499c3a1b0d6fc8213519e193796eb1a86a1be4b1877d678b30f83fd979811d1a"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8b2351c85d855293a299038e1f89db92a2f35e8d2f783489c6f0b2b5f3fe8a3"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f4dba50cfa56f910241eb7f883c20f1e7b1d8f7d91c750cd0b318bad443f4d5"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5ddbfd761ee00c12ee1be86c9c0683ecf5bb14c9772ddbd782085779a63dd55b"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:45c566eb10b8967d71bf1ab8e4a525e5a93519e29ea071459ce517f6b903d7fa"}, + {file = "pillow-11.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b4fd7bd29610a83a8c9b564d457cf5bd92b4e11e79a4ee4716a63c959699b306"}, + {file = "pillow-11.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cb929ca942d0ec4fac404cbf520ee6cac37bf35be479b970c4ffadf2b6a1cad9"}, + {file = "pillow-11.0.0-cp311-cp311-win32.whl", hash = "sha256:006bcdd307cc47ba43e924099a038cbf9591062e6c50e570819743f5607404f5"}, + {file = "pillow-11.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:52a2d8323a465f84faaba5236567d212c3668f2ab53e1c74c15583cf507a0291"}, + {file = "pillow-11.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:16095692a253047fe3ec028e951fa4221a1f3ed3d80c397e83541a3037ff67c9"}, + {file = "pillow-11.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2c0a187a92a1cb5ef2c8ed5412dd8d4334272617f532d4ad4de31e0495bd923"}, + {file = "pillow-11.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:084a07ef0821cfe4858fe86652fffac8e187b6ae677e9906e192aafcc1b69903"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8069c5179902dcdce0be9bfc8235347fdbac249d23bd90514b7a47a72d9fecf4"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f02541ef64077f22bf4924f225c0fd1248c168f86e4b7abdedd87d6ebaceab0f"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fcb4621042ac4b7865c179bb972ed0da0218a076dc1820ffc48b1d74c1e37fe9"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:00177a63030d612148e659b55ba99527803288cea7c75fb05766ab7981a8c1b7"}, + {file = "pillow-11.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8853a3bf12afddfdf15f57c4b02d7ded92c7a75a5d7331d19f4f9572a89c17e6"}, + {file = "pillow-11.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3107c66e43bda25359d5ef446f59c497de2b5ed4c7fdba0894f8d6cf3822dafc"}, + {file = "pillow-11.0.0-cp312-cp312-win32.whl", hash = "sha256:86510e3f5eca0ab87429dd77fafc04693195eec7fd6a137c389c3eeb4cfb77c6"}, + {file = "pillow-11.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:8ec4a89295cd6cd4d1058a5e6aec6bf51e0eaaf9714774e1bfac7cfc9051db47"}, + {file = "pillow-11.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:27a7860107500d813fcd203b4ea19b04babe79448268403172782754870dac25"}, + {file = "pillow-11.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcd1fb5bb7b07f64c15618c89efcc2cfa3e95f0e3bcdbaf4642509de1942a699"}, + {file = "pillow-11.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0e038b0745997c7dcaae350d35859c9715c71e92ffb7e0f4a8e8a16732150f38"}, + {file = 
"pillow-11.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ae08bd8ffc41aebf578c2af2f9d8749d91f448b3bfd41d7d9ff573d74f2a6b2"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d69bfd8ec3219ae71bcde1f942b728903cad25fafe3100ba2258b973bd2bc1b2"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:61b887f9ddba63ddf62fd02a3ba7add935d053b6dd7d58998c630e6dbade8527"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:c6a660307ca9d4867caa8d9ca2c2658ab685de83792d1876274991adec7b93fa"}, + {file = "pillow-11.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:73e3a0200cdda995c7e43dd47436c1548f87a30bb27fb871f352a22ab8dcf45f"}, + {file = "pillow-11.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fba162b8872d30fea8c52b258a542c5dfd7b235fb5cb352240c8d63b414013eb"}, + {file = "pillow-11.0.0-cp313-cp313-win32.whl", hash = "sha256:f1b82c27e89fffc6da125d5eb0ca6e68017faf5efc078128cfaa42cf5cb38798"}, + {file = "pillow-11.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:8ba470552b48e5835f1d23ecb936bb7f71d206f9dfeee64245f30c3270b994de"}, + {file = "pillow-11.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:846e193e103b41e984ac921b335df59195356ce3f71dcfd155aa79c603873b84"}, + {file = "pillow-11.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4ad70c4214f67d7466bea6a08061eba35c01b1b89eaa098040a35272a8efb22b"}, + {file = "pillow-11.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:6ec0d5af64f2e3d64a165f490d96368bb5dea8b8f9ad04487f9ab60dc4bb6003"}, + {file = "pillow-11.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c809a70e43c7977c4a42aefd62f0131823ebf7dd73556fa5d5950f5b354087e2"}, + {file = "pillow-11.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:4b60c9520f7207aaf2e1d94de026682fc227806c6e1f55bba7606d1c94dd623a"}, + {file = "pillow-11.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1e2688958a840c822279fda0086fec1fdab2f95bf2b717b66871c4ad9859d7e8"}, + {file = "pillow-11.0.0-cp313-cp313t-win32.whl", hash = "sha256:607bbe123c74e272e381a8d1957083a9463401f7bd01287f50521ecb05a313f8"}, + {file = "pillow-11.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c39ed17edea3bc69c743a8dd3e9853b7509625c2462532e62baa0732163a904"}, + {file = "pillow-11.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:75acbbeb05b86bc53cbe7b7e6fe00fbcf82ad7c684b3ad82e3d711da9ba287d3"}, + {file = "pillow-11.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2e46773dc9f35a1dd28bd6981332fd7f27bec001a918a72a79b4133cf5291dba"}, + {file = "pillow-11.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2679d2258b7f1192b378e2893a8a0a0ca472234d4c2c0e6bdd3380e8dfa21b6a"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda2616eb2313cbb3eebbe51f19362eb434b18e3bb599466a1ffa76a033fb916"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ec184af98a121fb2da42642dea8a29ec80fc3efbaefb86d8fdd2606619045d"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:8594f42df584e5b4bb9281799698403f7af489fba84c34d53d1c4bfb71b7c4e7"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:c12b5ae868897c7338519c03049a806af85b9b8c237b7d675b8c5e089e4a618e"}, + {file = "pillow-11.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:70fbbdacd1d271b77b7721fe3cdd2d537bbbd75d29e6300c672ec6bb38d9672f"}, + 
{file = "pillow-11.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5178952973e588b3f1360868847334e9e3bf49d19e169bbbdfaf8398002419ae"}, + {file = "pillow-11.0.0-cp39-cp39-win32.whl", hash = "sha256:8c676b587da5673d3c75bd67dd2a8cdfeb282ca38a30f37950511766b26858c4"}, + {file = "pillow-11.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:94f3e1780abb45062287b4614a5bc0874519c86a777d4a7ad34978e86428b8dd"}, + {file = "pillow-11.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:290f2cc809f9da7d6d622550bbf4c1e57518212da51b6a30fe8e0a270a5b78bd"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1187739620f2b365de756ce086fdb3604573337cc28a0d3ac4a01ab6b2d2a6d2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fbbcb7b57dc9c794843e3d1258c0fbf0f48656d46ffe9e09b63bbd6e8cd5d0a2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d203af30149ae339ad1b4f710d9844ed8796e97fda23ffbc4cc472968a47d0b"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a0d3b115009ebb8ac3d2ebec5c2982cc693da935f4ab7bb5c8ebe2f47d36f2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:73853108f56df97baf2bb8b522f3578221e56f646ba345a372c78326710d3830"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e58876c91f97b0952eb766123bfef372792ab3f4e3e1f1a2267834c2ab131734"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:224aaa38177597bb179f3ec87eeefcce8e4f85e608025e9cfac60de237ba6316"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5bd2d3bdb846d757055910f0a59792d33b555800813c3b39ada1829c372ccb06"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:375b8dd15a1f5d2feafff536d47e22f69625c1aa92f12b339ec0b2ca40263273"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:daffdf51ee5db69a82dd127eabecce20729e21f7a3680cf7cbb23f0829189790"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7326a1787e3c7b0429659e0a944725e1b03eeaa10edd945a86dead1913383944"}, + {file = "pillow-11.0.0.tar.gz", hash = "sha256:72bacbaf24ac003fea9bff9837d1eedb6088758d41e100c1552930151f677739"}, ] [package.extras] -docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=8.1)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] fpx = ["olefile"] mic = ["olefile"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] @@ -6525,20 +6598,20 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p [[package]] name = "postgrest" -version = "0.17.1" +version = "0.17.2" description = "PostgREST client for Python. This library provides an ORM interface to PostgREST." 
optional = false -python-versions = "<4.0,>=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "postgrest-0.17.1-py3-none-any.whl", hash = "sha256:ec1d00dc8532fe5ffb342cfc7c4e610a1e0e2272eb14f78f9b2b61094f9be510"}, - {file = "postgrest-0.17.1.tar.gz", hash = "sha256:e31d9977dbb80dc5f9fdd4d444014686606692dc4ddb9adc85639e56c6d54c92"}, + {file = "postgrest-0.17.2-py3-none-any.whl", hash = "sha256:f7c4f448e5a5e2d4c1dcf192edae9d1007c4261e9a6fb5116783a0046846ece2"}, + {file = "postgrest-0.17.2.tar.gz", hash = "sha256:445cd4e4a191e279492549df0c4e827d32f9d01d0852599bb8a6efb0f07fcf78"}, ] [package.dependencies] deprecation = ">=2.1.0,<3.0.0" httpx = {version = ">=0.26,<0.28", extras = ["http2"]} pydantic = ">=1.9,<3.0" -strenum = ">=0.4.9,<0.5.0" +strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""} [[package]] name = "posthog" @@ -6590,19 +6663,19 @@ dill = ["dill (>=0.3.9)"] [[package]] name = "primp" -version = "0.6.3" +version = "0.6.4" description = "HTTP client that can impersonate web browsers, mimicking their headers and `TLS/JA3/JA4/HTTP2` fingerprints" optional = false python-versions = ">=3.8" files = [ - {file = "primp-0.6.3-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bdbe6a7cdaaf5c9ed863432a941f4a75bd4c6ff626cbc8d32fc232793c70ba06"}, - {file = "primp-0.6.3-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:eeb53eb987bdcbcd85740633470255cab887d921df713ffa12a36a13366c9cdb"}, - {file = "primp-0.6.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78da53d3c92a8e3f05bd3286ac76c291f1b6fe5e08ea63b7ba92b0f9141800bb"}, - {file = "primp-0.6.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:86337b44deecdac752bd8112909987fc9fa9b894f30191c80a164dc8f895da53"}, - {file = "primp-0.6.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d3cd9a22b97f3eae42b2a5fb99f00480daf4cd6d9b139e05b0ffb03f7cc037f3"}, - {file = "primp-0.6.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7732bec917e2d3c48a31cdb92e1250f4ad6203a1aa4f802bd9abd84f2286a1e0"}, - {file = "primp-0.6.3-cp38-abi3-win_amd64.whl", hash = "sha256:1e4113c34b86c676ae321af185f03a372caef3ee009f1682c2d62e30ec87348c"}, - {file = "primp-0.6.3.tar.gz", hash = "sha256:17d30ebe26864defad5232dbbe1372e80483940012356e1f68846bb182282039"}, + {file = "primp-0.6.4-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:e627330c1f2b723b523dc2e47caacbc5b5d0cd51ca11583b42fb8cde4da60d7d"}, + {file = "primp-0.6.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:e0cb7c05dd56c8b9741042fd568c0983fc19b0f3aa209a3940ecc04b4fd60314"}, + {file = "primp-0.6.4-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4adc200ccb39e130c478d8b1a94f43a5b359068c6cb65b7c848812f96d96992"}, + {file = "primp-0.6.4-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:0ebae2d3aa36b04028e4accf2609d31d2e6981659e8e2effb09ee8ba960192e1"}, + {file = "primp-0.6.4-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:77f5fa5b34eaf251815622258419a484a2a9179dcbae2a1e702a254d91f613f1"}, + {file = "primp-0.6.4-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:14cddf535cd2c4987412e90ca3ca35ae52cddbee6e0f0953d26b33a652a95692"}, + {file = "primp-0.6.4-cp38-abi3-win_amd64.whl", hash = "sha256:96177ec2dadc47eaecbf0b22d2e93aeaf964a1be9a71e6e318d2ffb9e4242743"}, + {file = "primp-0.6.4.tar.gz", hash = "sha256:0a3de63e46a50664bcdc76e7aaf7060bf8443698efa902864669c5fca0d1abdd"}, ] [package.extras] @@ -6624,13 +6697,13 @@ wcwidth = "*" [[package]] name = "proto-plus" -version = "1.24.0" +version = "1.25.0" description = 
"Beautiful, Pythonic protocol buffers." optional = false python-versions = ">=3.7" files = [ - {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, - {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, + {file = "proto_plus-1.25.0-py3-none-any.whl", hash = "sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961"}, + {file = "proto_plus-1.25.0.tar.gz", hash = "sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91"}, ] [package.dependencies] @@ -6661,112 +6734,108 @@ files = [ [[package]] name = "psutil" -version = "6.0.0" +version = "6.1.0" description = "Cross-platform lib for process and system monitoring in Python." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"}, - {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"}, - {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"}, - {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"}, - {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"}, - {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"}, - {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"}, - {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"}, - {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"}, - {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"}, - {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"}, - {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"}, - {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"}, - {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"}, + {file = "psutil-6.1.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ff34df86226c0227c52f38b919213157588a678d049688eded74c76c8ba4a5d0"}, + {file = 
"psutil-6.1.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c0e0c00aa18ca2d3b2b991643b799a15fc8f0563d2ebb6040f64ce8dc027b942"}, + {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:000d1d1ebd634b4efb383f4034437384e44a6d455260aaee2eca1e9c1b55f047"}, + {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5cd2bcdc75b452ba2e10f0e8ecc0b57b827dd5d7aaffbc6821b2a9a242823a76"}, + {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:045f00a43c737f960d273a83973b2511430d61f283a44c96bf13a6e829ba8fdc"}, + {file = "psutil-6.1.0-cp27-none-win32.whl", hash = "sha256:9118f27452b70bb1d9ab3198c1f626c2499384935aaf55388211ad982611407e"}, + {file = "psutil-6.1.0-cp27-none-win_amd64.whl", hash = "sha256:a8506f6119cff7015678e2bce904a4da21025cc70ad283a53b099e7620061d85"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a"}, + {file = "psutil-6.1.0-cp36-cp36m-win32.whl", hash = "sha256:6d3fbbc8d23fcdcb500d2c9f94e07b1342df8ed71b948a2649b5cb060a7c94ca"}, + {file = "psutil-6.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1209036fbd0421afde505a4879dee3b2fd7b1e14fee81c0069807adcbbcca747"}, + {file = "psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e"}, + {file = "psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be"}, + {file = "psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a"}, ] [package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] +dev = ["black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "wheel"] +test = ["pytest", "pytest-xdist", "setuptools"] [[package]] name = "psycopg2-binary" -version = "2.9.9" +version = "2.9.10" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, - {file = 
"psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = 
"psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, - {file = 
"psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", 
hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, + {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c"}, + {file = 
"psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"}, ] [[package]] @@ -7200,6 +7269,22 @@ files = [ ed25519 = ["PyNaCl (>=1.4.0)"] rsa = ["cryptography"] +[[package]] +name = "pyobvector" +version = "0.1.6" +description = "A python SDK for OceanBase Vector Store, based on SQLAlchemy, compatible with Milvus API." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "pyobvector-0.1.6-py3-none-any.whl", hash = "sha256:0d700e865a85b4716b9a03384189e49288cd9d5f3cef88aed4740bc82d5fd136"}, + {file = "pyobvector-0.1.6.tar.gz", hash = "sha256:05551addcac8c596992d5e38b480c83ca3481c6cfc6f56a1a1bddfb2e6ae037e"}, +] + +[package.dependencies] +numpy = ">=1.26.0,<2.0.0" +pymysql = ">=1.1.1,<2.0.0" +sqlalchemy = ">=2.0.32,<3.0.0" + [[package]] name = "pyopenssl" version = "24.2.1" @@ -7243,6 +7328,27 @@ files = [ [package.extras] diagrams = ["jinja2", "railroad-diagrams"] +[[package]] +name = "pypdf" +version = "5.0.1" +description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pypdf-5.0.1-py3-none-any.whl", hash = "sha256:ff8a32da6c7a63fea9c32fa4dd837cdd0db7966adf6c14f043e3f12592e992db"}, + {file = "pypdf-5.0.1.tar.gz", hash = "sha256:a361c3c372b4a659f9c8dd438d5ce29a753c79c620dc6e1fd66977651f5547ea"}, +] + +[package.dependencies] +typing_extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} + +[package.extras] +crypto = ["PyCryptodome", "cryptography"] +dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "pytest-socket", "pytest-timeout", "pytest-xdist", "wheel"] +docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"] +full = ["Pillow (>=8.0.0)", "PyCryptodome", "cryptography"] +image = ["Pillow (>=8.0.0)"] + [[package]] name = "pypdfium2" version = "4.17.0" @@ -7498,13 +7604,13 @@ files = [ [[package]] name = "python-dateutil" -version = "2.9.0.post0" +version = "2.8.2" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] [package.dependencies] @@ -7541,17 +7647,17 @@ cli = ["click (>=5.0)"] [[package]] name = "python-iso639" -version = "2024.4.27" +version = "2024.10.22" description = "ISO 639 language codes, names, and other associated information" optional = false python-versions = ">=3.8" files = [ - {file = "python_iso639-2024.4.27-py3-none-any.whl", hash = "sha256:27526a84cebc4c4d53fea9d1ebbc7209c8d279bebaa343e6765a1fc8780565ab"}, - {file = "python_iso639-2024.4.27.tar.gz", hash = 
"sha256:97e63b5603e085c6a56a12a95740010e75d9134e0aab767e0978b53fd8824f13"}, + {file = "python_iso639-2024.10.22-py3-none-any.whl", hash = "sha256:02d3ce2e01c6896b30b9cbbd3e1c8ee0d7221250b5d63ea9803e0d2a81fd1047"}, + {file = "python_iso639-2024.10.22.tar.gz", hash = "sha256:750f21b6a0bc6baa24253a3d8aae92b582bf93aa40988361cd96852c2c6d9a52"}, ] [package.extras] -dev = ["black (==24.4.2)", "build (==1.2.1)", "flake8 (==7.0.0)", "pytest (==8.1.2)", "requests (==2.31.0)", "twine (==5.0.0)"] +dev = ["black (==24.10.0)", "build (==1.2.1)", "flake8 (==7.1.1)", "pytest (==8.3.3)", "requests (==2.32.3)", "twine (==5.1.1)"] [[package]] name = "python-magic" @@ -7565,19 +7671,36 @@ files = [ ] [[package]] -name = "python-pptx" -version = "0.6.23" -description = "Generate and manipulate Open XML PowerPoint (.pptx) files" +name = "python-oxmsg" +version = "0.0.1" +description = "Extract attachments from Outlook .msg files." optional = false -python-versions = "*" +python-versions = ">=3.9" files = [ - {file = "python-pptx-0.6.23.tar.gz", hash = "sha256:587497ff28e779ab18dbb074f6d4052893c85dedc95ed75df319364f331fedee"}, - {file = "python_pptx-0.6.23-py3-none-any.whl", hash = "sha256:dd0527194627a2b7cc05f3ba23ecaa2d9a0d5ac9b6193a28ed1b7a716f4217d4"}, + {file = "python_oxmsg-0.0.1-py3-none-any.whl", hash = "sha256:8ea7d5dda1bc161a413213da9e18ed152927c1fda2feaf5d1f02192d8ad45eea"}, + {file = "python_oxmsg-0.0.1.tar.gz", hash = "sha256:b65c1f93d688b85a9410afa824192a1ddc39da359b04a0bd2cbd3874e84d4994"}, +] + +[package.dependencies] +click = "*" +olefile = "*" +typing-extensions = ">=4.9.0" + +[[package]] +name = "python-pptx" +version = "1.0.2" +description = "Create, read, and update PowerPoint 2007+ (.pptx) files." +optional = false +python-versions = ">=3.8" +files = [ + {file = "python_pptx-1.0.2-py3-none-any.whl", hash = "sha256:160838e0b8565a8b1f67947675886e9fea18aa5e795db7ae531606d68e785cba"}, + {file = "python_pptx-1.0.2.tar.gz", hash = "sha256:479a8af0eaf0f0d76b6f00b0887732874ad2e3188230315290cd1f9dd9cc7095"}, ] [package.dependencies] lxml = ">=3.1.0" Pillow = ">=3.3.2" +typing-extensions = ">=4.9.0" XlsxWriter = ">=0.5.7" [[package]] @@ -8151,13 +8274,13 @@ py = ">=1.4.26,<2.0.0" [[package]] name = "rich" -version = "13.9.2" +version = "13.9.3" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" files = [ - {file = "rich-13.9.2-py3-none-any.whl", hash = "sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1"}, - {file = "rich-13.9.2.tar.gz", hash = "sha256:51a2c62057461aaf7152b4d611168f93a9fc73068f8ded2790f29fe2b5366d0c"}, + {file = "rich-13.9.3-py3-none-any.whl", hash = "sha256:9836f5096eb2172c9e77df411c1b009bace4193d6a481d534fea75ebba758283"}, + {file = "rich-13.9.3.tar.gz", hash = "sha256:bc1e01b899537598cf02579d2b9f4a415104d3fc439313a7a2c165d76557a08e"}, ] [package.dependencies] @@ -8700,13 +8823,13 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "75.1.0" +version = "75.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, - {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, + {file = "setuptools-75.2.0-py3-none-any.whl", hash = 
"sha256:a7fcb66f68b4d9e8e66b42f9876150a3371558f98fa32222ffaa5bced76406f8"}, + {file = "setuptools-75.2.0.tar.gz", hash = "sha256:753bb6ebf1f465a1912e19ed1d41f403a79173a9acf66a42e7e6aec45c3c16ec"}, ] [package.extras] @@ -8872,60 +8995,68 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.35" +version = "2.0.36" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8bea573863762bbf45d1e13f87c2d2fd32cee2dbd50d050f83f87429c9e1ea"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f552023710d4b93d8fb29a91fadf97de89c5926c6bd758897875435f2a939f33"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:016b2e665f778f13d3c438651dd4de244214b527a275e0acf1d44c05bc6026a9"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7befc148de64b6060937231cbff8d01ccf0bfd75aa26383ffdf8d82b12ec04ff"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-win32.whl", hash = "sha256:22b83aed390e3099584b839b93f80a0f4a95ee7f48270c97c90acd40ee646f0b"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-win_amd64.whl", hash = "sha256:a29762cd3d116585278ffb2e5b8cc311fb095ea278b96feef28d0b423154858e"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e21f66748ab725ade40fa7af8ec8b5019c68ab00b929f6643e1b1af461eddb60"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a6219108a15fc6d24de499d0d515c7235c617b2540d97116b663dade1a54d62"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:627dee0c280eea91aed87b20a1f849e9ae2fe719d52cbf847c0e0ea34464b3f7"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4fdcd72a789c1c31ed242fd8c1bcd9ea186a98ee8e5408a50e610edfef980d71"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-win32.whl", hash = "sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-win_amd64.whl", hash = "sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eb60b026d8ad0c97917cb81d3662d0b39b8ff1335e3fabb24984c6acd0c900a2"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cdf1a0dbe5ced887a9b127da4ffd7354e9c1a3b9bb330dce84df6b70ccb3a8d"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93a71c8601e823236ac0e5d087e4f397874a421017b3318fd92c0b14acf2b6db"}, - {file = 
"SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e04b622bb8a88f10e439084486f2f6349bf4d50605ac3e445869c7ea5cf0fa8c"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-win32.whl", hash = "sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-win_amd64.whl", hash = "sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f021d334f2ca692523aaf7bbf7592ceff70c8594fad853416a81d66b35e3abf9"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05c3f58cf91683102f2f0265c0db3bd3892e9eedabe059720492dbaa4f922da1"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:032d979ce77a6c2432653322ba4cbeabf5a6837f704d16fa38b5a05d8e21fa00"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:2e795c2f7d7249b75bb5f479b432a51b59041580d20599d4e112b5f2046437a3"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:cc32b2990fc34380ec2f6195f33a76b6cdaa9eecf09f0c9404b74fc120aef36f"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-win32.whl", hash = "sha256:9509c4123491d0e63fb5e16199e09f8e262066e58903e84615c301dde8fa2e87"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-win_amd64.whl", hash = "sha256:3655af10ebcc0f1e4e06c5900bb33e080d6a1fa4228f502121f28a3b1753cde5"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4c31943b61ed8fdd63dfd12ccc919f2bf95eefca133767db6fbbd15da62078ec"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a62dd5d7cc8626a3634208df458c5fe4f21200d96a74d122c83bc2015b333bc1"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0630774b0977804fba4b6bbea6852ab56c14965a2b0c7fc7282c5f7d90a1ae72"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d625eddf7efeba2abfd9c014a22c0f6b3796e0ffb48f5d5ab106568ef01ff5a"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ada603db10bb865bbe591939de854faf2c60f43c9b763e90f653224138f910d9"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c41411e192f8d3ea39ea70e0fae48762cd11a2244e03751a98bd3c0ca9a4e936"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-win32.whl", hash = "sha256:d299797d75cd747e7797b1b41817111406b8b10a4f88b6e8fe5b5e59598b43b0"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-win_amd64.whl", hash = "sha256:0375a141e1c0878103eb3d719eb6d5aa444b490c96f3fedab8471c7f6ffe70ee"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccae5de2a0140d8be6838c331604f91d6fafd0735dbdcee1ac78fc8fbaba76b4"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a275a806f73e849e1c309ac11108ea1a14cd7058577aba962cd7190e27c9e3c"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:732e026240cdd1c1b2e3ac515c7a23820430ed94292ce33806a95869c46bd139"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:890da8cd1941fa3dab28c5bac3b9da8502e7e366f895b3b8e500896f12f94d11"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_aarch64.whl", hash 
= "sha256:c0d8326269dbf944b9201911b0d9f3dc524d64779a07518199a58384c3d37a44"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b76d63495b0508ab9fc23f8152bac63205d2a704cd009a2b0722f4c8e0cba8e0"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-win32.whl", hash = "sha256:69683e02e8a9de37f17985905a5eca18ad651bf592314b4d3d799029797d0eb3"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-win_amd64.whl", hash = "sha256:aee110e4ef3c528f3abbc3c2018c121e708938adeeff9006428dd7c8555e9b3f"}, - {file = "SQLAlchemy-2.0.35-py3-none-any.whl", hash = "sha256:2ab3f0336c0387662ce6221ad30ab3a5e6499aab01b9790879b6578fd9b8faa1"}, - {file = "sqlalchemy-2.0.35.tar.gz", hash = "sha256:e11d7ea4d24f0a262bccf9a7cd6284c976c5369dac21db237cff59586045ab9f"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, + {file = 
"SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = "sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"}, + {file = 
"SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = "sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, + {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, + {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, ] [package.dependencies] @@ -8938,7 +9069,7 @@ aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] @@ -8974,13 +9105,13 @@ doc = ["sphinx"] [[package]] name = "starlette" -version = "0.39.2" +version = "0.41.0" description = "The little ASGI library that shines." 
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "starlette-0.39.2-py3-none-any.whl", hash = "sha256:134dd6deb655a9775991d352312d53f1879775e5cc8a481f966e83416a2c3f71"},
-    {file = "starlette-0.39.2.tar.gz", hash = "sha256:caaa3b87ef8518ef913dac4f073dea44e85f73343ad2bdc17941931835b2a26a"},
+    {file = "starlette-0.41.0-py3-none-any.whl", hash = "sha256:a0193a3c413ebc9c78bff1c3546a45bb8c8bcb4a84cae8747d650a65bd37210a"},
+    {file = "starlette-0.41.0.tar.gz", hash = "sha256:39cbd8768b107d68bfe1ff1672b38a2c38b49777de46d2a592841d58e3bf7c2a"},
 ]
 
 [package.dependencies]
@@ -8991,13 +9122,13 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7
 
 [[package]]
 name = "storage3"
-version = "0.8.1"
+version = "0.8.2"
 description = "Supabase Storage client for Python."
 optional = false
-python-versions = "<4.0,>=3.8"
+python-versions = "<4.0,>=3.9"
 files = [
-    {file = "storage3-0.8.1-py3-none-any.whl", hash = "sha256:0b21205f43eaf0d1dd33bde6c6d0612f88524b7865f017d2ae9827e3f63d9cdc"},
-    {file = "storage3-0.8.1.tar.gz", hash = "sha256:ea60b68b2221b3868ccc1a7f1294d57d0d9c51642cdc639d8115fe5d0adc8892"},
+    {file = "storage3-0.8.2-py3-none-any.whl", hash = "sha256:f2e995b18c77a2a9265d1a33047d43e4d6abb11eb3ca5067959f68281c305de3"},
+    {file = "storage3-0.8.2.tar.gz", hash = "sha256:db05d3fe8fb73bd30c814c4c4749664f37a5dfc78b629e8c058ef558c2b89f5a"},
 ]
 
 [package.dependencies]
@@ -9057,13 +9188,13 @@ typing-extensions = ">=4.12.2,<5.0.0"
 
 [[package]]
 name = "supafunc"
-version = "0.6.1"
+version = "0.6.2"
 description = "Library for Supabase Functions"
 optional = false
-python-versions = "<4.0,>=3.8"
+python-versions = "<4.0,>=3.9"
 files = [
-    {file = "supafunc-0.6.1-py3-none-any.whl", hash = "sha256:01aeeeb4bf429977664454a32c86418345140faf6d2e6eb0636d52e4547c5fbb"},
-    {file = "supafunc-0.6.1.tar.gz", hash = "sha256:3c8761e3999336ccdb7550498a395fd08afc8469382f55ea56f7f640e5a909aa"},
+    {file = "supafunc-0.6.2-py3-none-any.whl", hash = "sha256:101b30616b0a1ce8cf938eca1df362fa4cf1deacb0271f53ebbd674190fb0da5"},
+    {file = "supafunc-0.6.2.tar.gz", hash = "sha256:c7dfa20db7182f7fe4ae436e94e05c06cd7ed98d697fed75d68c7b9792822adc"},
 ]
 
 [package.dependencies]
@@ -9143,13 +9274,13 @@ test = ["pytest", "tornado (>=4.5)", "typeguard"]
 
 [[package]]
 name = "tencentcloud-sdk-python-common"
-version = "3.0.1250"
+version = "3.0.1257"
 description = "Tencent Cloud Common SDK for Python"
 optional = false
 python-versions = "*"
 files = [
-    {file = "tencentcloud-sdk-python-common-3.0.1250.tar.gz", hash = "sha256:97c15c3f2ffbde60550656eab3e9337d9e0ec8958a533f223c5d5caa2762b6e9"},
-    {file = "tencentcloud_sdk_python_common-3.0.1250-py2.py3-none-any.whl", hash = "sha256:e369dee2d920ee365a8e2d314d563d243f2e73f5bc6bd2886f96534c9d00c3a7"},
+    {file = "tencentcloud-sdk-python-common-3.0.1257.tar.gz", hash = "sha256:e10b155d598a60c43a491be10f40f7dae5774a2187d55f2da83bdb559434f3c4"},
+    {file = "tencentcloud_sdk_python_common-3.0.1257-py2.py3-none-any.whl", hash = "sha256:f474a2969f3cbff91f45780f18bfbb90ab53f66c0085c4e9b4e07c2fcf0e71d9"},
 ]
 
 [package.dependencies]
@@ -9157,17 +9288,17 @@ requests = ">=2.16.0"
 
 [[package]]
 name = "tencentcloud-sdk-python-hunyuan"
-version = "3.0.1250"
+version = "3.0.1257"
 description = "Tencent Cloud Hunyuan SDK for Python"
 optional = false
 python-versions = "*"
 files = [
-    {file = "tencentcloud-sdk-python-hunyuan-3.0.1250.tar.gz", hash = "sha256:ac95085edee2a95c69326b2fd6a0f61116fc5d214d5c8cf14a1b42bbb262dba8"},
-    {file = "tencentcloud_sdk_python_hunyuan-3.0.1250-py2.py3-none-any.whl", hash = "sha256:caac95c47348639452a78d39cdcb87257f97cec3b52398e3be97a5b8c4c5e496"},
"tencentcloud_sdk_python_hunyuan-3.0.1250-py2.py3-none-any.whl", hash = "sha256:caac95c47348639452a78d39cdcb87257f97cec3b52398e3be97a5b8c4c5e496"}, + {file = "tencentcloud-sdk-python-hunyuan-3.0.1257.tar.gz", hash = "sha256:4d38505089bed70dda1f806f8c4835f8a8c520efa86dcecfef444045c21b695d"}, + {file = "tencentcloud_sdk_python_hunyuan-3.0.1257-py2.py3-none-any.whl", hash = "sha256:c9089d3e49304c9c20e7465c82372b2cd234e67f63efdffb6798a4093b3a97c6"}, ] [package.dependencies] -tencentcloud-sdk-python-common = "3.0.1250" +tencentcloud-sdk-python-common = "3.0.1257" [[package]] name = "threadpoolctl" @@ -9426,12 +9557,12 @@ files = [ [[package]] name = "tos" -version = "2.7.1" +version = "2.7.2" description = "Volc TOS (Tinder Object Storage) SDK" optional = false python-versions = "*" files = [ - {file = "tos-2.7.1.tar.gz", hash = "sha256:4bccdbff3cfd63eb44648bb44862903708c4b3e790f0dd55c96305baaeece805"}, + {file = "tos-2.7.2.tar.gz", hash = "sha256:3c31257716785bca7b2cac51474ff32543cda94075a7b7aff70d769c15c7b7ed"}, ] [package.dependencies] @@ -9565,13 +9696,13 @@ typing-extensions = ">=3.7.4.3" [[package]] name = "types-requests" -version = "2.32.0.20240914" +version = "2.32.0.20241016" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" files = [ - {file = "types-requests-2.32.0.20240914.tar.gz", hash = "sha256:2850e178db3919d9bf809e434eef65ba49d0e7e33ac92d588f4a5e295fffd405"}, - {file = "types_requests-2.32.0.20240914-py3-none-any.whl", hash = "sha256:59c2f673eb55f32a99b2894faf6020e1a9f4a402ad0f192bfee0b64469054310"}, + {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, + {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, ] [package.dependencies] @@ -9703,13 +9834,13 @@ files = [ [[package]] name = "unstructured" -version = "0.10.30" +version = "0.16.1" description = "A library that prepares raw documents for downstream ML tasks." 
 optional = false
-python-versions = ">=3.7.0"
+python-versions = "<3.13,>=3.9.0"
 files = [
-    {file = "unstructured-0.10.30-py3-none-any.whl", hash = "sha256:0615f14daa37450e9c0fcf3c3fd178c3a06b6b8d006a36d1a5e54dbe487aa6b6"},
-    {file = "unstructured-0.10.30.tar.gz", hash = "sha256:a86c3d15c572a28322d83cb5ecf0ac7a24f1c36864fb7c68df096de8a1acc106"},
+    {file = "unstructured-0.16.1-py3-none-any.whl", hash = "sha256:7512281a2917809a563cbb186876b77d5a361e1f3089eca61e9219aecd1218f9"},
+    {file = "unstructured-0.16.1.tar.gz", hash = "sha256:03608b5189a004412cd618ce2d083ff926c56dbbca41b41c92e08ffa9e2bac3a"},
 ]
 
 [package.dependencies]
@@ -9719,71 +9850,84 @@ chardet = "*"
 dataclasses-json = "*"
 emoji = "*"
 filetype = "*"
+html5lib = "*"
 langdetect = "*"
 lxml = "*"
 markdown = {version = "*", optional = true, markers = "extra == \"md\""}
-msg-parser = {version = "*", optional = true, markers = "extra == \"msg\""}
 nltk = "*"
-numpy = "*"
+numpy = "<2"
+psutil = "*"
 pypandoc = {version = "*", optional = true, markers = "extra == \"epub\""}
-python-docx = {version = ">=1.1.0", optional = true, markers = "extra == \"docx\""}
+python-docx = {version = ">=1.1.2", optional = true, markers = "extra == \"docx\""}
 python-iso639 = "*"
 python-magic = "*"
-python-pptx = {version = "<=0.6.23", optional = true, markers = "extra == \"ppt\" or extra == \"pptx\""}
+python-oxmsg = "*"
+python-pptx = {version = ">=1.0.1", optional = true, markers = "extra == \"ppt\" or extra == \"pptx\""}
 rapidfuzz = "*"
 requests = "*"
-tabulate = "*"
+tqdm = "*"
 typing-extensions = "*"
+unstructured-client = "*"
+wrapt = "*"
 
 [package.extras]
-airtable = ["pyairtable"]
-all-docs = ["markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.1.0)", "python-pptx (<=0.6.23)", "unstructured-inference (==0.7.11)", "unstructured.pytesseract (>=0.3.12)", "xlrd"]
-azure = ["adlfs", "fsspec (==2023.9.1)"]
-azure-cognitive-search = ["azure-search-documents"]
-bedrock = ["boto3", "langchain"]
-biomed = ["bs4"]
-box = ["boxfs", "fsspec (==2023.9.1)"]
-confluence = ["atlassian-python-api"]
+all-docs = ["effdet", "google-cloud-vision", "markdown", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pi-heif", "pikepdf", "pypandoc", "pypdf", "python-docx (>=1.1.2)", "python-pptx (>=1.0.1)", "unstructured-inference (==0.8.0)", "unstructured.pytesseract (>=0.3.12)", "xlrd"]
 csv = ["pandas"]
-delta-table = ["deltalake", "fsspec (==2023.9.1)"]
-discord = ["discord-py"]
-doc = ["python-docx (>=1.1.0)"]
-docx = ["python-docx (>=1.1.0)"]
-dropbox = ["dropboxdrivefs", "fsspec (==2023.9.1)"]
-elasticsearch = ["elasticsearch", "jq"]
-embed-huggingface = ["huggingface", "langchain", "sentence-transformers"]
+doc = ["python-docx (>=1.1.2)"]
+docx = ["python-docx (>=1.1.2)"]
 epub = ["pypandoc"]
-gcs = ["bs4", "fsspec (==2023.9.1)", "gcsfs"]
-github = ["pygithub (>1.58.0)"]
-gitlab = ["python-gitlab"]
-google-drive = ["google-api-python-client"]
 huggingface = ["langdetect", "sacremoses", "sentencepiece", "torch", "transformers"]
-image = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.11)", "unstructured.pytesseract (>=0.3.12)"]
-jira = ["atlassian-python-api"]
-local-inference = ["markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.1.0)", "python-pptx (<=0.6.23)", "unstructured-inference (==0.7.11)", "unstructured.pytesseract (>=0.3.12)", "xlrd"]
+image = ["effdet", "google-cloud-vision", "onnx", "pdf2image", "pdfminer.six", "pi-heif", "pikepdf", "pypdf", "unstructured-inference (==0.8.0)", "unstructured.pytesseract (>=0.3.12)"]
"onnx", "pdf2image", "pdfminer.six", "pi-heif", "pikepdf", "pypdf", "unstructured-inference (==0.8.0)", "unstructured.pytesseract (>=0.3.12)"] +local-inference = ["effdet", "google-cloud-vision", "markdown", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pi-heif", "pikepdf", "pypandoc", "pypdf", "python-docx (>=1.1.2)", "python-pptx (>=1.0.1)", "unstructured-inference (==0.8.0)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] md = ["markdown"] -msg = ["msg-parser"] -notion = ["htmlBuilder", "notion-client"] -odt = ["pypandoc", "python-docx (>=1.1.0)"] -onedrive = ["Office365-REST-Python-Client (<2.4.3)", "bs4", "msal"] -openai = ["langchain", "openai", "tiktoken"] +odt = ["pypandoc", "python-docx (>=1.1.2)"] org = ["pypandoc"] -outlook = ["Office365-REST-Python-Client (<2.4.3)", "msal"] -paddleocr = ["unstructured.paddleocr (==2.6.1.3)"] -pdf = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.11)", "unstructured.pytesseract (>=0.3.12)"] -ppt = ["python-pptx (<=0.6.23)"] -pptx = ["python-pptx (<=0.6.23)"] -reddit = ["praw"] +paddleocr = ["paddlepaddle (==3.0.0b1)", "unstructured.paddleocr (==2.8.1.0)"] +pdf = ["effdet", "google-cloud-vision", "onnx", "pdf2image", "pdfminer.six", "pi-heif", "pikepdf", "pypdf", "unstructured-inference (==0.8.0)", "unstructured.pytesseract (>=0.3.12)"] +ppt = ["python-pptx (>=1.0.1)"] +pptx = ["python-pptx (>=1.0.1)"] rst = ["pypandoc"] rtf = ["pypandoc"] -s3 = ["fsspec (==2023.9.1)", "s3fs"] -salesforce = ["simple-salesforce"] -sharepoint = ["Office365-REST-Python-Client (<2.4.3)", "msal"] -slack = ["slack-sdk"] tsv = ["pandas"] -wikipedia = ["wikipedia"] xlsx = ["networkx", "openpyxl", "pandas", "xlrd"] +[[package]] +name = "unstructured-client" +version = "0.26.1" +description = "Python Client SDK for Unstructured API" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "unstructured_client-0.26.1-py3-none-any.whl", hash = "sha256:b8b839d477122bab3f37242cbe44b39f7eb7b564b07b53500321f953710119b6"}, + {file = "unstructured_client-0.26.1.tar.gz", hash = "sha256:907cceb470529b45b0fddb2d0f1bbf4d6568f347c757ab68639a7bb620ec2484"}, +] + +[package.dependencies] +cryptography = ">=3.1" +eval-type-backport = ">=0.2.0,<0.3.0" +httpx = ">=0.27.0" +jsonpath-python = ">=1.0.6,<2.0.0" +nest-asyncio = ">=1.6.0" +pydantic = ">=2.9.0,<2.10.0" +pypdf = ">=4.0" +python-dateutil = "2.8.2" +requests-toolbelt = ">=1.0.0" +typing-inspect = ">=0.9.0,<0.10.0" + +[[package]] +name = "upstash-vector" +version = "0.6.0" +description = "Serverless Vector SDK from Upstash" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "upstash_vector-0.6.0-py3-none-any.whl", hash = "sha256:d0bdad7765b8a7f5c205b7a9c81ca4b9a4cee3ee4952afc7d5ea5fb76c3f3c3c"}, + {file = "upstash_vector-0.6.0.tar.gz", hash = "sha256:a716ed4d0251362208518db8b194158a616d37d1ccbb1155f619df690599e39b"}, +] + +[package.dependencies] +httpx = ">=0.23.0,<1" + [[package]] name = "uritemplate" version = "4.1.1" @@ -9814,13 +9958,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.31.1" +version = "0.32.0" description = "The lightning-fast ASGI server." 
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "uvicorn-0.31.1-py3-none-any.whl", hash = "sha256:adc42d9cac80cf3e51af97c1851648066841e7cfb6993a4ca8de29ac1548ed41"},
-    {file = "uvicorn-0.31.1.tar.gz", hash = "sha256:f5167919867b161b7bcaf32646c6a94cdbd4c3aa2eb5c17d36bb9aa5cfd8c493"},
+    {file = "uvicorn-0.32.0-py3-none-any.whl", hash = "sha256:60b8f3a5ac027dcd31448f411ced12b5ef452c646f76f02f8cc3f25d8d26fd82"},
+    {file = "uvicorn-0.32.0.tar.gz", hash = "sha256:f78b36b143c16f54ccdb8190d0a26b5f1901fe5a3c777e1ab29f26391af8551e"},
 ]
 
 [package.dependencies]
@@ -10430,13 +10574,13 @@ files = [
 
 [[package]]
 name = "xmltodict"
-version = "0.14.1"
+version = "0.14.2"
 description = "Makes working with XML feel like you are working with JSON"
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "xmltodict-0.14.1-py2.py3-none-any.whl", hash = "sha256:3ef4a7b71c08f19047fcbea572e1d7f4207ab269da1565b5d40e9823d3894e63"},
-    {file = "xmltodict-0.14.1.tar.gz", hash = "sha256:338c8431e4fc554517651972d62f06958718f6262b04316917008e8fd677a6b0"},
+    {file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"},
+    {file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"},
 ]
 
 [[package]]
@@ -10546,13 +10690,13 @@ multidict = ">=4.0"
 
 [[package]]
 name = "yfinance"
-version = "0.2.44"
+version = "0.2.46"
 description = "Download market data from Yahoo! Finance API"
 optional = false
 python-versions = "*"
 files = [
-    {file = "yfinance-0.2.44-py2.py3-none-any.whl", hash = "sha256:fdc18791662f286539f7a08dccd7e8191b1ca509814f7b0faac264623bebe8a8"},
-    {file = "yfinance-0.2.44.tar.gz", hash = "sha256:532ad1644ee9cf4024ec0d9cade0cc073664ec0d140cc6c22a0cce8a9118b523"},
+    {file = "yfinance-0.2.46-py2.py3-none-any.whl", hash = "sha256:371860d532cae76605195678a540e29382bfd0607f8aa61695f753e714916ffc"},
+    {file = "yfinance-0.2.46.tar.gz", hash = "sha256:a6e2a128915532a54b8f6614cfdb7a8c242d2386e05f95c89b15865b5d9c0352"},
 ]
 
 [package.dependencies]
@@ -10629,48 +10773,48 @@ test = ["zope.testrunner"]
 
 [[package]]
 name = "zope-interface"
-version = "7.1.0"
+version = "7.1.1"
 description = "Interfaces for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "zope.interface-7.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2bd9e9f366a5df08ebbdc159f8224904c1c5ce63893984abb76954e6fbe4381a"},
-    {file = "zope.interface-7.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:661d5df403cd3c5b8699ac480fa7f58047a3253b029db690efa0c3cf209993ef"},
-    {file = "zope.interface-7.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91b6c30689cfd87c8f264acb2fc16ad6b3c72caba2aec1bf189314cf1a84ca33"},
-    {file = "zope.interface-7.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b6a4924f5bad9fe21d99f66a07da60d75696a136162427951ec3cb223a5570d"},
-    {file = "zope.interface-7.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80a3c00b35f6170be5454b45abe2719ea65919a2f09e8a6e7b1362312a872cd3"},
-    {file = "zope.interface-7.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:b936d61dbe29572fd2cfe13e30b925e5383bed1aba867692670f5a2a2eb7b4e9"},
-    {file = "zope.interface-7.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ac20581fc6cd7c754f6dff0ae06fedb060fa0e9ea6309d8be8b2701d9ea51c4"},
-    {file = "zope.interface-7.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:848b6fa92d7c8143646e64124ed46818a0049a24ecc517958c520081fd147685"},
"zope.interface-7.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:848b6fa92d7c8143646e64124ed46818a0049a24ecc517958c520081fd147685"}, - {file = "zope.interface-7.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec1ef1fdb6f014d5886b97e52b16d0f852364f447d2ab0f0c6027765777b6667"}, - {file = "zope.interface-7.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bcff5c09d0215f42ba64b49205a278e44413d9bf9fa688fd9e42bfe472b5f4f"}, - {file = "zope.interface-7.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07add15de0cc7e69917f7d286b64d54125c950aeb43efed7a5ea7172f000fbc1"}, - {file = "zope.interface-7.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:9940d5bc441f887c5f375ec62bcf7e7e495a2d5b1da97de1184a88fb567f06af"}, - {file = "zope.interface-7.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f245d039f72e6f802902375755846f5de1ee1e14c3e8736c078565599bcab621"}, - {file = "zope.interface-7.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6159e767d224d8f18deff634a1d3722e68d27488c357f62ebeb5f3e2f5288b1f"}, - {file = "zope.interface-7.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e956b1fd7f3448dd5e00f273072e73e50dfafcb35e4227e6d5af208075593c9"}, - {file = "zope.interface-7.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff115ef91c0eeac69cd92daeba36a9d8e14daee445b504eeea2b1c0b55821984"}, - {file = "zope.interface-7.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec001798ab62c3fc5447162bf48496ae9fba02edc295a9e10a0b0c639a6452e"}, - {file = "zope.interface-7.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:124149e2d42067b9c6597f4dafdc7a0983d0163868f897b7bb5dc850b14f9a87"}, - {file = "zope.interface-7.1.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:9733a9a0f94ef53d7aa64661811b20875b5bc6039034c6e42fb9732170130573"}, - {file = "zope.interface-7.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5fcf379b875c610b5a41bc8a891841533f98de0520287d7f85e25386cd10d3e9"}, - {file = "zope.interface-7.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0a45b5af9f72c805ee668d1479480ca85169312211bed6ed18c343e39307d5f"}, - {file = "zope.interface-7.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af4a12b459a273b0b34679a5c3dc5e34c1847c3dd14a628aa0668e19e638ea2"}, - {file = "zope.interface-7.1.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a735f82d2e3ed47ca01a20dfc4c779b966b16352650a8036ab3955aad151ed8a"}, - {file = "zope.interface-7.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:5501e772aff595e3c54266bc1bfc5858e8f38974ce413a8f1044aae0f32a83a3"}, - {file = "zope.interface-7.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec59fe53db7d32abb96c6d4efeed84aab4a7c38c62d7a901a9b20c09dd936e7a"}, - {file = "zope.interface-7.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e53c291debef523b09e1fe3dffe5f35dde164f1c603d77f770b88a1da34b7ed6"}, - {file = "zope.interface-7.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:711eebc77f2092c6a8b304bad0b81a6ce3cf5490b25574e7309fbc07d881e3af"}, - {file = 
"zope.interface-7.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a00ead2e24c76436e1b457a5132d87f83858330f6c923640b7ef82d668525d1"}, - {file = "zope.interface-7.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e28ea0bc4b084fc93a483877653a033062435317082cdc6388dec3438309faf"}, - {file = "zope.interface-7.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:27cfb5205d68b12682b6e55ab8424662d96e8ead19550aad0796b08dd2c9a45e"}, - {file = "zope.interface-7.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e3e48f3dea21c147e1b10c132016cb79af1159facca9736d231694ef5a740a8"}, - {file = "zope.interface-7.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a99240b1d02dc469f6afbe7da1bf617645e60290c272968f4e53feec18d7dce8"}, - {file = "zope.interface-7.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc8a318162123eddbdf22fcc7b751288ce52e4ad096d3766ff1799244352449d"}, - {file = "zope.interface-7.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7b25db127db3e6b597c5f74af60309c4ad65acd826f89609662f0dc33a54728"}, - {file = "zope.interface-7.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a29ac607e970b5576547f0e3589ec156e04de17af42839eedcf478450687317"}, - {file = "zope.interface-7.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:a14c9decf0eb61e0892631271d500c1e306c7b6901c998c7035e194d9150fdd1"}, - {file = "zope_interface-7.1.0.tar.gz", hash = "sha256:3f005869a1a05e368965adb2075f97f8ee9a26c61898a9e52a9764d93774f237"}, + {file = "zope.interface-7.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6650bd56ef350d37c8baccfd3ee8a0483ed6f8666e641e4b9ae1a1827b79f9e5"}, + {file = "zope.interface-7.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84e87eba6b77a3af187bae82d8de1a7c208c2a04ec9f6bd444fd091b811ad92e"}, + {file = "zope.interface-7.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c4e1b4c06d9abd1037c088dae1566c85f344a3e6ae4350744c3f7f7259d9c67"}, + {file = "zope.interface-7.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7cd5e3d910ac87652a09f6e5db8e41bc3b49cf08ddd2d73d30afc644801492cd"}, + {file = "zope.interface-7.1.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca95594d936ee349620900be5b46c0122a1ff6ce42d7d5cb2cf09dc84071ef16"}, + {file = "zope.interface-7.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:ad339509dcfbbc99bf8e147db6686249c4032f26586699ec4c82f6e5909c9fe2"}, + {file = "zope.interface-7.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3e59f175e868f856a77c0a77ba001385c377df2104fdbda6b9f99456a01e102a"}, + {file = "zope.interface-7.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0de23bcb93401994ea00bc5c677ef06d420340ac0a4e9c10d80e047b9ce5af3f"}, + {file = "zope.interface-7.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdb7e7e5524b76d3ec037c1d81a9e2c7457b240fd4cb0a2476b65c3a5a6c81f"}, + {file = "zope.interface-7.1.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3603ef82a9920bd0bfb505423cb7e937498ad971ad5a6141841e8f76d2fd5446"}, + {file = 
"zope.interface-7.1.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1d52d052355e0c5c89e0630dd2ff7c0b823fd5f56286a663e92444761b35e25"}, + {file = "zope.interface-7.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:179ad46ece518c9084cb272e4a69d266b659f7f8f48e51706746c2d8a426433e"}, + {file = "zope.interface-7.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e6503534b52bb1720ace9366ee30838a58a3413d3e197512f3338c8f34b5d89d"}, + {file = "zope.interface-7.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f85b290e5b8b11814efb0d004d8ce6c9a483c35c462e8d9bf84abb93e79fa770"}, + {file = "zope.interface-7.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d029fac6a80edae80f79c37e5e3abfa92968fe921886139b3ee470a1b177321a"}, + {file = "zope.interface-7.1.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5836b8fb044c6e75ba34dfaabc602493019eadfa0faf6ff25f4c4c356a71a853"}, + {file = "zope.interface-7.1.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7395f13533318f150ee72adb55b29284b16e73b6d5f02ab21f173b3e83f242b8"}, + {file = "zope.interface-7.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:1d0e23c6b746eb8ce04573cc47bcac60961ac138885d207bd6f57e27a1431ae8"}, + {file = "zope.interface-7.1.1-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:9fad9bd5502221ab179f13ea251cb30eef7cf65023156967f86673aff54b53a0"}, + {file = "zope.interface-7.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:55c373becbd36a44d0c9be1d5271422fdaa8562d158fb44b4192297b3c67096c"}, + {file = "zope.interface-7.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed1df8cc01dd1e3970666a7370b8bfc7457371c58ba88c57bd5bca17ab198053"}, + {file = "zope.interface-7.1.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99c14f0727c978639139e6cad7a60e82b7720922678d75aacb90cf4ef74a068c"}, + {file = "zope.interface-7.1.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b1eed7670d564f1025d7cda89f99f216c30210e42e95de466135be0b4a499d9"}, + {file = "zope.interface-7.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:3defc925c4b22ac1272d544a49c6ba04c3eefcce3200319ee1be03d9270306dd"}, + {file = "zope.interface-7.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8d0fe45be57b5219aa4b96e846631c04615d5ef068146de5a02ccd15c185321f"}, + {file = "zope.interface-7.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bcbeb44fc16e0078b3b68a95e43f821ae34dcbf976dde6985141838a5f23dd3d"}, + {file = "zope.interface-7.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8e7b05dc6315a193cceaec071cc3cf1c180cea28808ccded0b1283f1c38ba73"}, + {file = "zope.interface-7.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d553e02b68c0ea5a226855f02edbc9eefd99f6a8886fa9f9bdf999d77f46585"}, + {file = "zope.interface-7.1.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81744a7e61b598ebcf4722ac56a7a4f50502432b5b4dc7eb29075a89cf82d029"}, + {file = "zope.interface-7.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7720322763aceb5e0a7cadcc38c67b839efe599f0887cbf6c003c55b1458c501"}, + {file = "zope.interface-7.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:1a2ed0852c25950cf430067f058f8d98df6288502ac313861d9803fe7691a9b3"}, + {file = "zope.interface-7.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9595e478047ce752b35cfa221d7601a5283ccdaab40422e0dc1d4a334c70f580"}, + {file = "zope.interface-7.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2317e1d4dba68203a5227ea3057f9078ec9376275f9700086b8f0ffc0b358e1b"}, + {file = "zope.interface-7.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6821ef9870f32154da873fcde439274f99814ea452dd16b99fa0b66345c4b6b"}, + {file = "zope.interface-7.1.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:190eeec67e023d5aac54d183fa145db0b898664234234ac54643a441da434616"}, + {file = "zope.interface-7.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:d17e7fc814eaab93409b80819fd6d30342844345c27f3bc3c4b43c2425a8d267"}, + {file = "zope.interface-7.1.1.tar.gz", hash = "sha256:4284d664ef0ff7b709836d4de7b13d80873dc5faeffc073abdb280058bfac5e3"}, ] [package.dependencies] @@ -10796,4 +10940,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "5b102e3bc077ed730e9fb7be9015541111ffe7787888372d50a757aecb1d9eff" +content-hash = "ef927b98c33d704d680e08db0e5c7d9a4e05454c66fcd6a5f656a65eb08e886b" diff --git a/api/pyproject.toml b/api/pyproject.toml index 6b41f79a27..ef3dc14131 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -172,11 +172,12 @@ sagemaker = "2.231.0" scikit-learn = "~1.5.1" sentry-sdk = { version = "~1.44.1", extras = ["flask"] } sqlalchemy = "~2.0.29" +starlette = "0.41.0" tencentcloud-sdk-python-hunyuan = "~3.0.1158" tiktoken = "~0.8.0" tokenizers = "~0.15.0" transformers = "~4.35.0" -unstructured = { version = "~0.10.27", extras = ["docx", "epub", "md", "msg", "ppt", "pptx"] } +unstructured = { version = "~0.16.1", extras = ["docx", "epub", "md", "msg", "ppt", "pptx"] } validators = "0.21.0" volcengine-python-sdk = {extras = ["ark"], version = "~1.0.98"} websocket-client = "~1.7.0" @@ -206,7 +207,7 @@ duckduckgo-search = "~6.3.0" jsonpath-ng = "1.6.1" matplotlib = "~3.8.2" newspaper3k = "0.2.8" -nltk = "3.8.1" +nltk = "3.9.1" numexpr = "~2.9.0" pydub = "~0.25.1" qrcode = "~7.4.2" @@ -238,6 +239,7 @@ alibabacloud_gpdb20160503 = "~3.8.0" alibabacloud_tea_openapi = "~0.3.9" chromadb = "0.5.1" clickhouse-connect = "~0.7.16" +couchbase = "~4.3.0" elasticsearch = "8.14.0" opensearch-py = "2.4.0" oracledb = "~2.2.1" @@ -245,9 +247,11 @@ pgvecto-rs = { version = "~0.2.1", extras = ['sqlalchemy'] } pgvector = "0.2.5" pymilvus = "~2.4.4" pymochow = "1.3.1" +pyobvector = "~0.1.6" qdrant-client = "1.7.3" tcvectordb = "1.3.2" tidb-vector = "0.0.9" +upstash-vector = "0.6.0" volcengine-compat = "~1.0.156" weaviate-client = "~3.21.0" diff --git a/api/pytest.ini b/api/pytest.ini index dcca08e2e5..a23a4b3f3d 100644 --- a/api/pytest.ini +++ b/api/pytest.ini @@ -27,3 +27,4 @@ env = XINFERENCE_GENERATION_MODEL_UID = generate XINFERENCE_RERANK_MODEL_UID = rerank XINFERENCE_SERVER_URL = http://a.abc.com:11451 + GITEE_AI_API_KEY = aaaaaaaaaaaaaaaaaaaa diff --git a/api/schedule/create_tidb_serverless_task.py b/api/schedule/create_tidb_serverless_task.py new file mode 100644 index 0000000000..42d6c04beb --- /dev/null +++ b/api/schedule/create_tidb_serverless_task.py @@ -0,0 +1,56 @@ +import time + +import click + +import app +from configs import dify_config +from core.rag.datasource.vdb.tidb_on_qdrant.tidb_service 
+from extensions.ext_database import db
+from models.dataset import TidbAuthBinding
+
+
+@app.celery.task(queue="dataset")
+def create_tidb_serverless_task():
+    click.echo(click.style("Start creating TiDB serverless clusters.", fg="green"))
+    tidb_serverless_number = dify_config.TIDB_SERVERLESS_NUMBER
+    start_at = time.perf_counter()
+    while True:
+        try:
+            # check how many idle (unassigned) TiDB serverless clusters are available
+            idle_tidb_serverless_number = TidbAuthBinding.query.filter(TidbAuthBinding.active == False).count()
+            if idle_tidb_serverless_number >= tidb_serverless_number:
+                break
+            # top up the pool by creating clusters in batches
+            batch_size = 20
+            create_clusters(batch_size)
+
+        except Exception as e:
+            click.echo(click.style(f"Error: {e}", fg="red"))
+            break
+
+    end_at = time.perf_counter()
+    click.echo(click.style("Create TiDB serverless task succeeded, latency: {}".format(end_at - start_at), fg="green"))
+
+
+def create_clusters(batch_size):
+    try:
+        new_clusters = TidbService.batch_create_tidb_serverless_cluster(
+            batch_size,
+            dify_config.TIDB_PROJECT_ID,
+            dify_config.TIDB_API_URL,
+            dify_config.TIDB_IAM_API_URL,
+            dify_config.TIDB_PUBLIC_KEY,
+            dify_config.TIDB_PRIVATE_KEY,
+            dify_config.TIDB_REGION,
+        )
+        for new_cluster in new_clusters:
+            tidb_auth_binding = TidbAuthBinding(
+                cluster_id=new_cluster["cluster_id"],
+                cluster_name=new_cluster["cluster_name"],
+                account=new_cluster["account"],
+                password=new_cluster["password"],
+            )
+            db.session.add(tidb_auth_binding)
+        db.session.commit()
+    except Exception as e:
+        click.echo(click.style(f"Error: {e}", fg="red"))
diff --git a/api/schedule/update_tidb_serverless_status_task.py b/api/schedule/update_tidb_serverless_status_task.py
new file mode 100644
index 0000000000..07eca3173b
--- /dev/null
+++ b/api/schedule/update_tidb_serverless_status_task.py
@@ -0,0 +1,51 @@
+import time
+
+import click
+
+import app
+from configs import dify_config
+from core.rag.datasource.vdb.tidb_on_qdrant.tidb_service import TidbService
+from models.dataset import TidbAuthBinding
+
+
+@app.celery.task(queue="dataset")
+def update_tidb_serverless_status_task():
+    click.echo(click.style("Start updating TiDB serverless status.", fg="green"))
+    start_at = time.perf_counter()
+    while True:
+        try:
+            # collect the inactive clusters that are still being provisioned
+            tidb_serverless_list = TidbAuthBinding.query.filter(
+                TidbAuthBinding.active == False, TidbAuthBinding.status == "CREATING"
+            ).all()
+            if len(tidb_serverless_list) == 0:
+                break
+            # refresh their status; update_clusters batches the
+            # underlying API calls (20 clusters per request)
+            update_clusters(tidb_serverless_list)
+
+        except Exception as e:
+            click.echo(click.style(f"Error: {e}", fg="red"))
+            break
+
+    end_at = time.perf_counter()
+    click.echo(
+        click.style("Update TiDB serverless status task succeeded, latency: {}".format(end_at - start_at), fg="green")
+    )
+
+
+def update_clusters(tidb_serverless_list: list[TidbAuthBinding]):
+    try:
+        # update statuses in batches of 20 to keep each API request small
+        for i in range(0, len(tidb_serverless_list), 20):
+            items = tidb_serverless_list[i : i + 20]
+            TidbService.batch_update_tidb_serverless_cluster_status(
+                items,
+                dify_config.TIDB_PROJECT_ID,
+                dify_config.TIDB_API_URL,
+                dify_config.TIDB_IAM_API_URL,
+                dify_config.TIDB_PUBLIC_KEY,
+                dify_config.TIDB_PRIVATE_KEY,
+            )
+    except Exception as e:
+        click.echo(click.style(f"Error: {e}", fg="red"))
diff --git a/api/services/account_service.py b/api/services/account_service.py
index fed9ae5a26..24472c349a 100644
--- a/api/services/account_service.py
+++ b/api/services/account_service.py
@@ -98,8 +98,8 @@ class AccountService:
         if not account:
             return None
 
-        if account.status in {AccountStatus.BANNED.value, AccountStatus.CLOSED.value}:
-            raise Unauthorized("Account is banned or closed.")
+        if account.status == AccountStatus.BANNED.value:
+            raise Unauthorized("Account is banned.")
 
         current_tenant = TenantAccountJoin.query.filter_by(account_id=account.id, current=True).first()
         if current_tenant:
@@ -143,8 +143,8 @@ class AccountService:
         if not account:
             raise AccountNotFoundError()
 
-        if account.status in {AccountStatus.BANNED.value, AccountStatus.CLOSED.value}:
-            raise AccountLoginError("Account is banned or closed.")
+        if account.status == AccountStatus.BANNED.value:
+            raise AccountLoginError("Account is banned.")
 
         if password and invite_token and account.password is None:
             # if invite_token is valid, set password and password_salt
@@ -408,8 +408,8 @@ class AccountService:
         if not account:
             return None
 
-        if account.status in {AccountStatus.BANNED.value, AccountStatus.CLOSED.value}:
-            raise Unauthorized("Account is banned or closed.")
+        if account.status == AccountStatus.BANNED.value:
+            raise Unauthorized("Account is banned.")
 
         return account
 
@@ -486,9 +486,13 @@ def _get_login_cache_key(*, account_id: str, token: str):
 
 class TenantService:
     @staticmethod
-    def create_tenant(name: str, is_setup: Optional[bool] = False) -> Tenant:
+    def create_tenant(name: str, is_setup: Optional[bool] = False, is_from_dashboard: Optional[bool] = False) -> Tenant:
         """Create tenant"""
-        if not FeatureService.get_system_features().is_allow_create_workspace and not is_setup:
+        if (
+            not FeatureService.get_system_features().is_allow_create_workspace
+            and not is_setup
+            and not is_from_dashboard
+        ):
             from controllers.console.error import NotAllowedCreateWorkspace
 
             raise NotAllowedCreateWorkspace()
@@ -505,9 +509,7 @@ class TenantService:
     def create_owner_tenant_if_not_exist(
        account: Account, name: Optional[str] = None, is_setup: Optional[bool] = False
     ):
-        """Create owner tenant if not exist"""
-        if not FeatureService.get_system_features().is_allow_create_workspace and not is_setup:
-            raise WorkSpaceNotAllowedCreateError()
+        """Check whether the user already has a workspace."""
         available_ta = (
             TenantAccountJoin.query.filter_by(account_id=account.id).order_by(TenantAccountJoin.id.asc()).first()
         )
@@ -515,6 +517,10 @@ class TenantService:
         if available_ta:
             return
 
+        """Create an owner tenant if none exists."""
+        if not FeatureService.get_system_features().is_allow_create_workspace and not is_setup:
+            raise WorkSpaceNotAllowedCreateError()
+
         if name:
             tenant = TenantService.create_tenant(name=name, is_setup=is_setup)
         else:
diff --git a/api/services/annotation_service.py b/api/services/annotation_service.py
index 3cc6c51c2d..915d37ec03 100644
--- a/api/services/annotation_service.py
+++ b/api/services/annotation_service.py
@@ -132,14 +132,14 @@ class AppAnnotationService:
                         MessageAnnotation.content.ilike("%{}%".format(keyword)),
                     )
                 )
-                .order_by(MessageAnnotation.created_at.desc())
+                .order_by(MessageAnnotation.created_at.desc(), MessageAnnotation.id.desc())
                 .paginate(page=page, per_page=limit, max_per_page=100, error_out=False)
             )
         else:
             annotations = (
                 db.session.query(MessageAnnotation)
                 .filter(MessageAnnotation.app_id == app_id)
-                .order_by(MessageAnnotation.created_at.desc())
+                .order_by(MessageAnnotation.created_at.desc(), MessageAnnotation.id.desc())
                 .paginate(page=page, per_page=limit, max_per_page=100, error_out=False)
            )
         return annotations.items, annotations.total
diff --git a/api/services/app_dsl_service/__init__.py b/api/services/app_dsl_service/__init__.py
new file mode 100644
index 0000000000..9fc988ffb3
--- /dev/null
+++ b/api/services/app_dsl_service/__init__.py
@@ -0,0 +1,3 @@
+from .service import AppDslService
+
+__all__ = ["AppDslService"]
diff --git a/api/services/app_dsl_service/exc.py b/api/services/app_dsl_service/exc.py
new file mode 100644
index 0000000000..6da4b1938f
--- /dev/null
+++ b/api/services/app_dsl_service/exc.py
@@ -0,0 +1,34 @@
+class DSLVersionNotSupportedError(ValueError):
+    """Raised when the imported DSL version is not supported by the current Dify version."""
+
+
+class InvalidYAMLFormatError(ValueError):
+    """Raised when the provided YAML format is invalid."""
+
+
+class MissingAppDataError(ValueError):
+    """Raised when the app data is missing in the provided DSL."""
+
+
+class InvalidAppModeError(ValueError):
+    """Raised when the app mode is invalid."""
+
+
+class MissingWorkflowDataError(ValueError):
+    """Raised when the workflow data is missing in the provided DSL."""
+
+
+class MissingModelConfigError(ValueError):
+    """Raised when the model config data is missing in the provided DSL."""
+
+
+class FileSizeLimitExceededError(ValueError):
+    """Raised when the file size exceeds the allowed limit."""
+
+
+class EmptyContentError(ValueError):
+    """Raised when the content fetched from the URL is empty."""
+
+
+class ContentDecodingError(ValueError):
+    """Raised when there is an error decoding the content."""
diff --git a/api/services/app_dsl_service.py b/api/services/app_dsl_service/service.py
similarity index 75%
rename from api/services/app_dsl_service.py
rename to api/services/app_dsl_service/service.py
index 750d0a8cd2..32b95ae3aa 100644
--- a/api/services/app_dsl_service.py
+++ b/api/services/app_dsl_service/service.py
@@ -1,8 +1,11 @@
 import logging
+from collections.abc import Mapping
+from typing import Any
 
-import httpx
-import yaml  # type: ignore
+import yaml
+from packaging import version
 
+from core.helper import ssrf_proxy
 from events.app_event import app_model_config_was_updated, app_was_created
 from extensions.ext_database import db
 from factories import variable_factory
@@ -11,6 +14,17 @@ from models.model import App, AppMode, AppModelConfig
 from models.workflow import Workflow
 from services.workflow_service import WorkflowService
 
+from .exc import (
+    ContentDecodingError,
+    EmptyContentError,
+    FileSizeLimitExceededError,
+    InvalidAppModeError,
+    InvalidYAMLFormatError,
+    MissingAppDataError,
+    MissingModelConfigError,
+    MissingWorkflowDataError,
+)
+
 logger = logging.getLogger(__name__)
 
 current_dsl_version = "0.1.2"
@@ -30,32 +44,21 @@ class AppDslService:
         :param args: request args
         :param account: Account instance
         """
-        try:
-            max_size = 10 * 1024 * 1024  # 10MB
-            timeout = httpx.Timeout(10.0)
-            with httpx.stream("GET", url.strip(), follow_redirects=True, timeout=timeout) as response:
-                response.raise_for_status()
-                total_size = 0
-                content = b""
-                for chunk in response.iter_bytes():
-                    total_size += len(chunk)
-                    if total_size > max_size:
-                        raise ValueError("File size exceeds the limit of 10MB")
-                    content += chunk
-        except httpx.HTTPStatusError as http_err:
-            raise ValueError(f"HTTP error occurred: {http_err}")
-        except httpx.RequestError as req_err:
-            raise ValueError(f"Request error occurred: {req_err}")
-        except Exception as e:
-            raise ValueError(f"Failed to fetch DSL from URL: {e}")
+        max_size = 10 * 1024 * 1024  # 10MB
+        response = ssrf_proxy.get(url.strip(), follow_redirects=True, timeout=(10, 10))
+        response.raise_for_status()
+        content = response.content
+
+        if len(content) > max_size:
+            raise FileSizeLimitExceededError("File size exceeds the limit of 10MB")
 
raise FileSizeLimitExceededError("File size exceeds the limit of 10MB") if not content: - raise ValueError("Empty content from url") + raise EmptyContentError("Empty content from url") try: data = content.decode("utf-8") except UnicodeDecodeError as e: - raise ValueError(f"Error decoding content: {e}") + raise ContentDecodingError(f"Error decoding content: {e}") return cls.import_and_create_new_app(tenant_id, data, args, account) @@ -71,14 +74,14 @@ class AppDslService: try: import_data = yaml.safe_load(data) except yaml.YAMLError: - raise ValueError("Invalid YAML format in data argument.") + raise InvalidYAMLFormatError("Invalid YAML format in data argument.") # check or repair dsl version - import_data = cls._check_or_fix_dsl(import_data) + import_data = _check_or_fix_dsl(import_data) app_data = import_data.get("app") if not app_data: - raise ValueError("Missing app in data argument") + raise MissingAppDataError("Missing app in data argument") # get app basic info name = args.get("name") or app_data.get("name") @@ -90,11 +93,18 @@ class AppDslService: # import dsl and create app app_mode = AppMode.value_of(app_data.get("mode")) + if app_mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}: + workflow_data = import_data.get("workflow") + if not workflow_data or not isinstance(workflow_data, dict): + raise MissingWorkflowDataError( + "Missing workflow in data argument when app mode is advanced-chat or workflow" + ) + app = cls._import_and_create_new_workflow_based_app( tenant_id=tenant_id, app_mode=app_mode, - workflow_data=import_data.get("workflow"), + workflow_data=workflow_data, account=account, name=name, description=description, @@ -104,10 +114,16 @@ class AppDslService: use_icon_as_answer_icon=use_icon_as_answer_icon, ) elif app_mode in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.COMPLETION}: + model_config = import_data.get("model_config") + if not model_config or not isinstance(model_config, dict): + raise MissingModelConfigError( + "Missing model_config in data argument when app mode is chat, agent-chat or completion" + ) + app = cls._import_and_create_new_model_config_based_app( tenant_id=tenant_id, app_mode=app_mode, - model_config_data=import_data.get("model_config"), + model_config_data=model_config, account=account, name=name, description=description, @@ -117,7 +133,7 @@ class AppDslService: use_icon_as_answer_icon=use_icon_as_answer_icon, ) else: - raise ValueError("Invalid app mode") + raise InvalidAppModeError("Invalid app mode") return app @@ -132,26 +148,32 @@ class AppDslService: try: import_data = yaml.safe_load(data) except yaml.YAMLError: - raise ValueError("Invalid YAML format in data argument.") + raise InvalidYAMLFormatError("Invalid YAML format in data argument.") # check or repair dsl version - import_data = cls._check_or_fix_dsl(import_data) + import_data = _check_or_fix_dsl(import_data) app_data = import_data.get("app") if not app_data: - raise ValueError("Missing app in data argument") + raise MissingAppDataError("Missing app in data argument") # import dsl and overwrite app app_mode = AppMode.value_of(app_data.get("mode")) if app_mode not in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}: - raise ValueError("Only support import workflow in advanced-chat or workflow app.") + raise InvalidAppModeError("Only support import workflow in advanced-chat or workflow app.") if app_data.get("mode") != app_model.mode: raise ValueError(f"App mode {app_data.get('mode')} is not matched with current app mode {app_mode.value}") + workflow_data = import_data.get("workflow") + if not 
workflow_data or not isinstance(workflow_data, dict): + raise MissingWorkflowDataError( + "Missing workflow in data argument when app mode is advanced-chat or workflow" + ) + return cls._import_and_overwrite_workflow_based_app( app_model=app_model, - workflow_data=import_data.get("workflow"), + workflow_data=workflow_data, account=account, ) @@ -186,35 +208,12 @@ class AppDslService: return yaml.dump(export_data, allow_unicode=True) - @classmethod - def _check_or_fix_dsl(cls, import_data: dict) -> dict: - """ - Check or fix dsl - - :param import_data: import data - """ - if not import_data.get("version"): - import_data["version"] = "0.1.0" - - if not import_data.get("kind") or import_data.get("kind") != "app": - import_data["kind"] = "app" - - if import_data.get("version") != current_dsl_version: - # Currently only one DSL version, so no difference checks or compatibility fixes will be performed. - logger.warning( - f"DSL version {import_data.get('version')} is not compatible " - f"with current version {current_dsl_version}, related to " - f"Dify version {dsl_to_dify_version_mapping.get(current_dsl_version)}." - ) - - return import_data - @classmethod def _import_and_create_new_workflow_based_app( cls, tenant_id: str, app_mode: AppMode, - workflow_data: dict, + workflow_data: Mapping[str, Any], account: Account, name: str, description: str, @@ -238,7 +237,9 @@ class AppDslService: :param use_icon_as_answer_icon: use app icon as answer icon """ if not workflow_data: - raise ValueError("Missing workflow in data argument when app mode is advanced-chat or workflow") + raise MissingWorkflowDataError( + "Missing workflow in data argument when app mode is advanced-chat or workflow" + ) app = cls._create_app( tenant_id=tenant_id, @@ -277,7 +278,7 @@ class AppDslService: @classmethod def _import_and_overwrite_workflow_based_app( - cls, app_model: App, workflow_data: dict, account: Account + cls, app_model: App, workflow_data: Mapping[str, Any], account: Account ) -> Workflow: """ Import app dsl and overwrite workflow based app @@ -287,7 +288,9 @@ class AppDslService: :param account: Account instance """ if not workflow_data: - raise ValueError("Missing workflow in data argument when app mode is advanced-chat or workflow") + raise MissingWorkflowDataError( + "Missing workflow in data argument when app mode is advanced-chat or workflow" + ) # fetch draft workflow by app_model workflow_service = WorkflowService() @@ -323,7 +326,7 @@ class AppDslService: cls, tenant_id: str, app_mode: AppMode, - model_config_data: dict, + model_config_data: Mapping[str, Any], account: Account, name: str, description: str, @@ -345,7 +348,9 @@ class AppDslService: :param icon_background: app icon background """ if not model_config_data: - raise ValueError("Missing model_config in data argument when app mode is chat, agent-chat or completion") + raise MissingModelConfigError( + "Missing model_config in data argument when app mode is chat, agent-chat or completion" + ) app = cls._create_app( tenant_id=tenant_id, @@ -448,3 +453,36 @@ class AppDslService: raise ValueError("Missing app configuration, please check.") export_data["model_config"] = app_model_config.to_dict() + + +def _check_or_fix_dsl(import_data: dict[str, Any]) -> Mapping[str, Any]: + """ + Check or fix dsl + + :param import_data: import data + :raises DSLVersionNotSupportedError: if the imported DSL version is newer than the current version + """ + if not import_data.get("version"): + import_data["version"] = "0.1.0" + + if not import_data.get("kind") or 
import_data.get("kind") != "app": + import_data["kind"] = "app" + + imported_version = import_data.get("version") + if imported_version != current_dsl_version: + if imported_version and version.parse(imported_version) > version.parse(current_dsl_version): + errmsg = ( + f"The imported DSL version {imported_version} is newer than " + f"the current supported version {current_dsl_version}. " + f"Please upgrade your Dify instance to import this configuration." + ) + logger.warning(errmsg) + # raise DSLVersionNotSupportedError(errmsg) + else: + logger.warning( + f"DSL version {imported_version} is older than " + f"the current version {current_dsl_version}. " + f"This may cause compatibility issues." + ) + + return import_data diff --git a/api/services/auth/jina.py b/api/services/auth/jina.py new file mode 100644 index 0000000000..de898a1f94 --- /dev/null +++ b/api/services/auth/jina.py @@ -0,0 +1,44 @@ +import json + +import requests + +from services.auth.api_key_auth_base import ApiKeyAuthBase + + +class JinaAuth(ApiKeyAuthBase): + def __init__(self, credentials: dict): + super().__init__(credentials) + auth_type = credentials.get("auth_type") + if auth_type != "bearer": + raise ValueError("Invalid auth type, Jina Reader auth type must be Bearer") + self.api_key = credentials.get("config").get("api_key", None) + + if not self.api_key: + raise ValueError("No API key provided") + + def validate_credentials(self): + headers = self._prepare_headers() + options = { + "url": "https://example.com", + } + response = self._post_request("https://r.jina.ai", options, headers) + if response.status_code == 200: + return True + else: + self._handle_error(response) + + def _prepare_headers(self): + return {"Content-Type": "application/json", "Authorization": f"Bearer {self.api_key}"} + + def _post_request(self, url, data, headers): + return requests.post(url, headers=headers, json=data) + + def _handle_error(self, response): + if response.status_code in {402, 409, 500}: + error_message = response.json().get("error", "Unknown error occurred") + raise Exception(f"Failed to authorize. Status code: {response.status_code}. Error: {error_message}") + else: + if response.text: + error_message = json.loads(response.text).get("error", "Unknown error occurred") + raise Exception(f"Failed to authorize. Status code: {response.status_code}. Error: {error_message}") + raise Exception(f"Unexpected error occurred while trying to authorize. 
Status code: {response.status_code}") diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index ede8764086..ac05cbc4f5 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -4,7 +4,7 @@ import logging import random import time import uuid -from typing import Optional +from typing import Any, Optional from flask_login import current_user from sqlalchemy import func @@ -140,6 +140,7 @@ class DatasetService: def create_empty_dataset( tenant_id: str, name: str, + description: Optional[str], indexing_technique: Optional[str], account: Account, permission: Optional[str] = None, @@ -158,6 +159,7 @@ class DatasetService: ) dataset = Dataset(name=name, indexing_technique=indexing_technique) # dataset = Dataset(name=name, provider=provider, config=config) + dataset.description = description dataset.created_by = account.id dataset.updated_by = account.id dataset.tenant_id = tenant_id @@ -673,7 +675,7 @@ class DocumentService: def save_document_with_dataset_id( dataset: Dataset, document_data: dict, - account: Account, + account: Account | Any, dataset_process_rule: Optional[DatasetProcessRule] = None, created_from: str = "web", ): @@ -734,11 +736,12 @@ class DocumentService: dataset.retrieval_model = document_data.get("retrieval_model") or default_retrieval_model documents = [] - batch = time.strftime("%Y%m%d%H%M%S") + str(random.randint(100000, 999999)) if document_data.get("original_document_id"): document = DocumentService.update_document_with_dataset_id(dataset, document_data, account) documents.append(document) + batch = document.batch else: + batch = time.strftime("%Y%m%d%H%M%S") + str(random.randint(100000, 999999)) # save process rule if not dataset_process_rule: process_rule = document_data["process_rule"] @@ -758,164 +761,166 @@ class DocumentService: ) db.session.add(dataset_process_rule) db.session.commit() - position = DocumentService.get_documents_position(dataset.id) - document_ids = [] - duplicate_document_ids = [] - if document_data["data_source"]["type"] == "upload_file": - upload_file_list = document_data["data_source"]["info_list"]["file_info_list"]["file_ids"] - for file_id in upload_file_list: - file = ( - db.session.query(UploadFile) - .filter(UploadFile.tenant_id == dataset.tenant_id, UploadFile.id == file_id) - .first() - ) - - # raise error if file not found - if not file: - raise FileNotExistsError() - - file_name = file.name - data_source_info = { - "upload_file_id": file_id, - } - # check duplicate - if document_data.get("duplicate", False): - document = Document.query.filter_by( - dataset_id=dataset.id, - tenant_id=current_user.current_tenant_id, - data_source_type="upload_file", - enabled=True, - name=file_name, - ).first() - if document: - document.dataset_process_rule_id = dataset_process_rule.id - document.updated_at = datetime.datetime.utcnow() - document.created_from = created_from - document.doc_form = document_data["doc_form"] - document.doc_language = document_data["doc_language"] - document.data_source_info = json.dumps(data_source_info) - document.batch = batch - document.indexing_status = "waiting" - db.session.add(document) - documents.append(document) - duplicate_document_ids.append(document.id) - continue - document = DocumentService.build_document( - dataset, - dataset_process_rule.id, - document_data["data_source"]["type"], - document_data["doc_form"], - document_data["doc_language"], - data_source_info, - created_from, - position, - account, - file_name, - batch, - ) - db.session.add(document) - 
db.session.flush() - document_ids.append(document.id) - documents.append(document) - position += 1 - elif document_data["data_source"]["type"] == "notion_import": - notion_info_list = document_data["data_source"]["info_list"]["notion_info_list"] - exist_page_ids = [] - exist_document = {} - documents = Document.query.filter_by( - dataset_id=dataset.id, - tenant_id=current_user.current_tenant_id, - data_source_type="notion_import", - enabled=True, - ).all() - if documents: - for document in documents: - data_source_info = json.loads(document.data_source_info) - exist_page_ids.append(data_source_info["notion_page_id"]) - exist_document[data_source_info["notion_page_id"]] = document.id - for notion_info in notion_info_list: - workspace_id = notion_info["workspace_id"] - data_source_binding = DataSourceOauthBinding.query.filter( - db.and_( - DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, - DataSourceOauthBinding.provider == "notion", - DataSourceOauthBinding.disabled == False, - DataSourceOauthBinding.source_info["workspace_id"] == f'"{workspace_id}"', + lock_name = "add_document_lock_dataset_id_{}".format(dataset.id) + with redis_client.lock(lock_name, timeout=600): + position = DocumentService.get_documents_position(dataset.id) + document_ids = [] + duplicate_document_ids = [] + if document_data["data_source"]["type"] == "upload_file": + upload_file_list = document_data["data_source"]["info_list"]["file_info_list"]["file_ids"] + for file_id in upload_file_list: + file = ( + db.session.query(UploadFile) + .filter(UploadFile.tenant_id == dataset.tenant_id, UploadFile.id == file_id) + .first() ) - ).first() - if not data_source_binding: - raise ValueError("Data source binding not found.") - for page in notion_info["pages"]: - if page["page_id"] not in exist_page_ids: - data_source_info = { - "notion_workspace_id": workspace_id, - "notion_page_id": page["page_id"], - "notion_page_icon": page["page_icon"], - "type": page["type"], - } - document = DocumentService.build_document( - dataset, - dataset_process_rule.id, - document_data["data_source"]["type"], - document_data["doc_form"], - document_data["doc_language"], - data_source_info, - created_from, - position, - account, - page["page_name"], - batch, - ) - db.session.add(document) - db.session.flush() - document_ids.append(document.id) - documents.append(document) - position += 1 - else: - exist_document.pop(page["page_id"]) - # delete not selected documents - if len(exist_document) > 0: - clean_notion_document_task.delay(list(exist_document.values()), dataset.id) - elif document_data["data_source"]["type"] == "website_crawl": - website_info = document_data["data_source"]["info_list"]["website_info_list"] - urls = website_info["urls"] - for url in urls: - data_source_info = { - "url": url, - "provider": website_info["provider"], - "job_id": website_info["job_id"], - "only_main_content": website_info.get("only_main_content", False), - "mode": "crawl", - } - if len(url) > 255: - document_name = url[:200] + "..." 
- else: - document_name = url - document = DocumentService.build_document( - dataset, - dataset_process_rule.id, - document_data["data_source"]["type"], - document_data["doc_form"], - document_data["doc_language"], - data_source_info, - created_from, - position, - account, - document_name, - batch, - ) - db.session.add(document) - db.session.flush() - document_ids.append(document.id) - documents.append(document) - position += 1 - db.session.commit() - # trigger async task - if document_ids: - document_indexing_task.delay(dataset.id, document_ids) - if duplicate_document_ids: - duplicate_document_indexing_task.delay(dataset.id, duplicate_document_ids) + # raise error if file not found + if not file: + raise FileNotExistsError() + + file_name = file.name + data_source_info = { + "upload_file_id": file_id, + } + # check duplicate + if document_data.get("duplicate", False): + document = Document.query.filter_by( + dataset_id=dataset.id, + tenant_id=current_user.current_tenant_id, + data_source_type="upload_file", + enabled=True, + name=file_name, + ).first() + if document: + document.dataset_process_rule_id = dataset_process_rule.id + document.updated_at = datetime.datetime.utcnow() + document.created_from = created_from + document.doc_form = document_data["doc_form"] + document.doc_language = document_data["doc_language"] + document.data_source_info = json.dumps(data_source_info) + document.batch = batch + document.indexing_status = "waiting" + db.session.add(document) + documents.append(document) + duplicate_document_ids.append(document.id) + continue + document = DocumentService.build_document( + dataset, + dataset_process_rule.id, + document_data["data_source"]["type"], + document_data["doc_form"], + document_data["doc_language"], + data_source_info, + created_from, + position, + account, + file_name, + batch, + ) + db.session.add(document) + db.session.flush() + document_ids.append(document.id) + documents.append(document) + position += 1 + elif document_data["data_source"]["type"] == "notion_import": + notion_info_list = document_data["data_source"]["info_list"]["notion_info_list"] + exist_page_ids = [] + exist_document = {} + documents = Document.query.filter_by( + dataset_id=dataset.id, + tenant_id=current_user.current_tenant_id, + data_source_type="notion_import", + enabled=True, + ).all() + if documents: + for document in documents: + data_source_info = json.loads(document.data_source_info) + exist_page_ids.append(data_source_info["notion_page_id"]) + exist_document[data_source_info["notion_page_id"]] = document.id + for notion_info in notion_info_list: + workspace_id = notion_info["workspace_id"] + data_source_binding = DataSourceOauthBinding.query.filter( + db.and_( + DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, + DataSourceOauthBinding.provider == "notion", + DataSourceOauthBinding.disabled == False, + DataSourceOauthBinding.source_info["workspace_id"] == f'"{workspace_id}"', + ) + ).first() + if not data_source_binding: + raise ValueError("Data source binding not found.") + for page in notion_info["pages"]: + if page["page_id"] not in exist_page_ids: + data_source_info = { + "notion_workspace_id": workspace_id, + "notion_page_id": page["page_id"], + "notion_page_icon": page["page_icon"], + "type": page["type"], + } + document = DocumentService.build_document( + dataset, + dataset_process_rule.id, + document_data["data_source"]["type"], + document_data["doc_form"], + document_data["doc_language"], + data_source_info, + created_from, + position, + account, + 
page["page_name"], + batch, + ) + db.session.add(document) + db.session.flush() + document_ids.append(document.id) + documents.append(document) + position += 1 + else: + exist_document.pop(page["page_id"]) + # delete not selected documents + if len(exist_document) > 0: + clean_notion_document_task.delay(list(exist_document.values()), dataset.id) + elif document_data["data_source"]["type"] == "website_crawl": + website_info = document_data["data_source"]["info_list"]["website_info_list"] + urls = website_info["urls"] + for url in urls: + data_source_info = { + "url": url, + "provider": website_info["provider"], + "job_id": website_info["job_id"], + "only_main_content": website_info.get("only_main_content", False), + "mode": "crawl", + } + if len(url) > 255: + document_name = url[:200] + "..." + else: + document_name = url + document = DocumentService.build_document( + dataset, + dataset_process_rule.id, + document_data["data_source"]["type"], + document_data["doc_form"], + document_data["doc_language"], + data_source_info, + created_from, + position, + account, + document_name, + batch, + ) + db.session.add(document) + db.session.flush() + document_ids.append(document.id) + documents.append(document) + position += 1 + db.session.commit() + + # trigger async task + if document_ids: + document_indexing_task.delay(dataset.id, document_ids) + if duplicate_document_ids: + duplicate_document_indexing_task.delay(dataset.id, duplicate_document_ids) return documents, batch diff --git a/api/services/external_knowledge_service.py b/api/services/external_knowledge_service.py index 4efdf8d7db..b49738c61c 100644 --- a/api/services/external_knowledge_service.py +++ b/api/services/external_knowledge_service.py @@ -6,6 +6,8 @@ from typing import Any, Optional, Union import httpx import validators +from constants import HIDDEN_VALUE + # from tasks.external_document_indexing_task import external_document_indexing_task from core.helper import ssrf_proxy from extensions.ext_database import db @@ -68,7 +70,7 @@ class ExternalDatasetService: endpoint = f"{settings['endpoint']}/retrieval" api_key = settings["api_key"] - if not validators.url(endpoint): + if not validators.url(endpoint, simple_host=True): raise ValueError(f"invalid endpoint: {endpoint}") try: response = httpx.post(endpoint, headers={"Authorization": f"Bearer {api_key}"}) @@ -92,6 +94,8 @@ class ExternalDatasetService: ).first() if external_knowledge_api is None: raise ValueError("api template not found") + if args.get("settings") and args.get("settings").get("api_key") == HIDDEN_VALUE: + args.get("settings")["api_key"] = external_knowledge_api.settings_dict.get("api_key") external_knowledge_api.name = args.get("name") external_knowledge_api.description = args.get("description", "") diff --git a/api/services/file_service.py b/api/services/file_service.py index 22ea923f6b..976111502c 100644 --- a/api/services/file_service.py +++ b/api/services/file_service.py @@ -1,11 +1,9 @@ import datetime import hashlib import uuid -from collections.abc import Generator -from typing import Literal, Union +from typing import Any, Literal, Union from flask_login import current_user -from werkzeug.datastructures import FileStorage from werkzeug.exceptions import NotFound from configs import dify_config @@ -22,7 +20,8 @@ from extensions.ext_storage import storage from models.account import Account from models.enums import CreatedByRole from models.model import EndUser, UploadFile -from services.errors.file import FileNotExistsError, FileTooLargeError, 
UnsupportedFileTypeError + +from .errors.file import FileTooLargeError, UnsupportedFileTypeError PREVIEW_WORDS_LIMIT = 3000 @@ -30,38 +29,28 @@ PREVIEW_WORDS_LIMIT = 3000 class FileService: @staticmethod def upload_file( - file: FileStorage, user: Union[Account, EndUser], source: Literal["datasets"] | None = None + *, + filename: str, + content: bytes, + mimetype: str, + user: Union[Account, EndUser, Any], + source: Literal["datasets"] | None = None, + source_url: str = "", ) -> UploadFile: - # get file name - filename = file.filename - if not filename: - raise FileNotExistsError - extension = filename.split(".")[-1] + # get file extension + extension = filename.split(".")[-1].lower() if len(filename) > 200: filename = filename.split(".")[0][:200] + "." + extension if source == "datasets" and extension not in DOCUMENT_EXTENSIONS: raise UnsupportedFileTypeError() - # select file size limit - if extension in IMAGE_EXTENSIONS: - file_size_limit = dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT * 1024 * 1024 - elif extension in VIDEO_EXTENSIONS: - file_size_limit = dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT * 1024 * 1024 - elif extension in AUDIO_EXTENSIONS: - file_size_limit = dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT * 1024 * 1024 - else: - file_size_limit = dify_config.UPLOAD_FILE_SIZE_LIMIT * 1024 * 1024 - - # read file content - file_content = file.read() # get file size - file_size = len(file_content) + file_size = len(content) # check if the file size is exceeded - if file_size > file_size_limit: - message = f"File size exceeded. {file_size} > {file_size_limit}" - raise FileTooLargeError(message) + if not FileService.is_file_size_within_limit(extension=extension, file_size=file_size): + raise FileTooLargeError # generate file key file_uuid = str(uuid.uuid4()) @@ -75,7 +64,7 @@ class FileService: file_key = "upload_files/" + current_tenant_id + "/" + file_uuid + "." 
+ extension # save file to storage - storage.save(file_key, file_content) + storage.save(file_key, content) # save file to db upload_file = UploadFile( @@ -85,12 +74,13 @@ class FileService: name=filename, size=file_size, extension=extension, - mime_type=file.mimetype, + mime_type=mimetype, created_by_role=(CreatedByRole.ACCOUNT if isinstance(user, Account) else CreatedByRole.END_USER), created_by=user.id, created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None), used=False, - hash=hashlib.sha3_256(file_content).hexdigest(), + hash=hashlib.sha3_256(content).hexdigest(), + source_url=source_url, ) db.session.add(upload_file) @@ -98,6 +88,19 @@ class FileService: return upload_file + @staticmethod + def is_file_size_within_limit(*, extension: str, file_size: int) -> bool: + if extension in IMAGE_EXTENSIONS: + file_size_limit = dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT * 1024 * 1024 + elif extension in VIDEO_EXTENSIONS: + file_size_limit = dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT * 1024 * 1024 + elif extension in AUDIO_EXTENSIONS: + file_size_limit = dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT * 1024 * 1024 + else: + file_size_limit = dify_config.UPLOAD_FILE_SIZE_LIMIT * 1024 * 1024 + + return file_size <= file_size_limit + @staticmethod def upload_text(text: str, text_name: str) -> UploadFile: if len(text_name) > 200: @@ -132,7 +135,7 @@ class FileService: return upload_file @staticmethod - def get_file_preview(file_id: str) -> str: + def get_file_preview(file_id: str): upload_file = db.session.query(UploadFile).filter(UploadFile.id == file_id).first() if not upload_file: @@ -171,7 +174,7 @@ class FileService: return generator, upload_file.mime_type @staticmethod - def get_signed_file_preview(file_id: str, timestamp: str, nonce: str, sign: str): + def get_file_generator_by_file_id(file_id: str, timestamp: str, nonce: str, sign: str): result = file_helpers.verify_file_signature(upload_file_id=file_id, timestamp=timestamp, nonce=nonce, sign=sign) if not result: raise NotFound("File not found or signature is invalid") @@ -183,10 +186,10 @@ class FileService: generator = storage.load(upload_file.key, stream=True) - return generator, upload_file.mime_type + return generator, upload_file @staticmethod - def get_public_image_preview(file_id: str) -> tuple[Generator, str]: + def get_public_image_preview(file_id: str): upload_file = db.session.query(UploadFile).filter(UploadFile.id == file_id).first() if not upload_file: diff --git a/api/tests/integration_tests/.env.example b/api/tests/integration_tests/.env.example index 452aa0ad86..fa4a2eb36c 100644 --- a/api/tests/integration_tests/.env.example +++ b/api/tests/integration_tests/.env.example @@ -91,3 +91,10 @@ INNER_API_KEY= # Marketplace configuration MARKETPLACE_API_URL= +# VESSL AI Credentials +VESSL_AI_MODEL_NAME= +VESSL_AI_API_KEY= +VESSL_AI_ENDPOINT_URL= + +# Gitee AI Credentials +GITEE_AI_API_KEY= diff --git a/api/tests/integration_tests/model_runtime/gitee_ai/__init__.py b/api/tests/integration_tests/model_runtime/gitee_ai/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/model_runtime/gitee_ai/test_llm.py b/api/tests/integration_tests/model_runtime/gitee_ai/test_llm.py new file mode 100644 index 0000000000..753c52ce31 --- /dev/null +++ b/api/tests/integration_tests/model_runtime/gitee_ai/test_llm.py @@ -0,0 +1,132 @@ +import os +from collections.abc import Generator + +import pytest + +from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, 
LLMResultChunkDelta +from core.model_runtime.entities.message_entities import ( + AssistantPromptMessage, + PromptMessageTool, + SystemPromptMessage, + UserPromptMessage, +) +from core.model_runtime.entities.model_entities import AIModelEntity +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.gitee_ai.llm.llm import GiteeAILargeLanguageModel + + +def test_predefined_models(): + model = GiteeAILargeLanguageModel() + model_schemas = model.predefined_models() + + assert len(model_schemas) >= 1 + assert isinstance(model_schemas[0], AIModelEntity) + + +def test_validate_credentials_for_chat_model(): + model = GiteeAILargeLanguageModel() + + with pytest.raises(CredentialsValidateFailedError): + # model name to gpt-3.5-turbo because of mocking + model.validate_credentials(model="gpt-3.5-turbo", credentials={"api_key": "invalid_key"}) + + model.validate_credentials( + model="Qwen2-7B-Instruct", + credentials={"api_key": os.environ.get("GITEE_AI_API_KEY")}, + ) + + +def test_invoke_chat_model(): + model = GiteeAILargeLanguageModel() + + result = model.invoke( + model="Qwen2-7B-Instruct", + credentials={"api_key": os.environ.get("GITEE_AI_API_KEY")}, + prompt_messages=[ + SystemPromptMessage( + content="You are a helpful AI assistant.", + ), + UserPromptMessage(content="Hello World!"), + ], + model_parameters={ + "temperature": 0.0, + "top_p": 1.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0, + "max_tokens": 10, + "stream": False, + }, + stop=["How"], + stream=False, + user="foo", + ) + + assert isinstance(result, LLMResult) + assert len(result.message.content) > 0 + + +def test_invoke_stream_chat_model(): + model = GiteeAILargeLanguageModel() + + result = model.invoke( + model="Qwen2-7B-Instruct", + credentials={"api_key": os.environ.get("GITEE_AI_API_KEY")}, + prompt_messages=[ + SystemPromptMessage( + content="You are a helpful AI assistant.", + ), + UserPromptMessage(content="Hello World!"), + ], + model_parameters={"temperature": 0.0, "max_tokens": 100, "stream": False}, + stream=True, + user="foo", + ) + + assert isinstance(result, Generator) + + for chunk in result: + assert isinstance(chunk, LLMResultChunk) + assert isinstance(chunk.delta, LLMResultChunkDelta) + assert isinstance(chunk.delta.message, AssistantPromptMessage) + assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True + if chunk.delta.finish_reason is not None: + assert chunk.delta.usage is not None + + +def test_get_num_tokens(): + model = GiteeAILargeLanguageModel() + + num_tokens = model.get_num_tokens( + model="Qwen2-7B-Instruct", + credentials={"api_key": os.environ.get("GITEE_AI_API_KEY")}, + prompt_messages=[UserPromptMessage(content="Hello World!")], + ) + + assert num_tokens == 10 + + num_tokens = model.get_num_tokens( + model="Qwen2-7B-Instruct", + credentials={"api_key": os.environ.get("GITEE_AI_API_KEY")}, + prompt_messages=[ + SystemPromptMessage( + content="You are a helpful AI assistant.", + ), + UserPromptMessage(content="Hello World!"), + ], + tools=[ + PromptMessageTool( + name="get_weather", + description="Determine weather in my location", + parameters={ + "type": "object", + "properties": { + "location": {"type": "string", "description": "The city and state e.g. 
San Francisco, CA"}, + "unit": {"type": "string", "enum": ["c", "f"]}, + }, + "required": ["location"], + }, + ), + ], + ) + + assert num_tokens == 77 diff --git a/api/tests/integration_tests/model_runtime/gitee_ai/test_provider.py b/api/tests/integration_tests/model_runtime/gitee_ai/test_provider.py new file mode 100644 index 0000000000..f12ed54a45 --- /dev/null +++ b/api/tests/integration_tests/model_runtime/gitee_ai/test_provider.py @@ -0,0 +1,15 @@ +import os + +import pytest + +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.gitee_ai.gitee_ai import GiteeAIProvider + + +def test_validate_provider_credentials(): + provider = GiteeAIProvider() + + with pytest.raises(CredentialsValidateFailedError): + provider.validate_provider_credentials(credentials={"api_key": "invalid_key"}) + + provider.validate_provider_credentials(credentials={"api_key": os.environ.get("GITEE_AI_API_KEY")}) diff --git a/api/tests/integration_tests/model_runtime/gitee_ai/test_rerank.py b/api/tests/integration_tests/model_runtime/gitee_ai/test_rerank.py new file mode 100644 index 0000000000..0e5914a61f --- /dev/null +++ b/api/tests/integration_tests/model_runtime/gitee_ai/test_rerank.py @@ -0,0 +1,47 @@ +import os + +import pytest + +from core.model_runtime.entities.rerank_entities import RerankResult +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.gitee_ai.rerank.rerank import GiteeAIRerankModel + + +def test_validate_credentials(): + model = GiteeAIRerankModel() + + with pytest.raises(CredentialsValidateFailedError): + model.validate_credentials( + model="bge-reranker-v2-m3", + credentials={"api_key": "invalid_key"}, + ) + + model.validate_credentials( + model="bge-reranker-v2-m3", + credentials={ + "api_key": os.environ.get("GITEE_AI_API_KEY"), + }, + ) + + +def test_invoke_model(): + model = GiteeAIRerankModel() + result = model.invoke( + model="bge-reranker-v2-m3", + credentials={ + "api_key": os.environ.get("GITEE_AI_API_KEY"), + }, + query="What is the capital of the United States?", + docs=[ + "Carson City is the capital city of the American state of Nevada. At the 2010 United States " + "Census, Carson City had a population of 55,274.", + "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that " + "are a political division controlled by the United States. 
Its capital is Saipan.", + ], + top_n=1, + score_threshold=0.01, + ) + + assert isinstance(result, RerankResult) + assert len(result.docs) == 1 + assert result.docs[0].score >= 0.01 diff --git a/api/tests/integration_tests/model_runtime/gitee_ai/test_speech2text.py b/api/tests/integration_tests/model_runtime/gitee_ai/test_speech2text.py new file mode 100644 index 0000000000..4a01453fdd --- /dev/null +++ b/api/tests/integration_tests/model_runtime/gitee_ai/test_speech2text.py @@ -0,0 +1,43 @@ +import os + +import pytest + +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.gitee_ai.speech2text.speech2text import GiteeAISpeech2TextModel + + +def test_validate_credentials(): + model = GiteeAISpeech2TextModel() + + with pytest.raises(CredentialsValidateFailedError): + model.validate_credentials( + model="whisper-base", + credentials={"api_key": "invalid_key"}, + ) + + model.validate_credentials( + model="whisper-base", + credentials={"api_key": os.environ.get("GITEE_AI_API_KEY")}, + ) + + +def test_invoke_model(): + model = GiteeAISpeech2TextModel() + + # Get the directory of the current file + current_dir = os.path.dirname(os.path.abspath(__file__)) + + # Get assets directory + assets_dir = os.path.join(os.path.dirname(current_dir), "assets") + + # Construct the path to the audio file + audio_file_path = os.path.join(assets_dir, "audio.mp3") + + # Invoke the model while the file object is still open + with open(audio_file_path, "rb") as audio_file: + result = model.invoke( + model="whisper-base", credentials={"api_key": os.environ.get("GITEE_AI_API_KEY")}, file=audio_file + ) + + assert isinstance(result, str) + assert result == "1 2 3 4 5 6 7 8 9 10" diff --git a/api/tests/integration_tests/model_runtime/gitee_ai/test_text_embedding.py b/api/tests/integration_tests/model_runtime/gitee_ai/test_text_embedding.py new file mode 100644 index 0000000000..34648f0bc8 --- /dev/null +++ b/api/tests/integration_tests/model_runtime/gitee_ai/test_text_embedding.py @@ -0,0 +1,46 @@ +import os + +import pytest + +from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.gitee_ai.text_embedding.text_embedding import GiteeAIEmbeddingModel + + +def test_validate_credentials(): + model = GiteeAIEmbeddingModel() + + with pytest.raises(CredentialsValidateFailedError): + model.validate_credentials(model="bge-large-zh-v1.5", credentials={"api_key": "invalid_key"}) + + model.validate_credentials(model="bge-large-zh-v1.5", credentials={"api_key": os.environ.get("GITEE_AI_API_KEY")}) + + +def test_invoke_model(): + model = GiteeAIEmbeddingModel() + + result = model.invoke( + model="bge-large-zh-v1.5", + credentials={ + "api_key": os.environ.get("GITEE_AI_API_KEY"), + }, + texts=["hello", "world"], + user="user", + ) + + assert isinstance(result, TextEmbeddingResult) + assert len(result.embeddings) == 2 + + +def test_get_num_tokens(): + model = GiteeAIEmbeddingModel() + + num_tokens = model.get_num_tokens( + model="bge-large-zh-v1.5", + credentials={ + "api_key": os.environ.get("GITEE_AI_API_KEY"), + }, + texts=["hello", "world"], + ) + + assert num_tokens == 2 diff --git a/api/tests/integration_tests/model_runtime/gitee_ai/test_tts.py b/api/tests/integration_tests/model_runtime/gitee_ai/test_tts.py new file mode 100644 index 0000000000..9f18161a7b --- /dev/null +++
b/api/tests/integration_tests/model_runtime/gitee_ai/test_tts.py @@ -0,0 +1,23 @@ +import os + +from core.model_runtime.model_providers.gitee_ai.tts.tts import GiteeAIText2SpeechModel + + +def test_invoke_model(): + model = GiteeAIText2SpeechModel() + + result = model.invoke( + model="speecht5_tts", + tenant_id="test", + credentials={ + "api_key": os.environ.get("GITEE_AI_API_KEY"), + }, + content_text="Hello, world!", + voice="", + ) + + content = b"" + for chunk in result: + content += chunk + + assert content != b"" diff --git a/api/tests/integration_tests/model_runtime/vessl_ai/__init__.py b/api/tests/integration_tests/model_runtime/vessl_ai/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/model_runtime/vessl_ai/test_llm.py b/api/tests/integration_tests/model_runtime/vessl_ai/test_llm.py new file mode 100644 index 0000000000..7797d0f8e4 --- /dev/null +++ b/api/tests/integration_tests/model_runtime/vessl_ai/test_llm.py @@ -0,0 +1,131 @@ +import os +from collections.abc import Generator + +import pytest + +from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta +from core.model_runtime.entities.message_entities import ( + AssistantPromptMessage, + SystemPromptMessage, + UserPromptMessage, +) +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.vessl_ai.llm.llm import VesslAILargeLanguageModel + + +def test_validate_credentials(): + model = VesslAILargeLanguageModel() + + with pytest.raises(CredentialsValidateFailedError): + model.validate_credentials( + model=os.environ.get("VESSL_AI_MODEL_NAME"), + credentials={ + "api_key": "invalid_key", + "endpoint_url": os.environ.get("VESSL_AI_ENDPOINT_URL"), + "mode": "chat", + }, + ) + + with pytest.raises(CredentialsValidateFailedError): + model.validate_credentials( + model=os.environ.get("VESSL_AI_MODEL_NAME"), + credentials={ + "api_key": os.environ.get("VESSL_AI_API_KEY"), + "endpoint_url": "http://invalid_url", + "mode": "chat", + }, + ) + + model.validate_credentials( + model=os.environ.get("VESSL_AI_MODEL_NAME"), + credentials={ + "api_key": os.environ.get("VESSL_AI_API_KEY"), + "endpoint_url": os.environ.get("VESSL_AI_ENDPOINT_URL"), + "mode": "chat", + }, + ) + + +def test_invoke_model(): + model = VesslAILargeLanguageModel() + + response = model.invoke( + model=os.environ.get("VESSL_AI_MODEL_NAME"), + credentials={ + "api_key": os.environ.get("VESSL_AI_API_KEY"), + "endpoint_url": os.environ.get("VESSL_AI_ENDPOINT_URL"), + "mode": "chat", + }, + prompt_messages=[ + SystemPromptMessage( + content="You are a helpful AI assistant.", + ), + UserPromptMessage(content="Who are you?"), + ], + model_parameters={ + "temperature": 1.0, + "top_k": 2, + "top_p": 0.5, + }, + stop=["How"], + stream=False, + user="abc-123", + ) + + assert isinstance(response, LLMResult) + assert len(response.message.content) > 0 + + +def test_invoke_stream_model(): + model = VesslAILargeLanguageModel() + + response = model.invoke( + model=os.environ.get("VESSL_AI_MODEL_NAME"), + credentials={ + "api_key": os.environ.get("VESSL_AI_API_KEY"), + "endpoint_url": os.environ.get("VESSL_AI_ENDPOINT_URL"), + "mode": "chat", + }, + prompt_messages=[ + SystemPromptMessage( + content="You are a helpful AI assistant.", + ), + UserPromptMessage(content="Who are you?"), + ], + model_parameters={ + "temperature": 1.0, + "top_k": 2, + "top_p": 0.5, + }, + stop=["How"], + stream=True, + user="abc-123", + ) + + assert 
isinstance(response, Generator) + + for chunk in response: + assert isinstance(chunk, LLMResultChunk) + assert isinstance(chunk.delta, LLMResultChunkDelta) + assert isinstance(chunk.delta.message, AssistantPromptMessage) + + +def test_get_num_tokens(): + model = VesslAILargeLanguageModel() + + num_tokens = model.get_num_tokens( + model=os.environ.get("VESSL_AI_MODEL_NAME"), + credentials={ + "api_key": os.environ.get("VESSL_AI_API_KEY"), + "endpoint_url": os.environ.get("VESSL_AI_ENDPOINT_URL"), + }, + prompt_messages=[ + SystemPromptMessage( + content="You are a helpful AI assistant.", + ), + UserPromptMessage(content="Hello World!"), + ], + ) + + assert isinstance(num_tokens, int) + assert num_tokens == 21 diff --git a/api/tests/integration_tests/vdb/__mock/upstashvectordb.py b/api/tests/integration_tests/vdb/__mock/upstashvectordb.py new file mode 100644 index 0000000000..c93292bd8a --- /dev/null +++ b/api/tests/integration_tests/vdb/__mock/upstashvectordb.py @@ -0,0 +1,75 @@ +import os +from typing import Optional + +import pytest +from _pytest.monkeypatch import MonkeyPatch +from upstash_vector import Index + + +# Mocking the Index class from upstash_vector +class MockIndex: + def __init__(self, url="", token=""): + self.url = url + self.token = token + self.vectors = [] + + def upsert(self, vectors): + for vector in vectors: + vector.score = 0.5 + self.vectors.append(vector) + return {"code": 0, "msg": "operation success", "affectedCount": len(vectors)} + + def fetch(self, ids): + return [vector for vector in self.vectors if vector.id in ids] + + def delete(self, ids): + self.vectors = [vector for vector in self.vectors if vector.id not in ids] + return {"code": 0, "msg": "Success"} + + def query( + self, + vector: None, + top_k: int = 10, + include_vectors: bool = False, + include_metadata: bool = False, + filter: str = "", + data: Optional[str] = None, + namespace: str = "", + include_data: bool = False, + ): + # Simple mock query, in real scenario you would calculate similarity + mock_result = [] + for vector_data in self.vectors: + mock_result.append(vector_data) + return mock_result[:top_k] + + def reset(self): + self.vectors = [] + + def info(self): + return AttrDict({"dimension": 1024}) + + +class AttrDict(dict): + def __getattr__(self, item): + return self.get(item) + + +MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true" + + +@pytest.fixture +def setup_upstashvector_mock(request, monkeypatch: MonkeyPatch): + if MOCK: + monkeypatch.setattr(Index, "__init__", MockIndex.__init__) + monkeypatch.setattr(Index, "upsert", MockIndex.upsert) + monkeypatch.setattr(Index, "fetch", MockIndex.fetch) + monkeypatch.setattr(Index, "delete", MockIndex.delete) + monkeypatch.setattr(Index, "query", MockIndex.query) + monkeypatch.setattr(Index, "reset", MockIndex.reset) + monkeypatch.setattr(Index, "info", MockIndex.info) + + yield + + if MOCK: + monkeypatch.undo() diff --git a/api/tests/integration_tests/vdb/couchbase/__init__.py b/api/tests/integration_tests/vdb/couchbase/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/vdb/couchbase/test_couchbase.py b/api/tests/integration_tests/vdb/couchbase/test_couchbase.py new file mode 100644 index 0000000000..d76c34ba0e --- /dev/null +++ b/api/tests/integration_tests/vdb/couchbase/test_couchbase.py @@ -0,0 +1,50 @@ +import subprocess +import time + +from core.rag.datasource.vdb.couchbase.couchbase_vector import CouchbaseConfig, CouchbaseVector +from 
tests.integration_tests.vdb.test_vector_store import ( + AbstractVectorTest, + get_example_text, + setup_mock_redis, +) + + +def wait_for_healthy_container(service_name="couchbase-server", timeout=300): + start_time = time.time() + while time.time() - start_time < timeout: + result = subprocess.run( + ["docker", "inspect", "--format", "{{.State.Health.Status}}", service_name], capture_output=True, text=True + ) + if result.stdout.strip() == "healthy": + print(f"{service_name} is healthy!") + return True + else: + print(f"Waiting for {service_name} to be healthy...") + time.sleep(10) + raise TimeoutError(f"{service_name} did not become healthy in time") + + +class CouchbaseTest(AbstractVectorTest): + def __init__(self): + super().__init__() + self.vector = CouchbaseVector( + collection_name=self.collection_name, + config=CouchbaseConfig( + connection_string="couchbase://127.0.0.1", + user="Administrator", + password="password", + bucket_name="Embeddings", + scope_name="_default", + ), + ) + + def search_by_vector(self): + # brief sleep to ensure document is indexed + time.sleep(5) + hits_by_vector = self.vector.search_by_vector(query_vector=self.example_embedding) + assert len(hits_by_vector) == 1 + + +def test_couchbase(setup_mock_redis): + wait_for_healthy_container("couchbase-server", timeout=60) + CouchbaseTest().run_all_tests() diff --git a/api/tests/integration_tests/vdb/oceanbase/__init__.py b/api/tests/integration_tests/vdb/oceanbase/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/vdb/oceanbase/test_oceanbase.py b/api/tests/integration_tests/vdb/oceanbase/test_oceanbase.py new file mode 100644 index 0000000000..ebcb134168 --- /dev/null +++ b/api/tests/integration_tests/vdb/oceanbase/test_oceanbase.py @@ -0,0 +1,71 @@ +from unittest.mock import MagicMock, patch + +import pytest + +from core.rag.datasource.vdb.oceanbase.oceanbase_vector import ( + OceanBaseVector, + OceanBaseVectorConfig, +) +from tests.integration_tests.vdb.__mock.tcvectordb import setup_tcvectordb_mock +from tests.integration_tests.vdb.test_vector_store import ( + AbstractVectorTest, + get_example_text, + setup_mock_redis, +) + + +@pytest.fixture +def oceanbase_vector(): + return OceanBaseVector( + "dify_test_collection", + config=OceanBaseVectorConfig( + host="127.0.0.1", + port="2881", + user="root@test", + database="test", + password="test", + ), + ) + + +class OceanBaseVectorTest(AbstractVectorTest): + def __init__(self, vector: OceanBaseVector): + super().__init__() + self.vector = vector + + def search_by_vector(self): + hits_by_vector = self.vector.search_by_vector(query_vector=self.example_embedding) + assert len(hits_by_vector) == 0 + + def search_by_full_text(self): + hits_by_full_text = self.vector.search_by_full_text(query=get_example_text()) + assert len(hits_by_full_text) == 0 + + def text_exists(self): + exist = self.vector.text_exists(self.example_doc_id) + assert exist == True + + def get_ids_by_metadata_field(self): + ids = self.vector.get_ids_by_metadata_field(key="document_id", value=self.example_doc_id) + assert len(ids) == 0 + + +@pytest.fixture +def setup_mock_oceanbase_client(): + with patch("core.rag.datasource.vdb.oceanbase.oceanbase_vector.ObVecClient", new_callable=MagicMock) as mock_client: + yield mock_client + + +@pytest.fixture +def setup_mock_oceanbase_vector(oceanbase_vector): + with patch.object(oceanbase_vector, "_client"): + yield oceanbase_vector + + +def test_oceanbase_vector( + setup_mock_redis, + 
setup_mock_oceanbase_client, + setup_mock_oceanbase_vector, + oceanbase_vector, +): + OceanBaseVectorTest(oceanbase_vector).run_all_tests() diff --git a/api/tests/integration_tests/vdb/upstash/__init__.py b/api/tests/integration_tests/vdb/upstash/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/vdb/upstash/test_upstash_vector.py b/api/tests/integration_tests/vdb/upstash/test_upstash_vector.py new file mode 100644 index 0000000000..23470474ff --- /dev/null +++ b/api/tests/integration_tests/vdb/upstash/test_upstash_vector.py @@ -0,0 +1,28 @@ +from core.rag.datasource.vdb.upstash.upstash_vector import UpstashVector, UpstashVectorConfig +from core.rag.models.document import Document +from tests.integration_tests.vdb.__mock.upstashvectordb import setup_upstashvector_mock +from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text + + +class UpstashVectorTest(AbstractVectorTest): + def __init__(self): + super().__init__() + self.vector = UpstashVector( + collection_name="test_collection", + config=UpstashVectorConfig( + url="your-server-url", + token="your-access-token", + ), + ) + + def get_ids_by_metadata_field(self): + ids = self.vector.get_ids_by_metadata_field(key="document_id", value=self.example_doc_id) + assert len(ids) != 0 + + def search_by_full_text(self): + hits_by_full_text: list[Document] = self.vector.search_by_full_text(query=get_example_text()) + assert len(hits_by_full_text) == 0 + + +def test_upstash_vector(setup_upstashvector_mock): + UpstashVectorTest().run_all_tests() diff --git a/api/tests/integration_tests/workflow/nodes/test_http.py b/api/tests/integration_tests/workflow/nodes/test_http.py index 9eea63f722..0da6622658 100644 --- a/api/tests/integration_tests/workflow/nodes/test_http.py +++ b/api/tests/integration_tests/workflow/nodes/test_http.py @@ -430,3 +430,37 @@ def test_multi_colons_parse(setup_http_mock): assert urlencode({"Redirect": "http://example2.com"}) in result.process_data.get("request", "") assert 'form-data; name="Redirect"\r\n\r\nhttp://example6.com' in result.process_data.get("request", "") # assert "http://example3.com" == resp.get("headers", {}).get("referer") + + +def test_image_file(monkeypatch): + from types import SimpleNamespace + + monkeypatch.setattr( + "core.tools.tool_file_manager.ToolFileManager.create_file_by_raw", + lambda *args, **kwargs: SimpleNamespace(id="1"), + ) + + node = init_http_node( + config={ + "id": "1", + "data": { + "title": "http", + "desc": "", + "method": "get", + "url": "https://cloud.dify.ai/logo/logo-site.png", + "authorization": { + "type": "no-auth", + "config": None, + }, + "params": "", + "headers": "", + "body": None, + }, + } + ) + + result = node._run() + assert result.process_data is not None + assert result.outputs is not None + resp = result.outputs + assert len(resp.get("files", [])) == 1 diff --git a/api/tests/unit_tests/controllers/test_compare_versions.py b/api/tests/unit_tests/controllers/test_compare_versions.py index 87902b6d44..9db57a8446 100644 --- a/api/tests/unit_tests/controllers/test_compare_versions.py +++ b/api/tests/unit_tests/controllers/test_compare_versions.py @@ -22,17 +22,3 @@ from controllers.console.version import _has_new_version ) def test_has_new_version(latest_version, current_version, expected): assert _has_new_version(latest_version=latest_version, current_version=current_version) == expected - - -def test_has_new_version_invalid_input(): - with pytest.raises(ValueError): - 
_has_new_version(latest_version="1.0", current_version="1.0.0") - - with pytest.raises(ValueError): - _has_new_version(latest_version="1.0.0", current_version="1.0") - - with pytest.raises(ValueError): - _has_new_version(latest_version="invalid", current_version="1.0.0") - - with pytest.raises(ValueError): - _has_new_version(latest_version="1.0.0", current_version="invalid") diff --git a/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py b/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py new file mode 100644 index 0000000000..def6c2a232 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py @@ -0,0 +1,125 @@ +import pytest + +from core.app.entities.app_invoke_entities import InvokeFrom +from core.file import File, FileTransferMethod, FileType +from core.model_runtime.entities.message_entities import ImagePromptMessageContent +from core.variables import ArrayAnySegment, ArrayFileSegment, NoneSegment +from core.workflow.entities.variable_pool import VariablePool +from core.workflow.graph_engine import Graph, GraphInitParams, GraphRuntimeState +from core.workflow.nodes.answer import AnswerStreamGenerateRoute +from core.workflow.nodes.end import EndStreamParam +from core.workflow.nodes.llm.entities import ContextConfig, LLMNodeData, ModelConfig, VisionConfig, VisionConfigOptions +from core.workflow.nodes.llm.node import LLMNode +from models.enums import UserFrom +from models.workflow import WorkflowType + + +class TestLLMNode: + @pytest.fixture + def llm_node(self): + data = LLMNodeData( + title="Test LLM", + model=ModelConfig(provider="openai", name="gpt-3.5-turbo", mode="chat", completion_params={}), + prompt_template=[], + memory=None, + context=ContextConfig(enabled=False), + vision=VisionConfig( + enabled=True, + configs=VisionConfigOptions( + variable_selector=["sys", "files"], + detail=ImagePromptMessageContent.DETAIL.HIGH, + ), + ), + ) + variable_pool = VariablePool( + system_variables={}, + user_inputs={}, + ) + node = LLMNode( + id="1", + config={ + "id": "1", + "data": data.model_dump(), + }, + graph_init_params=GraphInitParams( + tenant_id="1", + app_id="1", + workflow_type=WorkflowType.WORKFLOW, + workflow_id="1", + graph_config={}, + user_id="1", + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.SERVICE_API, + call_depth=0, + ), + graph=Graph( + root_node_id="1", + answer_stream_generate_routes=AnswerStreamGenerateRoute( + answer_dependencies={}, + answer_generate_route={}, + ), + end_stream_param=EndStreamParam( + end_dependencies={}, + end_stream_variable_selector_mapping={}, + ), + ), + graph_runtime_state=GraphRuntimeState( + variable_pool=variable_pool, + start_at=0, + ), + ) + return node + + def test_fetch_files_with_file_segment(self, llm_node): + file = File( + id="1", + tenant_id="test", + type=FileType.IMAGE, + filename="test.jpg", + transfer_method=FileTransferMethod.LOCAL_FILE, + related_id="1", + ) + llm_node.graph_runtime_state.variable_pool.add(["sys", "files"], file) + + result = llm_node._fetch_files(selector=["sys", "files"]) + assert result == [file] + + def test_fetch_files_with_array_file_segment(self, llm_node): + files = [ + File( + id="1", + tenant_id="test", + type=FileType.IMAGE, + filename="test1.jpg", + transfer_method=FileTransferMethod.LOCAL_FILE, + related_id="1", + ), + File( + id="2", + tenant_id="test", + type=FileType.IMAGE, + filename="test2.jpg", + transfer_method=FileTransferMethod.LOCAL_FILE, + related_id="2", + ), + ] + llm_node.graph_runtime_state.variable_pool.add(["sys", "files"], 
ArrayFileSegment(value=files)) + + result = llm_node._fetch_files(selector=["sys", "files"]) + assert result == files + + def test_fetch_files_with_none_segment(self, llm_node): + llm_node.graph_runtime_state.variable_pool.add(["sys", "files"], NoneSegment()) + + result = llm_node._fetch_files(selector=["sys", "files"]) + assert result == [] + + def test_fetch_files_with_array_any_segment(self, llm_node): + llm_node.graph_runtime_state.variable_pool.add(["sys", "files"], ArrayAnySegment(value=[])) + + result = llm_node._fetch_files(selector=["sys", "files"]) + assert result == [] + + def test_fetch_files_with_non_existent_variable(self, llm_node): + result = llm_node._fetch_files(selector=["sys", "files"]) + assert result == [] diff --git a/api/tests/unit_tests/core/workflow/nodes/test_document_extractor_node.py b/api/tests/unit_tests/core/workflow/nodes/test_document_extractor_node.py index 7471e13e1e..4f1f8f05c8 100644 --- a/api/tests/unit_tests/core/workflow/nodes/test_document_extractor_node.py +++ b/api/tests/unit_tests/core/workflow/nodes/test_document_extractor_node.py @@ -63,17 +63,24 @@ def test_run_invalid_variable_type(document_extractor_node, mock_graph_runtime_s @pytest.mark.parametrize( - ("mime_type", "file_content", "expected_text", "transfer_method"), + ("mime_type", "file_content", "expected_text", "transfer_method", "extension"), [ - ("text/plain", b"Hello, world!", ["Hello, world!"], FileTransferMethod.LOCAL_FILE), - ("application/pdf", b"%PDF-1.5\n%Test PDF content", ["Mocked PDF content"], FileTransferMethod.LOCAL_FILE), + ("text/plain", b"Hello, world!", ["Hello, world!"], FileTransferMethod.LOCAL_FILE, ".txt"), + ( + "application/pdf", + b"%PDF-1.5\n%Test PDF content", + ["Mocked PDF content"], + FileTransferMethod.LOCAL_FILE, + ".pdf", + ), ( "application/vnd.openxmlformats-officedocument.wordprocessingml.document", b"PK\x03\x04", ["Mocked DOCX content"], - FileTransferMethod.LOCAL_FILE, + FileTransferMethod.REMOTE_URL, + "", ), - ("text/plain", b"Remote content", ["Remote content"], FileTransferMethod.REMOTE_URL), + ("text/plain", b"Remote content", ["Remote content"], FileTransferMethod.REMOTE_URL, None), ], ) def test_run_extract_text( @@ -83,6 +90,7 @@ def test_run_extract_text( file_content, expected_text, transfer_method, + extension, monkeypatch, ): document_extractor_node.graph_runtime_state = mock_graph_runtime_state @@ -92,6 +100,7 @@ def test_run_extract_text( mock_file.transfer_method = transfer_method mock_file.related_id = "test_file_id" if transfer_method == FileTransferMethod.LOCAL_FILE else None mock_file.remote_url = "https://example.com/file.txt" if transfer_method == FileTransferMethod.REMOTE_URL else None + mock_file.extension = extension mock_array_file_segment = Mock(spec=ArrayFileSegment) mock_array_file_segment.value = [mock_file] @@ -116,7 +125,7 @@ def test_run_extract_text( result = document_extractor_node._run() assert isinstance(result, NodeRunResult) - assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED + assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED, result.error assert result.outputs is not None assert result.outputs["text"] == expected_text diff --git a/api/tests/unit_tests/core/workflow/nodes/test_http_request_node.py b/api/tests/unit_tests/core/workflow/nodes/test_http_request_node.py index 2a5fda48b1..720037d05f 100644 --- a/api/tests/unit_tests/core/workflow/nodes/test_http_request_node.py +++ b/api/tests/unit_tests/core/workflow/nodes/test_http_request_node.py @@ -192,7 +192,7 @@ def 
test_http_request_node_form_with_file(monkeypatch): def attr_checker(*args, **kwargs): assert kwargs["data"] == {"name": "test"} - assert kwargs["files"] == {"file": b"test"} + assert kwargs["files"] == {"file": (None, b"test", "application/octet-stream")} return httpx.Response(200, content=b"") monkeypatch.setattr( diff --git a/api/tests/unit_tests/core/workflow/nodes/test_if_else.py b/api/tests/unit_tests/core/workflow/nodes/test_if_else.py index 8f38d3f280..d964d0e352 100644 --- a/api/tests/unit_tests/core/workflow/nodes/test_if_else.py +++ b/api/tests/unit_tests/core/workflow/nodes/test_if_else.py @@ -55,6 +55,7 @@ def test_execute_if_else_result_true(): pool.add(["start", "less_than"], 21) pool.add(["start", "greater_than_or_equal"], 22) pool.add(["start", "less_than_or_equal"], 21) + pool.add(["start", "null"], None) pool.add(["start", "not_null"], "1212") node = IfElseNode( diff --git a/api/tests/unit_tests/core/workflow/test_variable_pool.py b/api/tests/unit_tests/core/workflow/test_variable_pool.py index a1e4dda627..9ea6acac17 100644 --- a/api/tests/unit_tests/core/workflow/test_variable_pool.py +++ b/api/tests/unit_tests/core/workflow/test_variable_pool.py @@ -33,8 +33,8 @@ def test_get_file_attribute(pool, file): assert result.value == file.filename # Test getting a non-existent attribute - with pytest.raises(ValueError): - pool.get(("node_1", "file_var", "non_existent_attr")) + result = pool.get(("node_1", "file_var", "non_existent_attr")) + assert result is None def test_use_long_selector(pool): diff --git a/api/tests/unit_tests/oss/__mock/aliyun_oss.py b/api/tests/unit_tests/oss/__mock/aliyun_oss.py new file mode 100644 index 0000000000..27e1c0ad85 --- /dev/null +++ b/api/tests/unit_tests/oss/__mock/aliyun_oss.py @@ -0,0 +1,100 @@ +import os +import posixpath +from unittest.mock import MagicMock + +import pytest +from _pytest.monkeypatch import MonkeyPatch +from oss2 import Bucket +from oss2.models import GetObjectResult, PutObjectResult + +from tests.unit_tests.oss.__mock.base import ( + get_example_bucket, + get_example_data, + get_example_filename, + get_example_filepath, + get_example_folder, +) + + +class MockResponse: + def __init__(self, status, headers, request_id): + self.status = status + self.headers = headers + self.request_id = request_id + + +class MockAliyunOssClass: + def __init__( + self, + auth, + endpoint, + bucket_name, + is_cname=False, + session=None, + connect_timeout=None, + app_name="", + enable_crc=True, + proxies=None, + region=None, + cloudbox_id=None, + is_path_style=False, + is_verify_object_strict=True, + ): + self.bucket_name = get_example_bucket() + self.key = posixpath.join(get_example_folder(), get_example_filename()) + self.content = get_example_data() + self.filepath = get_example_filepath() + self.resp = MockResponse( + 200, + { + "etag": "ee8de918d05640145b18f70f4c3aa602", + "x-oss-version-id": "CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****", + }, + "request_id", + ) + + def put_object(self, key, data, headers=None, progress_callback=None): + assert key == self.key + assert data == self.content + return PutObjectResult(self.resp) + + def get_object(self, key, byte_range=None, headers=None, progress_callback=None, process=None, params=None): + assert key == self.key + + get_object_output = MagicMock(GetObjectResult) + get_object_output.read.return_value = self.content + return get_object_output + + def get_object_to_file( + self, key, filename, byte_range=None, headers=None, progress_callback=None, process=None, 
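+ # Signature mirrors the oss2.Bucket method this mock replaces via the
+ # setup_aliyun_oss_mock fixture below; only the key/filepath contract is asserted.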
params=None + ): + assert key == self.key + assert filename == self.filepath + + def object_exists(self, key, headers=None): + assert key == self.key + return True + + def delete_object(self, key, params=None, headers=None): + assert key == self.key + self.resp.headers["x-oss-delete-marker"] = True + return self.resp + + +MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true" + + +@pytest.fixture +def setup_aliyun_oss_mock(monkeypatch: MonkeyPatch): + if MOCK: + monkeypatch.setattr(Bucket, "__init__", MockAliyunOssClass.__init__) + monkeypatch.setattr(Bucket, "put_object", MockAliyunOssClass.put_object) + monkeypatch.setattr(Bucket, "get_object", MockAliyunOssClass.get_object) + monkeypatch.setattr(Bucket, "get_object_to_file", MockAliyunOssClass.get_object_to_file) + monkeypatch.setattr(Bucket, "object_exists", MockAliyunOssClass.object_exists) + monkeypatch.setattr(Bucket, "delete_object", MockAliyunOssClass.delete_object) + + yield + + if MOCK: + monkeypatch.undo() diff --git a/api/tests/unit_tests/oss/__mock/base.py b/api/tests/unit_tests/oss/__mock/base.py new file mode 100644 index 0000000000..a1eaaab9c3 --- /dev/null +++ b/api/tests/unit_tests/oss/__mock/base.py @@ -0,0 +1,58 @@ +from collections.abc import Generator + +import pytest + +from extensions.storage.base_storage import BaseStorage + + +def get_example_folder() -> str: + return "/dify" + + +def get_example_bucket() -> str: + return "dify" + + +def get_example_filename() -> str: + return "test.txt" + + +def get_example_data() -> bytes: + return b"test" + + +def get_example_filepath() -> str: + return "/test" + + +class BaseStorageTest: + @pytest.fixture(autouse=True) + def setup_method(self): + """Should be implemented in child classes to setup specific storage.""" + self.storage = BaseStorage() + + def test_save(self): + """Test saving data.""" + self.storage.save(get_example_filename(), get_example_data()) + + def test_load_once(self): + """Test loading data once.""" + assert self.storage.load_once(get_example_filename()) == get_example_data() + + def test_load_stream(self): + """Test loading data as a stream.""" + generator = self.storage.load_stream(get_example_filename()) + assert isinstance(generator, Generator) + assert next(generator) == get_example_data() + + def test_download(self): + """Test downloading data.""" + self.storage.download(get_example_filename(), get_example_filepath()) + + def test_exists(self): + """Test checking if a file exists.""" + assert self.storage.exists(get_example_filename()) + + def test_delete(self): + """Test deleting a file.""" + self.storage.delete(get_example_filename()) diff --git a/api/tests/unit_tests/oss/__mock/local.py b/api/tests/unit_tests/oss/__mock/local.py new file mode 100644 index 0000000000..95cc06958c --- /dev/null +++ b/api/tests/unit_tests/oss/__mock/local.py @@ -0,0 +1,57 @@ +import os +import shutil +from pathlib import Path +from unittest.mock import MagicMock, mock_open, patch + +import pytest +from _pytest.monkeypatch import MonkeyPatch + +from tests.unit_tests.oss.__mock.base import ( + get_example_data, + get_example_filename, + get_example_filepath, + get_example_folder, +) + + +class MockLocalFSClass: + def write_bytes(self, data): + assert data == get_example_data() + + def read_bytes(self): + return get_example_data() + + @staticmethod + def copyfile(src, dst): + assert src == os.path.join(get_example_folder(), get_example_filename()) + assert dst == get_example_filepath() + + @staticmethod + def exists(path): + assert path == 
os.path.join(get_example_folder(), get_example_filename()) + return True + + @staticmethod + def remove(path): + assert path == os.path.join(get_example_folder(), get_example_filename()) + + +MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true" + + +@pytest.fixture +def setup_local_fs_mock(monkeypatch: MonkeyPatch): + if MOCK: + monkeypatch.setattr(Path, "write_bytes", MockLocalFSClass.write_bytes) + monkeypatch.setattr(Path, "read_bytes", MockLocalFSClass.read_bytes) + monkeypatch.setattr(shutil, "copyfile", MockLocalFSClass.copyfile) + monkeypatch.setattr(os.path, "exists", MockLocalFSClass.exists) + monkeypatch.setattr(os, "remove", MockLocalFSClass.remove) + + os.makedirs = MagicMock() + + with patch("builtins.open", mock_open(read_data=get_example_data())): + yield + + if MOCK: + monkeypatch.undo() diff --git a/api/tests/unit_tests/oss/__mock/tencent_cos.py b/api/tests/unit_tests/oss/__mock/tencent_cos.py new file mode 100644 index 0000000000..5189b68e87 --- /dev/null +++ b/api/tests/unit_tests/oss/__mock/tencent_cos.py @@ -0,0 +1,81 @@ +import os +from unittest.mock import MagicMock + +import pytest +from _pytest.monkeypatch import MonkeyPatch +from qcloud_cos import CosS3Client +from qcloud_cos.streambody import StreamBody + +from tests.unit_tests.oss.__mock.base import ( + get_example_bucket, + get_example_data, + get_example_filename, + get_example_filepath, +) + + +class MockTencentCosClass: + def __init__(self, conf, retry=1, session=None): + self.bucket_name = get_example_bucket() + self.key = get_example_filename() + self.content = get_example_data() + self.filepath = get_example_filepath() + self.resp = { + "ETag": "ee8de918d05640145b18f70f4c3aa602", + "Server": "tencent-cos", + "x-cos-hash-crc64ecma": 16749565679157681890, + "x-cos-request-id": "NWU5MDNkYzlfNjRiODJhMDlfMzFmYzhfMTFm****", + } + + def put_object(self, Bucket, Body, Key, EnableMD5=False, **kwargs): # noqa: N803 + assert Bucket == self.bucket_name + assert Key == self.key + assert Body == self.content + return self.resp + + def get_object(self, Bucket, Key, KeySimplifyCheck=True, **kwargs): # noqa: N803 + assert Bucket == self.bucket_name + assert Key == self.key + + mock_stream_body = MagicMock(StreamBody) + mock_raw_stream = MagicMock() + mock_stream_body.get_raw_stream.return_value = mock_raw_stream + mock_raw_stream.read.return_value = self.content + + mock_stream_body.get_stream_to_file = MagicMock() + + def chunk_generator(chunk_size=2): + for i in range(0, len(self.content), chunk_size): + yield self.content[i : i + chunk_size] + + mock_stream_body.get_stream.return_value = chunk_generator(chunk_size=4096) + return {"Body": mock_stream_body} + + def object_exists(self, Bucket, Key): # noqa: N803 + assert Bucket == self.bucket_name + assert Key == self.key + return True + + def delete_object(self, Bucket, Key, **kwargs): # noqa: N803 + assert Bucket == self.bucket_name + assert Key == self.key + self.resp.update({"x-cos-delete-marker": True}) + return self.resp + + +MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true" + + +@pytest.fixture +def setup_tencent_cos_mock(monkeypatch: MonkeyPatch): + if MOCK: + monkeypatch.setattr(CosS3Client, "__init__", MockTencentCosClass.__init__) + monkeypatch.setattr(CosS3Client, "put_object", MockTencentCosClass.put_object) + monkeypatch.setattr(CosS3Client, "get_object", MockTencentCosClass.get_object) + monkeypatch.setattr(CosS3Client, "object_exists", MockTencentCosClass.object_exists) + monkeypatch.setattr(CosS3Client, "delete_object", 
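+ # These mock methods are plain functions patched onto CosS3Client, so `self`
+ # is the client instance whose attributes were set by the patched __init__ above.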
MockTencentCosClass.delete_object) + + yield + + if MOCK: + monkeypatch.undo() diff --git a/api/tests/unit_tests/oss/__mock/volcengine_tos.py b/api/tests/unit_tests/oss/__mock/volcengine_tos.py index 241764c521..1194a03258 100644 --- a/api/tests/unit_tests/oss/__mock/volcengine_tos.py +++ b/api/tests/unit_tests/oss/__mock/volcengine_tos.py @@ -1,5 +1,4 @@ import os -from typing import Union from unittest.mock import MagicMock import pytest @@ -7,28 +6,19 @@ from _pytest.monkeypatch import MonkeyPatch from tos import TosClientV2 from tos.clientv2 import DeleteObjectOutput, GetObjectOutput, HeadObjectOutput, PutObjectOutput +from tests.unit_tests.oss.__mock.base import ( + get_example_bucket, + get_example_data, + get_example_filename, + get_example_filepath, +) + class AttrDict(dict): def __getattr__(self, item): return self.get(item) -def get_example_bucket() -> str: - return "dify" - - -def get_example_filename() -> str: - return "test.txt" - - -def get_example_data() -> bytes: - return b"test" - - -def get_example_filepath() -> str: - return "/test" - - class MockVolcengineTosClass: def __init__(self, ak="", sk="", endpoint="", region=""): self.bucket_name = get_example_bucket() diff --git a/api/tests/unit_tests/oss/aliyun_oss/aliyun_oss/__init__.py b/api/tests/unit_tests/oss/aliyun_oss/aliyun_oss/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/oss/aliyun_oss/aliyun_oss/test_aliyun_oss.py b/api/tests/unit_tests/oss/aliyun_oss/aliyun_oss/test_aliyun_oss.py new file mode 100644 index 0000000000..65d31352bd --- /dev/null +++ b/api/tests/unit_tests/oss/aliyun_oss/aliyun_oss/test_aliyun_oss.py @@ -0,0 +1,22 @@ +from unittest.mock import MagicMock, patch + +import pytest +from oss2 import Auth + +from extensions.storage.aliyun_oss_storage import AliyunOssStorage +from tests.unit_tests.oss.__mock.aliyun_oss import setup_aliyun_oss_mock +from tests.unit_tests.oss.__mock.base import ( + BaseStorageTest, + get_example_bucket, + get_example_folder, +) + + +class TestAliyunOss(BaseStorageTest): + @pytest.fixture(autouse=True) + def setup_method(self, setup_aliyun_oss_mock): + """Executed before each test method.""" + with patch.object(Auth, "__init__", return_value=None): + self.storage = AliyunOssStorage() + self.storage.bucket_name = get_example_bucket() + self.storage.folder = get_example_folder() diff --git a/api/tests/unit_tests/oss/local/test_local_fs.py b/api/tests/unit_tests/oss/local/test_local_fs.py new file mode 100644 index 0000000000..03ce7d2450 --- /dev/null +++ b/api/tests/unit_tests/oss/local/test_local_fs.py @@ -0,0 +1,18 @@ +from collections.abc import Generator + +import pytest + +from extensions.storage.local_fs_storage import LocalFsStorage +from tests.unit_tests.oss.__mock.base import ( + BaseStorageTest, + get_example_folder, +) +from tests.unit_tests.oss.__mock.local import setup_local_fs_mock + + +class TestLocalFS(BaseStorageTest): + @pytest.fixture(autouse=True) + def setup_method(self, setup_local_fs_mock): + """Executed before each test method.""" + self.storage = LocalFsStorage() + self.storage.folder = get_example_folder() diff --git a/api/tests/unit_tests/oss/tencent_cos/__init__.py b/api/tests/unit_tests/oss/tencent_cos/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/oss/tencent_cos/test_tencent_cos.py b/api/tests/unit_tests/oss/tencent_cos/test_tencent_cos.py new file mode 100644 index 0000000000..303f0493bd --- /dev/null +++ 
b/api/tests/unit_tests/oss/tencent_cos/test_tencent_cos.py @@ -0,0 +1,20 @@ +from unittest.mock import patch + +import pytest +from qcloud_cos import CosConfig + +from extensions.storage.tencent_cos_storage import TencentCosStorage +from tests.unit_tests.oss.__mock.base import ( + BaseStorageTest, + get_example_bucket, +) +from tests.unit_tests.oss.__mock.tencent_cos import setup_tencent_cos_mock + + +class TestTencentCos(BaseStorageTest): + @pytest.fixture(autouse=True) + def setup_method(self, setup_tencent_cos_mock): + """Executed before each test method.""" + with patch.object(CosConfig, "__init__", return_value=None): + self.storage = TencentCosStorage() + self.storage.bucket_name = get_example_bucket() diff --git a/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py b/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py index 545d18044d..5afbc9e8b4 100644 --- a/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py +++ b/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py @@ -1,30 +1,18 @@ -from collections.abc import Generator - -from flask import Flask +import pytest from tos import TosClientV2 -from tos.clientv2 import GetObjectOutput, HeadObjectOutput, PutObjectOutput from extensions.storage.volcengine_tos_storage import VolcengineTosStorage -from tests.unit_tests.oss.__mock.volcengine_tos import ( +from tests.unit_tests.oss.__mock.base import ( + BaseStorageTest, get_example_bucket, - get_example_data, - get_example_filename, - get_example_filepath, - setup_volcengine_tos_mock, ) +from tests.unit_tests.oss.__mock.volcengine_tos import setup_volcengine_tos_mock -class VolcengineTosTest: - _instance = None - - def __new__(cls): - if cls._instance == None: - cls._instance = object.__new__(cls) - return cls._instance - else: - return cls._instance - - def __init__(self): +class TestVolcengineTos(BaseStorageTest): + @pytest.fixture(autouse=True) + def setup_method(self, setup_volcengine_tos_mock): + """Executed before each test method.""" self.storage = VolcengineTosStorage() self.storage.bucket_name = get_example_bucket() self.storage.client = TosClientV2( @@ -33,35 +21,3 @@ class VolcengineTosTest: endpoint="https://xxx.volces.com", region="cn-beijing", ) - - -def test_save(setup_volcengine_tos_mock): - volc_tos = VolcengineTosTest() - volc_tos.storage.save(get_example_filename(), get_example_data()) - - -def test_load_once(setup_volcengine_tos_mock): - volc_tos = VolcengineTosTest() - assert volc_tos.storage.load_once(get_example_filename()) == get_example_data() - - -def test_load_stream(setup_volcengine_tos_mock): - volc_tos = VolcengineTosTest() - generator = volc_tos.storage.load_stream(get_example_filename()) - assert isinstance(generator, Generator) - assert next(generator) == get_example_data() - - -def test_download(setup_volcengine_tos_mock): - volc_tos = VolcengineTosTest() - volc_tos.storage.download(get_example_filename(), get_example_filepath()) - - -def test_exists(setup_volcengine_tos_mock): - volc_tos = VolcengineTosTest() - assert volc_tos.storage.exists(get_example_filename()) - - -def test_delete(setup_volcengine_tos_mock): - volc_tos = VolcengineTosTest() - volc_tos.storage.delete(get_example_filename()) diff --git a/api/tests/unit_tests/services/app_dsl_service/test_app_dsl_service.py b/api/tests/unit_tests/services/app_dsl_service/test_app_dsl_service.py new file mode 100644 index 0000000000..842e8268d1 --- /dev/null +++ b/api/tests/unit_tests/services/app_dsl_service/test_app_dsl_service.py @@ -0,0 +1,47 @@ +import 
pytest +from packaging import version + +from services.app_dsl_service import AppDslService +from services.app_dsl_service.exc import DSLVersionNotSupportedError +from services.app_dsl_service.service import _check_or_fix_dsl, current_dsl_version + + +class TestAppDSLService: + @pytest.mark.skip(reason="Test skipped") + def test_check_or_fix_dsl_missing_version(self): + import_data = {} + result = _check_or_fix_dsl(import_data) + assert result["version"] == "0.1.0" + assert result["kind"] == "app" + + @pytest.mark.skip(reason="Test skipped") + def test_check_or_fix_dsl_missing_kind(self): + import_data = {"version": "0.1.0"} + result = _check_or_fix_dsl(import_data) + assert result["kind"] == "app" + + @pytest.mark.skip(reason="Test skipped") + def test_check_or_fix_dsl_older_version(self): + import_data = {"version": "0.0.9", "kind": "app"} + result = _check_or_fix_dsl(import_data) + assert result["version"] == "0.0.9" + + @pytest.mark.skip(reason="Test skipped") + def test_check_or_fix_dsl_current_version(self): + import_data = {"version": current_dsl_version, "kind": "app"} + result = _check_or_fix_dsl(import_data) + assert result["version"] == current_dsl_version + + @pytest.mark.skip(reason="Test skipped") + def test_check_or_fix_dsl_newer_version(self): + current_version = version.parse(current_dsl_version) + newer_version = f"{current_version.major}.{current_version.minor + 1}.0" + import_data = {"version": newer_version, "kind": "app"} + with pytest.raises(DSLVersionNotSupportedError): + _check_or_fix_dsl(import_data) + + @pytest.mark.skip(reason="Test skipped") + def test_check_or_fix_dsl_invalid_kind(self): + import_data = {"version": current_dsl_version, "kind": "invalid"} + result = _check_or_fix_dsl(import_data) + assert result["kind"] == "app" diff --git a/dev/pytest/pytest_vdb.sh b/dev/pytest/pytest_vdb.sh index d6797ed28e..02a9f49279 100755 --- a/dev/pytest/pytest_vdb.sh +++ b/dev/pytest/pytest_vdb.sh @@ -10,4 +10,7 @@ pytest api/tests/integration_tests/vdb/chroma \ api/tests/integration_tests/vdb/elasticsearch \ api/tests/integration_tests/vdb/vikingdb \ api/tests/integration_tests/vdb/baidu \ - api/tests/integration_tests/vdb/tcvectordb + api/tests/integration_tests/vdb/tcvectordb \ + api/tests/integration_tests/vdb/upstash \ + api/tests/integration_tests/vdb/couchbase \ + api/tests/integration_tests/vdb/oceanbase \ diff --git a/docker-legacy/docker-compose.yaml b/docker-legacy/docker-compose.yaml index 3eb0de708f..e3f1c3b761 100644 --- a/docker-legacy/docker-compose.yaml +++ b/docker-legacy/docker-compose.yaml @@ -2,7 +2,7 @@ version: '3' services: # API service api: - image: langgenius/dify-api:0.10.0 + image: langgenius/dify-api:0.10.2 restart: always environment: # Startup mode, 'api' starts the API server. @@ -227,7 +227,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:0.10.0 + image: langgenius/dify-api:0.10.2 restart: always environment: CONSOLE_WEB_URL: '' @@ -396,7 +396,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:0.10.0 + image: langgenius/dify-web:0.10.2 restart: always environment: # The base URL of console application api server, refers to the Console base URL of WEB service if console domain is diff --git a/docker/.env.example b/docker/.env.example index f022a451cf..34b2136302 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -48,6 +48,12 @@ FILES_URL= # The log level for the application. 
# Supported values are `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL` LOG_LEVEL=INFO +# Log file path +LOG_FILE= +# Log file max size, the unit is MB +LOG_FILE_MAX_SIZE=20 +# Log file max backup count +LOG_FILE_BACKUP_COUNT=5 # Debug mode, default is false. # It is recommended to turn on this configuration for local development @@ -216,6 +222,7 @@ REDIS_PORT=6379 REDIS_USERNAME= REDIS_PASSWORD=difyai123456 REDIS_USE_SSL=false +REDIS_DB=0 # Whether to use Redis Sentinel mode. # If set to true, the application will automatically discover and connect to the master node through Sentinel. @@ -267,6 +274,7 @@ CONSOLE_CORS_ALLOW_ORIGINS=* # Supported values are `local` , `s3` , `azure-blob` , `google-storage`, `tencent-cos`, `huawei-obs`, `volcengine-tos`, `baidu-obs`, `supabase` # Default: `local` STORAGE_TYPE=local +STORAGE_LOCAL_PATH=storage # S3 Configuration # Whether to use AWS managed IAM roles for authenticating with the S3 service. @@ -367,7 +375,7 @@ SUPABASE_URL=your-server-url # ------------------------------ # The type of vector store to use. -# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `tidb_vector`, `oracle`, `tencent`, `elasticsearch`, `analyticdb`, `vikingdb`. +# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `tidb_vector`, `oracle`, `tencent`, `elasticsearch`, `analyticdb`, `couchbase`, `vikingdb`. VECTOR_STORE=weaviate # The Weaviate endpoint URL. Only available when VECTOR_STORE is `weaviate`. @@ -406,6 +414,14 @@ MYSCALE_PASSWORD= MYSCALE_DATABASE=dify MYSCALE_FTS_PARAMS= +# Couchbase configurations, only available when VECTOR_STORE is `couchbase` +# The connection string must include hostname defined in the docker-compose file (couchbase-server in this case) +COUCHBASE_CONNECTION_STRING=couchbase://couchbase-server +COUCHBASE_USER=Administrator +COUCHBASE_PASSWORD=password +COUCHBASE_BUCKET_NAME=Embeddings +COUCHBASE_SCOPE_NAME=_default + # pgvector configurations, only available when VECTOR_STORE is `pgvector` PGVECTOR_HOST=pgvector PGVECTOR_PORT=5432 @@ -439,6 +455,20 @@ TIDB_VECTOR_USER=xxx.root TIDB_VECTOR_PASSWORD=xxxxxx TIDB_VECTOR_DATABASE=dify +# Tidb on qdrant configuration, only available when VECTOR_STORE is `tidb_on_qdrant` +TIDB_ON_QDRANT_URL=http://127.0.0.1 +TIDB_ON_QDRANT_API_KEY=dify +TIDB_ON_QDRANT_CLIENT_TIMEOUT=20 +TIDB_ON_QDRANT_GRPC_ENABLED=false +TIDB_ON_QDRANT_GRPC_PORT=6334 +TIDB_PUBLIC_KEY=dify +TIDB_PRIVATE_KEY=dify +TIDB_API_URL=http://127.0.0.1 +TIDB_IAM_API_URL=http://127.0.0.1 +TIDB_REGION=regions/aws-us-east-1 +TIDB_PROJECT_ID=dify +TIDB_SPEND_LIMIT=100 + # Chroma configuration, only available when VECTOR_STORE is `chroma` CHROMA_HOST=127.0.0.1 CHROMA_PORT=8000 @@ -501,6 +531,14 @@ VIKINGDB_SCHEMA=http VIKINGDB_CONNECTION_TIMEOUT=30 VIKINGDB_SOCKET_TIMEOUT=30 +# OceanBase Vector configuration, only available when VECTOR_STORE is `oceanbase` +OCEANBASE_VECTOR_HOST=oceanbase-vector +OCEANBASE_VECTOR_PORT=2881 +OCEANBASE_VECTOR_USER=root@test +OCEANBASE_VECTOR_PASSWORD= +OCEANBASE_VECTOR_DATABASE=test +OCEANBASE_MEMORY_LIMIT=6G + # ------------------------------ # Knowledge Configuration # ------------------------------ @@ -520,6 +558,22 @@ ETL_TYPE=dify # For example: http://unstructured:8000/general/v0/general UNSTRUCTURED_API_URL= +# ------------------------------ +# Model Configuration +# ------------------------------ + +# The maximum number of tokens allowed for prompt generation. 
+# This setting controls the upper limit of tokens that can be used by the LLM +# when generating a prompt in the prompt generation tool. +# Default: 512 tokens. +PROMPT_GENERATION_MAX_TOKENS=512 + +# The maximum number of tokens allowed for code generation. +# This setting controls the upper limit of tokens that can be used by the LLM +# when generating code in the code generation tool. +# Default: 1024 tokens. +CODE_GENERATION_MAX_TOKENS=1024 + # ------------------------------ # Multi-modal Configuration # ------------------------------ @@ -534,6 +588,12 @@ MULTIMODAL_SEND_IMAGE_FORMAT=base64 # Upload image file size limit, default 10M. UPLOAD_IMAGE_FILE_SIZE_LIMIT=10 +# Upload video file size limit, default 100M. +UPLOAD_VIDEO_FILE_SIZE_LIMIT=100 + +# Upload audio file size limit, default 50M. +UPLOAD_AUDIO_FILE_SIZE_LIMIT=50 + # ------------------------------ # Sentry Configuration # Used for application monitoring and error log tracking. @@ -585,6 +645,7 @@ MAIL_DEFAULT_SEND_FROM= # API-Key for the Resend email provider, used when MAIL_TYPE is `resend`. RESEND_API_KEY=your-resend-api-key +RESEND_API_URL=https://api.resend.com # SMTP server configuration, used when MAIL_TYPE is `smtp` SMTP_SERVER= @@ -624,6 +685,7 @@ CODE_MAX_NUMBER_ARRAY_LENGTH=1000 WORKFLOW_MAX_EXECUTION_STEPS=500 WORKFLOW_MAX_EXECUTION_TIME=1200 WORKFLOW_CALL_MAX_DEPTH=5 +MAX_VARIABLE_SIZE=204800 # HTTP request node in workflow configuration HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760 diff --git a/docker/couchbase-server/Dockerfile b/docker/couchbase-server/Dockerfile new file mode 100644 index 0000000000..bd8af64150 --- /dev/null +++ b/docker/couchbase-server/Dockerfile @@ -0,0 +1,4 @@ +FROM couchbase/server:latest AS stage_base +# FROM couchbase:latest AS stage_base +COPY init-cbserver.sh /opt/couchbase/init/ +RUN chmod +x /opt/couchbase/init/init-cbserver.sh \ No newline at end of file diff --git a/docker/couchbase-server/init-cbserver.sh b/docker/couchbase-server/init-cbserver.sh new file mode 100755 index 0000000000..e66bc18530 --- /dev/null +++ b/docker/couchbase-server/init-cbserver.sh @@ -0,0 +1,44 @@ +#!/bin/bash +# used to start couchbase server - can't get around this as docker compose only allows you to start one command - so we have to start couchbase like the standard couchbase Dockerfile would +# https://github.com/couchbase/docker/blob/master/enterprise/couchbase-server/7.2.0/Dockerfile#L88 + +/entrypoint.sh couchbase-server & + +# track if setup is complete so we don't try to setup again +FILE=/opt/couchbase/init/setupComplete.txt + +if ! 
[ -f "$FILE" ]; then + # used to automatically create the cluster based on environment variables + # https://docs.couchbase.com/server/current/cli/cbcli/couchbase-cli-cluster-init.html + + echo $COUCHBASE_ADMINISTRATOR_USERNAME ":" $COUCHBASE_ADMINISTRATOR_PASSWORD + + sleep 20s + /opt/couchbase/bin/couchbase-cli cluster-init -c 127.0.0.1 \ + --cluster-username $COUCHBASE_ADMINISTRATOR_USERNAME \ + --cluster-password $COUCHBASE_ADMINISTRATOR_PASSWORD \ + --services data,index,query,fts \ + --cluster-ramsize $COUCHBASE_RAM_SIZE \ + --cluster-index-ramsize $COUCHBASE_INDEX_RAM_SIZE \ + --cluster-eventing-ramsize $COUCHBASE_EVENTING_RAM_SIZE \ + --cluster-fts-ramsize $COUCHBASE_FTS_RAM_SIZE \ + --index-storage-setting default + + sleep 2s + + # used to auto create the bucket based on environment variables + # https://docs.couchbase.com/server/current/cli/cbcli/couchbase-cli-bucket-create.html + + /opt/couchbase/bin/couchbase-cli bucket-create -c localhost:8091 \ + --username $COUCHBASE_ADMINISTRATOR_USERNAME \ + --password $COUCHBASE_ADMINISTRATOR_PASSWORD \ + --bucket $COUCHBASE_BUCKET \ + --bucket-ramsize $COUCHBASE_BUCKET_RAMSIZE \ + --bucket-type couchbase + + # create file so we know that the cluster is setup and don't run the setup again + touch $FILE +fi + # docker compose will stop the container from running unless we do this + # known issue and workaround + tail -f /dev/null diff --git a/docker/docker-compose.middleware.yaml b/docker/docker-compose.middleware.yaml index 0c9edd2b55..2eea273e72 100644 --- a/docker/docker-compose.middleware.yaml +++ b/docker/docker-compose.middleware.yaml @@ -16,7 +16,7 @@ services: -c 'maintenance_work_mem=${POSTGRES_MAINTENANCE_WORK_MEM:-64MB}' -c 'effective_cache_size=${POSTGRES_EFFECTIVE_CACHE_SIZE:-4096MB}' volumes: - - ./volumes/db/data:/var/lib/postgresql/data + - ${PGDATA_HOST_VOLUME:-./volumes/db/data}:/var/lib/postgresql/data ports: - "${EXPOSE_POSTGRES_PORT:-5432}:5432" healthcheck: @@ -31,7 +31,7 @@ services: restart: always volumes: # Mount the redis data directory to the container. - - ./volumes/redis/data:/data + - ${REDIS_HOST_VOLUME:-./volumes/redis/data}:/data # Set the redis password when startup redis server. command: redis-server --requirepass difyai123456 ports: @@ -56,6 +56,7 @@ services: SANDBOX_PORT: ${SANDBOX_PORT:-8194} volumes: - ./volumes/sandbox/dependencies:/dependencies + - ./volumes/sandbox/conf:/conf healthcheck: test: [ "CMD", "curl", "-f", "http://localhost:8194/health" ] networks: @@ -94,7 +95,7 @@ services: restart: always volumes: # Mount the Weaviate data directory to the container. 
- - ./volumes/weaviate:/var/lib/weaviate + - ${WEAVIATE_HOST_VOLUME:-./volumes/weaviate}:/var/lib/weaviate env_file: - ./middleware.env environment: diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 614b1ccc6b..112e9a2702 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -1,6 +1,8 @@ x-shared-env: &shared-api-worker-env LOG_LEVEL: ${LOG_LEVEL:-INFO} LOG_FILE: ${LOG_FILE:-} + LOG_FILE_MAX_SIZE: ${LOG_FILE_MAX_SIZE:-20} + LOG_FILE_BACKUP_COUNT: ${LOG_FILE_BACKUP_COUNT:-5} DEBUG: ${DEBUG:-false} FLASK_DEBUG: ${FLASK_DEBUG:-false} SECRET_KEY: ${SECRET_KEY:-sk-9f73s3ljTXVcMT3Blb3ljTqtsKiGHXVcMT3BlbkFJLK7U} @@ -41,14 +43,14 @@ x-shared-env: &shared-api-worker-env REDIS_USERNAME: ${REDIS_USERNAME:-} REDIS_PASSWORD: ${REDIS_PASSWORD:-difyai123456} REDIS_USE_SSL: ${REDIS_USE_SSL:-false} - REDIS_DB: 0 + REDIS_DB: ${REDIS_DB:-0} REDIS_USE_SENTINEL: ${REDIS_USE_SENTINEL:-false} REDIS_SENTINELS: ${REDIS_SENTINELS:-} REDIS_SENTINEL_SERVICE_NAME: ${REDIS_SENTINEL_SERVICE_NAME:-} REDIS_SENTINEL_USERNAME: ${REDIS_SENTINEL_USERNAME:-} REDIS_SENTINEL_PASSWORD: ${REDIS_SENTINEL_PASSWORD:-} - ACCESS_TOKEN_EXPIRE_MINUTES: ${ACCESS_TOKEN_EXPIRE_MINUTES:-60} REDIS_SENTINEL_SOCKET_TIMEOUT: ${REDIS_SENTINEL_SOCKET_TIMEOUT:-0.1} + ACCESS_TOKEN_EXPIRE_MINUTES: ${ACCESS_TOKEN_EXPIRE_MINUTES:-60} CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://:difyai123456@redis:6379/1} BROKER_USE_SSL: ${BROKER_USE_SSL:-false} CELERY_USE_SENTINEL: ${CELERY_USE_SENTINEL:-false} @@ -57,7 +59,7 @@ x-shared-env: &shared-api-worker-env WEB_API_CORS_ALLOW_ORIGINS: ${WEB_API_CORS_ALLOW_ORIGINS:-*} CONSOLE_CORS_ALLOW_ORIGINS: ${CONSOLE_CORS_ALLOW_ORIGINS:-*} STORAGE_TYPE: ${STORAGE_TYPE:-local} - STORAGE_LOCAL_PATH: storage + STORAGE_LOCAL_PATH: ${STORAGE_LOCAL_PATH:-storage} S3_USE_AWS_MANAGED_IAM: ${S3_USE_AWS_MANAGED_IAM:-false} S3_ENDPOINT: ${S3_ENDPOINT:-} S3_BUCKET_NAME: ${S3_BUCKET_NAME:-} @@ -108,6 +110,11 @@ x-shared-env: &shared-api-worker-env QDRANT_CLIENT_TIMEOUT: ${QDRANT_CLIENT_TIMEOUT:-20} QDRANT_GRPC_ENABLED: ${QDRANT_GRPC_ENABLED:-false} QDRANT_GRPC_PORT: ${QDRANT_GRPC_PORT:-6334} + COUCHBASE_CONNECTION_STRING: ${COUCHBASE_CONNECTION_STRING:-'couchbase-server'} + COUCHBASE_USER: ${COUCHBASE_USER:-Administrator} + COUCHBASE_PASSWORD: ${COUCHBASE_PASSWORD:-password} + COUCHBASE_BUCKET_NAME: ${COUCHBASE_BUCKET_NAME:-Embeddings} + COUCHBASE_SCOPE_NAME: ${COUCHBASE_SCOPE_NAME:-_default} MILVUS_URI: ${MILVUS_URI:-http://127.0.0.1:19530} MILVUS_TOKEN: ${MILVUS_TOKEN:-} MILVUS_USER: ${MILVUS_USER:-root} @@ -133,6 +140,18 @@ x-shared-env: &shared-api-worker-env TIDB_VECTOR_USER: ${TIDB_VECTOR_USER:-} TIDB_VECTOR_PASSWORD: ${TIDB_VECTOR_PASSWORD:-} TIDB_VECTOR_DATABASE: ${TIDB_VECTOR_DATABASE:-dify} + TIDB_ON_QDRANT_URL: ${TIDB_ON_QDRANT_URL:-http://127.0.0.1} + TIDB_ON_QDRANT_API_KEY: ${TIDB_ON_QDRANT_API_KEY:-dify} + TIDB_ON_QDRANT_CLIENT_TIMEOUT: ${TIDB_ON_QDRANT_CLIENT_TIMEOUT:-20} + TIDB_ON_QDRANT_GRPC_ENABLED: ${TIDB_ON_QDRANT_GRPC_ENABLED:-false} + TIDB_ON_QDRANT_GRPC_PORT: ${TIDB_ON_QDRANT_GRPC_PORT:-6334} + TIDB_PUBLIC_KEY: ${TIDB_PUBLIC_KEY:-dify} + TIDB_PRIVATE_KEY: ${TIDB_PRIVATE_KEY:-dify} + TIDB_API_URL: ${TIDB_API_URL:-http://127.0.0.1} + TIDB_IAM_API_URL: ${TIDB_IAM_API_URL:-http://127.0.0.1} + TIDB_REGION: ${TIDB_REGION:-regions/aws-us-east-1} + TIDB_PROJECT_ID: ${TIDB_PROJECT_ID:-dify} + TIDB_SPEND_LIMIT: ${TIDB_SPEND_LIMIT:-100} ORACLE_HOST: ${ORACLE_HOST:-oracle} ORACLE_PORT: ${ORACLE_PORT:-1521} ORACLE_USER: ${ORACLE_USER:-dify} @@ -182,12 +201,18 @@ x-shared-env: 
&shared-api-worker-env VIKINGDB_REGION: ${VIKINGDB_REGION:-cn-shanghai} VIKINGDB_HOST: ${VIKINGDB_HOST:-api-vikingdb.xxx.volces.com} VIKINGDB_SCHEMA: ${VIKINGDB_SCHEMA:-http} + UPSTASH_VECTOR_URL: ${UPSTASH_VECTOR_URL:-https://xxx-vector.upstash.io} + UPSTASH_VECTOR_TOKEN: ${UPSTASH_VECTOR_TOKEN:-dify} UPLOAD_FILE_SIZE_LIMIT: ${UPLOAD_FILE_SIZE_LIMIT:-15} UPLOAD_FILE_BATCH_LIMIT: ${UPLOAD_FILE_BATCH_LIMIT:-5} ETL_TYPE: ${ETL_TYPE:-dify} UNSTRUCTURED_API_URL: ${UNSTRUCTURED_API_URL:-} + PROMPT_GENERATION_MAX_TOKENS: ${PROMPT_GENERATION_MAX_TOKENS:-512} + CODE_GENERATION_MAX_TOKENS: ${CODE_GENERATION_MAX_TOKENS:-1024} MULTIMODAL_SEND_IMAGE_FORMAT: ${MULTIMODAL_SEND_IMAGE_FORMAT:-base64} UPLOAD_IMAGE_FILE_SIZE_LIMIT: ${UPLOAD_IMAGE_FILE_SIZE_LIMIT:-10} + UPLOAD_VIDEO_FILE_SIZE_LIMIT: ${UPLOAD_VIDEO_FILE_SIZE_LIMIT:-100} + UPLOAD_AUDIO_FILE_SIZE_LIMIT: ${UPLOAD_AUDIO_FILE_SIZE_LIMIT:-50} SENTRY_DSN: ${API_SENTRY_DSN:-} SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0} SENTRY_PROFILES_SAMPLE_RATE: ${API_SENTRY_PROFILES_SAMPLE_RATE:-1.0} @@ -204,7 +229,7 @@ x-shared-env: &shared-api-worker-env SMTP_USE_TLS: ${SMTP_USE_TLS:-true} SMTP_OPPORTUNISTIC_TLS: ${SMTP_OPPORTUNISTIC_TLS:-false} RESEND_API_KEY: ${RESEND_API_KEY:-your-resend-api-key} - RESEND_API_URL: https://api.resend.com + RESEND_API_URL: ${RESEND_API_URL:-https://api.resend.com} INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: ${INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH:-1000} INVITE_EXPIRY_HOURS: ${INVITE_EXPIRY_HOURS:-72} RESET_PASSWORD_TOKEN_EXPIRY_MINUTES: ${RESET_PASSWORD_TOKEN_EXPIRY_MINUTES:-5} @@ -227,11 +252,24 @@ x-shared-env: &shared-api-worker-env HTTP_REQUEST_NODE_MAX_BINARY_SIZE: ${HTTP_REQUEST_NODE_MAX_BINARY_SIZE:-10485760} HTTP_REQUEST_NODE_MAX_TEXT_SIZE: ${HTTP_REQUEST_NODE_MAX_TEXT_SIZE:-1048576} APP_MAX_EXECUTION_TIME: ${APP_MAX_EXECUTION_TIME:-12000} + POSITION_TOOL_PINS: ${POSITION_TOOL_PINS:-} + POSITION_TOOL_INCLUDES: ${POSITION_TOOL_INCLUDES:-} + POSITION_TOOL_EXCLUDES: ${POSITION_TOOL_EXCLUDES:-} + POSITION_PROVIDER_PINS: ${POSITION_PROVIDER_PINS:-} + POSITION_PROVIDER_INCLUDES: ${POSITION_PROVIDER_INCLUDES:-} + POSITION_PROVIDER_EXCLUDES: ${POSITION_PROVIDER_EXCLUDES:-} + MAX_VARIABLE_SIZE: ${MAX_VARIABLE_SIZE:-204800} + OCEANBASE_VECTOR_HOST: ${OCEANBASE_VECTOR_HOST:-http://oceanbase-vector} + OCEANBASE_VECTOR_PORT: ${OCEANBASE_VECTOR_PORT:-2881} + OCEANBASE_VECTOR_USER: ${OCEANBASE_VECTOR_USER:-root@test} + OCEANBASE_VECTOR_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-""} + OCEANBASE_VECTOR_DATABASE: ${OCEANBASE_VECTOR_DATABASE:-test} + OCEANBASE_MEMORY_LIMIT: ${OCEANBASE_MEMORY_LIMIT:-6G} services: # API service api: - image: langgenius/dify-api:0.10.0 + image: langgenius/dify-api:0.10.2 restart: always environment: # Use the shared environment variables. @@ -251,7 +289,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:0.10.0 + image: langgenius/dify-api:0.10.2 restart: always environment: # Use the shared environment variables. @@ -270,7 +308,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:0.10.0 + image: langgenius/dify-web:0.10.2 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -464,6 +502,39 @@ services: environment: QDRANT_API_KEY: ${QDRANT_API_KEY:-difyai123456} + # The Couchbase vector store. 
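+ # Started only with the `couchbase` profile; on first boot, init-cbserver.sh
+ # (docker/couchbase-server/) initializes the cluster and creates the bucket.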
+ couchbase-server: + build: ./couchbase-server + profiles: + - couchbase + restart: always + environment: + - CLUSTER_NAME=dify_search + - COUCHBASE_ADMINISTRATOR_USERNAME=${COUCHBASE_USER:-Administrator} + - COUCHBASE_ADMINISTRATOR_PASSWORD=${COUCHBASE_PASSWORD:-password} + - COUCHBASE_BUCKET=${COUCHBASE_BUCKET_NAME:-Embeddings} + - COUCHBASE_BUCKET_RAMSIZE=512 + - COUCHBASE_RAM_SIZE=2048 + - COUCHBASE_EVENTING_RAM_SIZE=512 + - COUCHBASE_INDEX_RAM_SIZE=512 + - COUCHBASE_FTS_RAM_SIZE=1024 + hostname: couchbase-server + container_name: couchbase-server + working_dir: /opt/couchbase + stdin_open: true + tty: true + entrypoint: [""] + command: sh -c "/opt/couchbase/init/init-cbserver.sh" + volumes: + - ./volumes/couchbase/data:/opt/couchbase/var/lib/couchbase/data + healthcheck: + # ensure bucket was created before proceeding + test: [ "CMD-SHELL", "curl -s -f -u Administrator:password http://localhost:8091/pools/default/buckets | grep -q '\\[{' || exit 1" ] + interval: 10s + retries: 10 + start_period: 30s + timeout: 10s + # The pgvector vector database. pgvector: image: pgvector/pgvector:pg16 @@ -521,6 +592,18 @@ services: CHROMA_SERVER_AUTHN_PROVIDER: ${CHROMA_SERVER_AUTHN_PROVIDER:-chromadb.auth.token_authn.TokenAuthenticationServerProvider} IS_PERSISTENT: ${CHROMA_IS_PERSISTENT:-TRUE} + # OceanBase vector database + oceanbase-vector: + image: quay.io/oceanbase/oceanbase-ce:4.3.3.0-100000142024101215 + profiles: + - oceanbase-vector + restart: always + volumes: + - ./volumes/oceanbase/data:/root/ob + - ./volumes/oceanbase/conf:/root/.obd/cluster + environment: + OB_MEMORY_LIMIT: ${OCEANBASE_MEMORY_LIMIT:-6G} + # Oracle vector database oracle: image: container-registry.oracle.com/database/free:latest diff --git a/docker/middleware.env.example b/docker/middleware.env.example index 04d0fb5ed3..17ac819527 100644 --- a/docker/middleware.env.example +++ b/docker/middleware.env.example @@ -8,6 +8,7 @@ POSTGRES_PASSWORD=difyai123456 POSTGRES_DB=dify # postgres data directory PGDATA=/var/lib/postgresql/data/pgdata +PGDATA_HOST_VOLUME=./volumes/db/data # Maximum number of connections to the database # Default is 100 @@ -39,6 +40,11 @@ POSTGRES_MAINTENANCE_WORK_MEM=64MB # Reference: https://www.postgresql.org/docs/current/runtime-config-query.html#GUC-EFFECTIVE-CACHE-SIZE POSTGRES_EFFECTIVE_CACHE_SIZE=4096MB +# ----------------------------- +# Environment Variables for redis Service +REDIS_HOST_VOLUME=./volumes/redis/data +# ----------------------------- + # ------------------------------ # Environment Variables for sandbox Service SANDBOX_API_KEY=dify-sandbox @@ -70,6 +76,7 @@ WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih WEAVIATE_AUTHENTICATION_APIKEY_USERS=hello@dify.ai WEAVIATE_AUTHORIZATION_ADMINLIST_ENABLED=true WEAVIATE_AUTHORIZATION_ADMINLIST_USERS=hello@dify.ai +WEAVIATE_HOST_VOLUME=./volumes/weaviate # ------------------------------ # Docker Compose Service Expose Host Port Configurations diff --git a/docker/volumes/sandbox/conf/config.yaml b/docker/volumes/sandbox/conf/config.yaml new file mode 100644 index 0000000000..8c1a1deb54 --- /dev/null +++ b/docker/volumes/sandbox/conf/config.yaml @@ -0,0 +1,14 @@ +app: + port: 8194 + debug: True + key: dify-sandbox +max_workers: 4 +max_requests: 50 +worker_timeout: 5 +python_path: /usr/local/bin/python3 +enable_network: True # please make sure there is no network risk in your environment +allowed_syscalls: # please leave it empty if you have no idea how seccomp works +proxy: + socks5: '' + http: '' + https: '' 
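A note on the couchbase-server service defined above: its healthcheck reports healthy only after init-cbserver.sh has created the bucket, so dependent services wait for a usable cluster rather than a merely running container. A minimal sketch of the same readiness probe in Python, assuming the `requests` package, the default compose credentials shown above, and that port 8091 is reachable from wherever the probe runs:

```python
import sys

import requests

# Same readiness condition as the compose healthcheck: the cluster is usable
# once /pools/default/buckets returns a non-empty JSON array.
HOST = "localhost"       # assumption: adjust if 8091 is exposed elsewhere
USER = "Administrator"   # compose default (COUCHBASE_USER)
PASSWORD = "password"    # compose default (COUCHBASE_PASSWORD)

resp = requests.get(
    f"http://{HOST}:8091/pools/default/buckets",
    auth=(USER, PASSWORD),
    timeout=10,
)
resp.raise_for_status()
buckets = resp.json()
if not buckets:
    sys.exit("Couchbase is up, but no bucket has been created yet")
print("ready:", [b["name"] for b in buckets])
```

An empty array is exactly the case the healthcheck's `grep -q '\[{'` rejects: the server answers, but init-cbserver.sh has not finished creating the bucket.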
diff --git a/docker/volumes/sandbox/conf/config.yaml.example b/docker/volumes/sandbox/conf/config.yaml.example new file mode 100644 index 0000000000..f92c19e51a --- /dev/null +++ b/docker/volumes/sandbox/conf/config.yaml.example @@ -0,0 +1,35 @@ +app: + port: 8194 + debug: True + key: dify-sandbox +max_workers: 4 +max_requests: 50 +worker_timeout: 5 +python_path: /usr/local/bin/python3 +python_lib_path: + - /usr/local/lib/python3.10 + - /usr/lib/python3.10 + - /usr/lib/python3 + - /usr/lib/x86_64-linux-gnu + - /etc/ssl/certs/ca-certificates.crt + - /etc/nsswitch.conf + - /etc/hosts + - /etc/resolv.conf + - /run/systemd/resolve/stub-resolv.conf + - /run/resolvconf/resolv.conf + - /etc/localtime + - /usr/share/zoneinfo + - /etc/timezone + # add more paths if needed +python_pip_mirror_url: https://pypi.tuna.tsinghua.edu.cn/simple +nodejs_path: /usr/local/bin/node +enable_network: True +allowed_syscalls: + - 1 + - 2 + - 3 + # add all the syscalls which you require +proxy: + socks5: '' + http: '' + https: '' diff --git a/web/__mocks__/mime.js b/web/__mocks__/mime.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/web/app/(commonLayout)/datasets/DatasetFooter.tsx b/web/app/(commonLayout)/datasets/DatasetFooter.tsx index 6eac815a1a..b87098000f 100644 --- a/web/app/(commonLayout)/datasets/DatasetFooter.tsx +++ b/web/app/(commonLayout)/datasets/DatasetFooter.tsx @@ -9,8 +9,8 @@ const DatasetFooter = () => {

{t('dataset.didYouKnow')}

- {t('dataset.intro1')}{t('dataset.intro2')}{t('dataset.intro3')}
- {t('dataset.intro4')}{t('dataset.intro5')}{t('dataset.intro6')} + {t('dataset.intro1')}{t('dataset.intro2')}{t('dataset.intro3')}
+ {t('dataset.intro4')}{t('dataset.intro5')}{t('dataset.intro6')}

) diff --git a/web/app/(commonLayout)/datasets/Doc.tsx b/web/app/(commonLayout)/datasets/Doc.tsx index a6dd8c23ef..553dca5008 100644 --- a/web/app/(commonLayout)/datasets/Doc.tsx +++ b/web/app/(commonLayout)/datasets/Doc.tsx @@ -1,6 +1,6 @@ 'use client' -import type { FC } from 'react' +import { type FC, useEffect } from 'react' import { useContext } from 'use-context-selector' import TemplateEn from './template/template.en.mdx' import TemplateZh from './template/template.zh.mdx' @@ -14,6 +14,13 @@ const Doc: FC = ({ apiBaseUrl, }) => { const { locale } = useContext(I18n) + + useEffect(() => { + const hash = location.hash + if (hash) + document.querySelector(hash)?.scrollIntoView() + }, []) + return (
{ diff --git a/web/app/(commonLayout)/datasets/template/template.en.mdx b/web/app/(commonLayout)/datasets/template/template.en.mdx index b846f6d9fb..263230d049 100644 --- a/web/app/(commonLayout)/datasets/template/template.en.mdx +++ b/web/app/(commonLayout)/datasets/template/template.en.mdx @@ -20,17 +20,17 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ---- +
- This api is based on an existing Knowledge and creates a new document through text based on this Knowledge. + This API creates a new document from text in an existing knowledge. ### Params @@ -50,7 +50,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from Index mode - high_quality High quality: embedding using embedding model, built as vector database index - - economy Economy: Build using inverted index of Keyword Table Index + - economy Economy: Build using inverted index of keyword table index Processing rules @@ -62,7 +62,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from - remove_extra_spaces Replace consecutive spaces, newlines, tabs - remove_urls_emails Delete URL, email address - enabled (bool) Whether to select this rule or not. If no document ID is passed in, it represents the default value. - - segmentation (object) segmentation rules + - segmentation (object) Segmentation rules - separator Custom segment identifier, currently only allows one delimiter to be set. Default is \n - max_tokens Maximum length (token) defaults to 1000 @@ -72,11 +72,11 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ```bash {{ title: 'cURL' }} - curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/document/create_by_text' \ + curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/document/create-by-text' \ --header 'Authorization: Bearer {api_key}' \ --header 'Content-Type: application/json' \ --data-raw '{ @@ -123,17 +123,17 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ---- +
- This api is based on an existing Knowledge and creates a new document through a file based on this Knowledge. + This API creates a new document from an uploaded file in an existing knowledge. ### Params @@ -145,17 +145,17 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ### Request Body - - original_document_id Source document ID (optional) + - original_document_id Source document ID (optional) - Used to re-upload the document or modify the document cleaning and segmentation configuration. The missing information is copied from the source document - The source document cannot be an archived document - When original_document_id is passed in, the update operation is performed on behalf of the document. process_rule is a fillable item. If not filled in, the segmentation method of the source document will be used by default - When original_document_id is not passed in, the new operation is performed on behalf of the document, and process_rule is required - - indexing_technique Index mode + - indexing_technique Index mode - high_quality High quality: embedding using embedding model, built as vector database index - - economy Economy: Build using inverted index of Keyword Table Index + - economy Economy: Build using inverted index of keyword table index - - process_rule Processing rules + - process_rule Processing rules - mode (string) Cleaning, segmentation mode, automatic / custom - rules (object) Custom rules (in automatic mode, this field is empty) - pre_processing_rules (array[object]) Preprocessing rules @@ -164,7 +164,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from - remove_extra_spaces Replace consecutive spaces, newlines, tabs - remove_urls_emails Delete URL, email address - enabled (bool) Whether to select this rule or not. If no document ID is passed in, it represents the default value. - - segmentation (object) segmentation rules + - segmentation (object) Segmentation rules - separator Custom segment identifier, currently only allows one delimiter to be set. Default is \n - max_tokens Maximum length (token) defaults to 1000 @@ -177,11 +177,11 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ```bash {{ title: 'cURL' }} - curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/document/create_by_file' \ + curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/document/create-by-file' \ --header 'Authorization: Bearer {api_key}' \ --form 'data="{\"name\":\"Dify\",\"indexing_technique\":\"high_quality\",\"process_rule\":{\"rules\":{\"pre_processing_rules\":[{\"id\":\"remove_extra_spaces\",\"enabled\":true},{\"id\":\"remove_urls_emails\",\"enabled\":true}],\"segmentation\":{\"separator\":\"###\",\"max_tokens\":500}},\"mode\":\"custom\"}}";type=text/plain' \ --form 'file=@"/path/to/file"' @@ -221,12 +221,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ---- +
@@ -236,18 +236,37 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from Knowledge name + + Knowledge description (optional) + + + Index technique (optional) + - high_quality High quality + - economy Economy + Permission - only_me Only me - all_team_members All team members - partial_members Partial members + + Provider (optional, default: vendor) + - vendor Vendor + - external External knowledge + + + External knowledge API ID (optional) + + + External knowledge ID (optional) + - @@ -287,12 +306,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ---- +
@@ -308,9 +327,9 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from - @@ -350,12 +369,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ---- +
@@ -387,17 +406,17 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ---- +
- This api is based on an existing Knowledge and updates the document through text based on this Knowledge. + This API updates a document from text in an existing knowledge. ### Params @@ -427,7 +446,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from - remove_extra_spaces Replace consecutive spaces, newlines, tabs - remove_urls_emails Delete URL, email address - enabled (bool) Whether to select this rule or not. If no document ID is passed in, it represents the default value. - - segmentation (object) segmentation rules + - segmentation (object) Segmentation rules - separator Custom segment identifier, currently only allows one delimiter to be set. Default is \n - max_tokens Maximum length (token) defaults to 1000 @@ -437,11 +456,11 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ```bash {{ title: 'cURL' }} - curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}/update_by_text' \ + curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}/update-by-text' \ --header 'Authorization: Bearer {api_key}' \ --header 'Content-Type: application/json' \ --data-raw '{ @@ -484,17 +503,17 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ---- +
- This api is based on an existing Knowledge, and updates documents through files based on this Knowledge + This API updates a document from an uploaded file in an existing knowledge. ### Params @@ -524,7 +543,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from - remove_extra_spaces Replace consecutive spaces, newlines, tabs - remove_urls_emails Delete URL, email address - enabled (bool) Whether to select this rule or not. If no document ID is passed in, it represents the default value. - - segmentation (object) segmentation rules + - segmentation (object) Segmentation rules - separator Custom segment identifier, currently only allows one delimiter to be set. Default is \n - max_tokens Maximum length (token) defaults to 1000 @@ -534,11 +553,11 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ```bash {{ title: 'cURL' }} - curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}/update_by_file' \ + curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}/update-by-file' \ --header 'Authorization: Bearer {api_key}' \ --form 'data="{\"name\":\"Dify\",\"indexing_technique\":\"high_quality\",\"process_rule\":{\"rules\":{\"pre_processing_rules\":[{\"id\":\"remove_extra_spaces\",\"enabled\":true},{\"id\":\"remove_urls_emails\",\"enabled\":true}],\"segmentation\":{\"separator\":\"###\",\"max_tokens\":500}},\"mode\":\"custom\"}}";type=text/plain' \ --form 'file=@"/path/to/file"' @@ -578,12 +597,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ---- +
@@ -633,12 +652,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ---- +
@@ -675,12 +694,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ---- +
@@ -695,13 +714,13 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ### Query - Search keywords, currently only search document names(optional) + Search keywords; currently only document names are searched (optional) - Page number(optional) + Page number (optional) - Number of items returned, default 20, range 1-100(optional) + Number of items returned, default 20, range 1-100 (optional) @@ -750,12 +769,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ---- +
@@ -773,9 +792,9 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ### Request Body - - content (text) Text content/question content, required - - answer (text) Answer content, if the mode of the Knowledge is qa mode, pass the value(optional) - - keywords (list) Keywords(optional) + - content (text) Text content / question content, required + - answer (text) Answer content; pass it if the knowledge is in Q&A mode (optional) + - keywords (list) Keywords (optional) @@ -836,12 +855,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ---- +
@@ -859,10 +878,10 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ### Query - keyword,choosable + Keyword (optional) - Search status,completed + Search status, completed @@ -914,12 +933,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ---- +
@@ -960,12 +979,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ---- +
@@ -986,10 +1005,10 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ### Request Body - - content (text) text content/question content,required - - answer (text) Answer content, not required, passed if the Knowledge is in qa mode - - keywords (list) keyword, not required - - enabled (bool) false/true, not required + - content (text) Text content / question content, required + - answer (text) Answer content, passed if the knowledge is in Q&A mode (optional) + - keywords (list) Keywords (optional) + - enabled (bool) false / true (optional) @@ -1048,41 +1067,41 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ---- +
### Path - Dataset ID + Knowledge ID ### Request Body - retrieval keywordc + Query keyword - retrieval keyword(Optional, if not filled, it will be recalled according to the default method) + Retrieval model (optional; if not provided, the default retrieval method is used) - search_method (text) Search method: One of the following four keywords is required - keyword_search Keyword search - semantic_search Semantic search - full_text_search Full-text search - hybrid_search Hybrid search - - reranking_enable (bool) Whether to enable reranking, optional, required if the search mode is semantic_search or hybrid_search - - reranking_mode (object) Rerank model configuration, optional, required if reranking is enabled + - reranking_enable (bool) Whether to enable reranking, required if the search mode is semantic_search or hybrid_search (optional) + - reranking_mode (object) Rerank model configuration, required if reranking is enabled - reranking_provider_name (string) Rerank model provider - reranking_model_name (string) Rerank model name - weights (double) Semantic search weight setting in hybrid search mode - - top_k (integer) Number of results to return (optional) - score_threshold_enabled (bool) Whether to enable score threshold - score_threshold (double) Score threshold @@ -1095,26 +1114,26 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ```bash {{ title: 'cURL' }} - curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/hit_testing' \ + curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/retrieve' \ --header 'Authorization: Bearer {api_key}' \ --header 'Content-Type: application/json' \ --data-raw '{ @@ -1193,7 +1212,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
diff --git a/web/app/(commonLayout)/datasets/template/template.zh.mdx b/web/app/(commonLayout)/datasets/template/template.zh.mdx index ece4d3b771..9c25d1e7bb 100644 --- a/web/app/(commonLayout)/datasets/template/template.zh.mdx +++ b/web/app/(commonLayout)/datasets/template/template.zh.mdx @@ -20,13 +20,13 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ---- +
@@ -50,7 +50,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from 索引方式 - high_quality 高质量:使用 embedding 模型进行嵌入,构建为向量数据库索引 - - economy 经济:使用 Keyword Table Index 的倒排索引进行构建 + - economy 经济:使用 keyword table index 的倒排索引进行构建 处理规则 @@ -64,7 +64,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from - enabled (bool) 是否选中该规则,不传入文档 ID 时代表默认值 - segmentation (object) 分段规则 - separator 自定义分段标识符,目前仅允许设置一个分隔符。默认为 \n - - max_tokens 最大长度 (token) 默认为 1000 + - max_tokens 最大长度(token)默认为 1000 @@ -72,11 +72,11 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ```bash {{ title: 'cURL' }} - curl --location --request --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/document/create_by_text' \ + curl --location --request --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/document/create-by-text' \ --header 'Authorization: Bearer {api_key}' \ --header 'Content-Type: application/json' \ --data-raw '{ @@ -123,13 +123,13 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ---- +
@@ -145,17 +145,17 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
 ### Request Body
-  original_document_id 源文档 ID (选填)
+  original_document_id 源文档 ID(选填)
   - 用于重新上传文档或修改文档清洗、分段配置,缺失的信息从源文档复制
   - 源文档不可为归档的文档
   - 当传入 original_document_id 时,代表文档进行更新操作,process_rule 为可填项目,不填默认使用源文档的分段方式
   - 未传入 original_document_id 时,代表文档进行新增操作,process_rule 为必填
-  indexing_technique 索引方式
+  indexing_technique 索引方式
   - high_quality 高质量:使用 embedding 模型进行嵌入,构建为向量数据库索引
-  - economy 经济:使用 Keyword Table Index 的倒排索引进行构建
+  - economy 经济:使用 keyword table index 的倒排索引进行构建
-  process_rule 处理规则
+  process_rule 处理规则
   - mode (string) 清洗、分段模式,automatic 自动 / custom 自定义
   - rules (object) 自定义规则(自动模式下,该字段为空)
     - pre_processing_rules (array[object]) 预处理规则
@@ -166,7 +166,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
   - enabled (bool) 是否选中该规则,不传入文档 ID 时代表默认值
   - segmentation (object) 分段规则
     - separator 自定义分段标识符,目前仅允许设置一个分隔符。默认为 \n
-    - max_tokens 最大长度 (token) 默认为 1000
+    - max_tokens 最大长度(token)默认为 1000
   需要上传的文件。
@@ -177,11 +177,11 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
 ```bash {{ title: 'cURL' }}
-    curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/document/create_by_file' \
+    curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/document/create-by-file' \
     --header 'Authorization: Bearer {api_key}' \
     --form 'data="{\"name\":\"Dify\",\"indexing_technique\":\"high_quality\",\"process_rule\":{\"rules\":{\"pre_processing_rules\":[{\"id\":\"remove_extra_spaces\",\"enabled\":true},{\"id\":\"remove_urls_emails\",\"enabled\":true}],\"segmentation\":{\"separator\":\"###\",\"max_tokens\":500}},\"mode\":\"custom\"}}";type=text/plain' \
     --form 'file=@"/path/to/file"'
@@ -221,7 +221,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
----
+
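The create-by-file variant is multipart: the `data` part carries the same JSON config shown in the cURL sample, and `file` carries the upload. A sketch assuming Node 18+ globals (fetch, FormData, Blob); file path and keys are placeholders:

```typescript
import { readFile } from 'node:fs/promises'

const API_BASE = 'https://api.dify.ai/v1' // placeholder
const API_KEY = '{api_key}'               // placeholder

// Mirrors the multipart cURL sample above: custom segmentation on '###',
// 500-token chunks, with the two documented pre-processing rules enabled.
async function createDocumentByFile(datasetId: string, filePath: string) {
  const data = {
    name: 'Dify',
    indexing_technique: 'high_quality',
    process_rule: {
      rules: {
        pre_processing_rules: [
          { id: 'remove_extra_spaces', enabled: true },
          { id: 'remove_urls_emails', enabled: true },
        ],
        segmentation: { separator: '###', max_tokens: 500 },
      },
      mode: 'custom',
    },
  }
  const form = new FormData()
  form.append('data', JSON.stringify(data))
  form.append('file', new Blob([await readFile(filePath)]), 'document.txt')

  const res = await fetch(`${API_BASE}/datasets/${datasetId}/document/create-by-file`, {
    method: 'POST',
    headers: { Authorization: `Bearer ${API_KEY}` }, // multipart boundary is set by FormData
    body: form,
  })
  return res.json()
}
```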
-  知识库名称
+  知识库名称(必填)
+
+
+  知识库描述(选填)
+
+
+  索引模式(选填,建议填写)
+  - high_quality 高质量
+  - economy 经济
-  权限
+  权限(选填,默认 only_me)
   - only_me 仅自己
   - all_team_members 所有团队成员
   - partial_members 部分团队成员
+
+  Provider(选填,默认 vendor)
+  - vendor 上传文件
+  - external 外部知识库
+
+
+  外部知识库 API_ID(选填)
+
+
+  外部知识库 ID(选填)
+
-
@@ -287,7 +306,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
----
+
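The hunk above documents several new knowledge-base creation parameters (description, indexing mode, provider, and the external-knowledge IDs). A hedged sketch of the corresponding payload; the snake_case key names are assumptions inferred from the parameter list, not a verified schema:

```typescript
// Assumed JSON body for creating a knowledge base with the newly documented fields.
const createKnowledgePayload = {
  name: '我的知识库',                  // required
  description: '示例描述',             // optional
  indexing_technique: 'high_quality',  // optional but recommended; or 'economy'
  permission: 'only_me',               // optional; defaults to only_me
  provider: 'vendor',                  // optional; defaults to vendor ('external' for an external knowledge base)
  // Only relevant when provider is 'external' (both optional):
  // external_knowledge_api_id: '...',
  // external_knowledge_id: '...',
}
```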
----
+
----
+
@@ -412,7 +431,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
 ### Request Body
-  文档名称 (选填)
+  文档名称(选填)
   文档内容(选填)
@@ -429,7 +448,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
   - enabled (bool) 是否选中该规则,不传入文档 ID 时代表默认值
   - segmentation (object) 分段规则
     - separator 自定义分段标识符,目前仅允许设置一个分隔符。默认为 \n
-    - max_tokens 最大长度 (token) 默认为 1000
+    - max_tokens 最大长度(token)默认为 1000
@@ -437,11 +456,11 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
 ```bash {{ title: 'cURL' }}
-    curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}/update_by_text' \
+    curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}/update-by-text' \
     --header 'Authorization: Bearer {api_key}' \
     --header 'Content-Type: application/json' \
    --data-raw '{
@@ -484,13 +503,13 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
----
+
@@ -509,7 +528,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
 ### Request Body
-  文档名称 (选填)
+  文档名称(选填)
   需要上传的文件
@@ -526,7 +545,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
   - enabled (bool) 是否选中该规则,不传入文档 ID 时代表默认值
   - segmentation (object) 分段规则
     - separator 自定义分段标识符,目前仅允许设置一个分隔符。默认为 \n
-    - max_tokens 最大长度 (token) 默认为 1000
+    - max_tokens 最大长度(token)默认为 1000
@@ -534,11 +553,11 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
 ```bash {{ title: 'cURL' }}
-    curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}/update_by_file' \
+    curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}/update-by-file' \
     --header 'Authorization: Bearer {api_key}' \
     --form 'data="{\"name\":\"Dify\",\"indexing_technique\":\"high_quality\",\"process_rule\":{\"rules\":{\"pre_processing_rules\":[{\"id\":\"remove_extra_spaces\",\"enabled\":true},{\"id\":\"remove_urls_emails\",\"enabled\":true}],\"segmentation\":{\"separator\":\"###\",\"max_tokens\":500}},\"mode\":\"custom\"}}";type=text/plain' \
     --form 'file=@"/path/to/file"'
@@ -578,7 +597,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
----
+
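Both update routes above move to hyphenated paths (`update-by-text`, `update-by-file`). A sketch of the multipart file variant, assuming the same `data` + `file` parts as the creation call; names and paths are placeholders:

```typescript
const API_BASE = 'https://api.dify.ai/v1' // placeholder
const API_KEY = '{api_key}'               // placeholder

// Re-uploads a document's file; the document is addressed by both IDs in the path.
// The update-by-text variant is analogous but takes a JSON body instead of a form.
async function updateDocumentByFile(datasetId: string, documentId: string, file: Blob) {
  const form = new FormData()
  form.append('data', JSON.stringify({ name: 'Dify' })) // name is optional
  form.append('file', file, 'updated.txt')
  const res = await fetch(
    `${API_BASE}/datasets/${datasetId}/documents/${documentId}/update-by-file`,
    { method: 'POST', headers: { Authorization: `Bearer ${API_KEY}` }, body: form },
  )
  return res.json()
}
```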
----
+
----
+
----
+
   - content (text) 文本内容/问题内容,必填
-  - answer (text) 答案内容,非必填,如果知识库的模式为qa模式则传值
+  - answer (text) 答案内容,非必填,如果知识库的模式为 Q&A 模式则传值
   - keywords (list) 关键字,非必填
@@ -836,7 +855,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
----
+
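For reference, a hedged sketch of a segment-creation payload matching the field list above; wrapping the items in a `segments` array is an assumption, since the surrounding markup was lost:

```typescript
// Assumed request-body shape for adding segments; `answer` only applies in Q&A mode.
const addSegmentsBody = {
  segments: [
    {
      content: '问题内容',   // required: text / question content
      answer: '答案内容',    // optional: only when the knowledge base is in Q&A mode
      keywords: ['关键字'],  // optional
    },
  ],
}
```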
----
+
----
+
   - content (text) 文本内容/问题内容,必填
-  - answer (text) 答案内容,非必填,如果知识库的模式为qa模式则传值
+  - answer (text) 答案内容,非必填,如果知识库的模式为 Q&A 模式则传值
   - keywords (list) 关键字,非必填
   - enabled (bool) false/true,非必填
@@ -1049,13 +1068,13 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
----
+
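The update variant adds the `enabled` flag. A minimal sketch, again assuming a `segment` wrapper object rather than a confirmed schema:

```typescript
// Assumed request body for updating one segment.
const updateSegmentBody = {
  segment: {
    content: '问题内容',   // required
    answer: '答案内容',    // optional, Q&A mode only
    keywords: ['关键字'],  // optional
    enabled: true,         // optional: false disables the segment
  },
}
```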
@@ -1069,23 +1088,23 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
 ### Request Body
-  召回关键词
+  检索关键词
-  召回参数(选填,如不填,按照默认方式召回)
+  检索参数(选填,如不填,按照默认方式召回)
   - search_method (text) 检索方法:以下四个关键字之一,必填
     - keyword_search 关键字检索
     - semantic_search 语义检索
     - full_text_search 全文检索
     - hybrid_search 混合检索
-  - reranking_enable (bool) 是否启用 Reranking,非必填,如果检索模式为semantic_search模式或者hybrid_search则传值
+  - reranking_enable (bool) 是否启用 Reranking,非必填,如果检索模式为 semantic_search 或 hybrid_search 则传值
   - reranking_mode (object) Rerank 模型配置,非必填,如果启用了 reranking 则传值
     - reranking_provider_name (string) Rerank 模型提供商
     - reranking_model_name (string) Rerank 模型名称
   - weights (double) 混合检索模式下语义检索的权重设置
   - top_k (integer) 返回结果数量,非必填
-  - score_threshold_enabled (bool) 是否开启Score阈值
-  - score_threshold (double) Score阈值
+  - score_threshold_enabled (bool) 是否开启 Score 阈值
+  - score_threshold (double) Score 阈值
   未启用字段
@@ -1096,26 +1115,26 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
 ```bash {{ title: 'cURL' }}
-    curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/hit_testing' \
+    curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/retrieve' \
     --header 'Authorization: Bearer {api_key}' \
     --header 'Content-Type: application/json' \
    --data-raw '{
@@ -1195,7 +1214,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
----
+
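To make the reranking constraints concrete, a hedged sketch of a hybrid-search retrieval configuration matching the field list above; the provider and model names are placeholders, and the wrapping field name is an assumption:

```typescript
// Per the notes above, reranking_enable must be passed for semantic_search or
// hybrid_search, and reranking_mode is required once reranking is enabled.
const retrievalModel = {
  search_method: 'hybrid_search',  // keyword_search | semantic_search | full_text_search | hybrid_search
  reranking_enable: true,
  reranking_mode: {
    reranking_provider_name: 'cohere',           // placeholder provider
    reranking_model_name: 'rerank-multilingual', // placeholder model
  },
  weights: 0.5,                   // semantic-search weight in hybrid mode
  top_k: 3,                       // optional: number of results to return
  score_threshold_enabled: false, // optional Score-threshold toggle
}
```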
diff --git a/web/app/components/app/configuration/config/code-generator/get-code-generator-res.tsx b/web/app/components/app/configuration/config/code-generator/get-code-generator-res.tsx new file mode 100644 index 0000000000..b63e3e2693 --- /dev/null +++ b/web/app/components/app/configuration/config/code-generator/get-code-generator-res.tsx @@ -0,0 +1,220 @@ +import type { FC } from 'react' +import React from 'react' +import cn from 'classnames' +import useBoolean from 'ahooks/lib/useBoolean' +import { useTranslation } from 'react-i18next' +import ConfigPrompt from '../../config-prompt' +import { languageMap } from '../../../../workflow/nodes/_base/components/editor/code-editor/index' +import { generateRuleCode } from '@/service/debug' +import type { CodeGenRes } from '@/service/debug' +import { type AppType, type Model, ModelModeType } from '@/types/app' +import Modal from '@/app/components/base/modal' +import Button from '@/app/components/base/button' +import { Generator } from '@/app/components/base/icons/src/vender/other' +import Toast from '@/app/components/base/toast' +import Loading from '@/app/components/base/loading' +import Confirm from '@/app/components/base/confirm' +import type { CodeLanguage } from '@/app/components/workflow/nodes/code/types' +import { useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks' +import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations' +import ModelIcon from '@/app/components/header/account-setting/model-provider-page/model-icon' +import ModelName from '@/app/components/header/account-setting/model-provider-page/model-name' +export type IGetCodeGeneratorResProps = { + mode: AppType + isShow: boolean + codeLanguages: CodeLanguage + onClose: () => void + onFinished: (res: CodeGenRes) => void +} + +export const GetCodeGeneratorResModal: FC = ( + { + mode, + isShow, + codeLanguages, + onClose, + onFinished, + }, +) => { + const { + currentProvider, + currentModel, + } = useModelListAndDefaultModelAndCurrentProviderAndModel(ModelTypeEnum.textGeneration) + const { t } = useTranslation() + const [instruction, setInstruction] = React.useState('') + const [isLoading, { setTrue: setLoadingTrue, setFalse: setLoadingFalse }] = useBoolean(false) + const [res, setRes] = React.useState(null) + const isValid = () => { + if (instruction.trim() === '') { + Toast.notify({ + type: 'error', + message: t('common.errorMsg.fieldRequired', { + field: t('appDebug.code.instruction'), + }), + }) + return false + } + return true + } + const model: Model = { + provider: currentProvider?.provider || '', + name: currentModel?.model || '', + mode: ModelModeType.chat, + // This is a fixed parameter + completion_params: { + temperature: 0.7, + max_tokens: 0, + top_p: 0, + echo: false, + stop: [], + presence_penalty: 0, + frequency_penalty: 0, + }, + } + const isInLLMNode = true + const onGenerate = async () => { + if (!isValid()) + return + if (isLoading) + return + setLoadingTrue() + try { + const { error, ...res } = await generateRuleCode({ + instruction, + model_config: model, + no_variable: !!isInLLMNode, + code_language: languageMap[codeLanguages] || 'javascript', + }) + setRes(res) + if (error) { + Toast.notify({ + type: 'error', + message: error, + }) + } + } + finally { + setLoadingFalse() + } + } + const [showConfirmOverwrite, setShowConfirmOverwrite] = React.useState(false) + + const renderLoading = ( +
+ +
{t('appDebug.codegen.loading')}
+
+ ) + + return ( + +
+
+
+
{t('appDebug.codegen.title')}
+
{t('appDebug.codegen.description')}
+
+
+ + +
+
+
+
{t('appDebug.codegen.instruction')}
+ + // + // + const { onSend } = useChatContext() + + const getFormValues = (children: any) => { + const formValues: { [key: string]: any } = {} + children.forEach((child: any) => { + if (child.tagName === SUPPORTED_TAGS.INPUT) + formValues[child.properties.name] = child.properties.value + if (child.tagName === SUPPORTED_TAGS.TEXTAREA) + formValues[child.properties.name] = child.properties.value + }) + return formValues + } + const onSubmit = (e: any) => { + e.preventDefault() + const format = node.properties.dataFormat || DATA_FORMAT.TEXT + const result = getFormValues(node.children) + if (format === DATA_FORMAT.JSON) { + onSend?.(JSON.stringify(result)) + } + else { + const textResult = Object.entries(result) + .map(([key, value]) => `${key}: ${value}`) + .join('\n') + onSend?.(textResult) + } + } + return ( +
{ + e.preventDefault() + e.stopPropagation() + }} + > + {node.children.filter((i: any) => i.type === 'element').map((child: any, index: number) => { + if (child.tagName === SUPPORTED_TAGS.LABEL) { + return ( + + ) + } + if (child.tagName === SUPPORTED_TAGS.INPUT) { + if (Object.values(SUPPORTED_TYPES).includes(child.properties.type)) { + return ( + { + e.preventDefault() + child.properties.value = e.target.value + }} + /> + ) + } + else { + return

Unsupported input type: {child.properties.type}

+ } + } + if (child.tagName === SUPPORTED_TAGS.TEXTAREA) { + return ( +