vella-backend / setup / easy_imports.py
luanpoppe
feat: add support for requests via Gemini, add support for requests via DeepSeek, default to not using LlamaParse, and change the default to make fewer contextual requests while handling responses that contain several chunks at once
e70ffc1
"""Centralized third-party imports for vella-backend.

Application modules pull DRF, LangChain, embedding, vector-store and
ranking utilities from this single module, so upstream import-path
changes only need to be fixed in one place.
"""

# Django REST Framework: sync and async views, OpenAPI schema decoration,
# responses and multipart upload parsing.
from adrf.views import APIView as AsyncAPIView
from drf_spectacular.utils import extend_schema
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.parsers import MultiPartParser
# LangChain: text splitting, HuggingFace embeddings, prompt templates,
# PDF loading and the Chroma vector store.
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_huggingface import HuggingFaceEmbeddings
# Replaces the deprecated langchain_community.embeddings.HuggingFaceEmbeddings path.
from langchain.prompts import PromptTemplate
from langchain_core.prompts import ChatPromptTemplate
from langchain_community.document_loaders import PyPDFLoader
from langchain_community.vectorstores import Chroma
# Chat models (Gemini via langchain-google-genai, OpenAI-compatible APIs via
# langchain-openai), core document schema and the extraction chain helper.
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_openai import ChatOpenAI
# Replaces the deprecated langchain_community.chat_models.ChatOpenAI path.
from langchain.schema import Document
from langchain.chains import create_extraction_chain
# BM25 keyword ranking.
from rank_bm25 import BM25Okapi
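# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not taken from the repository): how the
# chat-model classes re-exported above could be configured for the Gemini and
# DeepSeek requests mentioned in the commit message. The model names, the
# DeepSeek base URL and the environment-variable name are assumptions, and
# other modules would presumably import these re-exports via something like
# `from setup.easy_imports import ChatGoogleGenerativeAI, ChatOpenAI`.
# ---------------------------------------------------------------------------
import os


def build_chat_model(provider: str):
    """Return a LangChain chat model for the given provider (sketch)."""
    if provider == "gemini":
        # ChatGoogleGenerativeAI reads GOOGLE_API_KEY from the environment
        # when no key is passed explicitly.
        return ChatGoogleGenerativeAI(model="gemini-1.5-flash")
    if provider == "deepseek":
        # DeepSeek exposes an OpenAI-compatible API, so ChatOpenAI can be
        # pointed at it through base_url; endpoint and model are assumed here.
        return ChatOpenAI(
            model="deepseek-chat",
            base_url="https://api.deepseek.com",
            api_key=os.environ.get("DEEPSEEK_API_KEY", ""),
        )
    # Fallback: a regular OpenAI-backed model.
    return ChatOpenAI(model="gpt-4o-mini")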