Rohil Bansal committed
Commit · dc3ed8e
1 Parent(s): 48676d6
fixed
src/__pycache__/index.cpython-312.pyc
CHANGED
Binary files a/src/__pycache__/index.cpython-312.pyc and b/src/__pycache__/index.cpython-312.pyc differ
src/__pycache__/llm.cpython-312.pyc
CHANGED
Binary files a/src/__pycache__/llm.cpython-312.pyc and b/src/__pycache__/llm.cpython-312.pyc differ
src/index.py
CHANGED
@@ -38,8 +38,8 @@ try:
         api_key=api_key,
         api_version=api_version,
         azure_endpoint=azure_endpoint,
-
-        temperature=0.3
+        azure_deployment="gpt-4o",
+        temperature=0.3,
     )
     print("Azure OpenAI embeddings and model set up successfully.")
 except Exception as e:
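The fix pins the chat client to a concrete Azure deployment: with langchain_openai's AzureChatOpenAI, azure_deployment selects which deployed model on the endpoint serves the requests; the hunk also drops a stray blank keyword line and adds a trailing comma. The sketch below shows the full constructor call this hunk modifies; the enclosing assignment, the import, and the environment-variable names are assumptions, since only lines 38-45 of src/index.py appear in the commit.

# Sketch of the modified call in src/index.py; only the keyword arguments and
# the "gpt-4o" deployment name come from the diff, the rest is assumed.
import os
from langchain_openai import AzureChatOpenAI

api_key = os.environ["AZURE_OPENAI_API_KEY"]              # assumed variable sources
api_version = os.environ.get("OPENAI_API_VERSION", "2024-02-01")
azure_endpoint = os.environ["AZURE_OPENAI_ENDPOINT"]

llm = AzureChatOpenAI(
    api_key=api_key,
    api_version=api_version,
    azure_endpoint=azure_endpoint,
    azure_deployment="gpt-4o",   # the added argument: route calls to this deployment
    temperature=0.3,
)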
src/llm.py
CHANGED
@@ -7,7 +7,7 @@ from typing import Literal
 
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_core.pydantic_v1 import BaseModel, Field
-from langchain_openai import AzureChatOpenAI
+# from langchain_openai import AzureChatOpenAI
 
 #%%
 # Data model
@@ -38,13 +38,6 @@ route_prompt = ChatPromptTemplate.from_messages(
 
 #%%
 question_router = route_prompt | structured_llm_router
-print(
-    question_router.invoke(
-        {"question": "Who will the Bears draft first in the NFL draft?"}
-    )
-)
-print(question_router.invoke({"question": "What are the types of agent memory?"}))
-
 
 # %%
 ### Retrieval Grader
@@ -90,7 +83,8 @@ from langchain_core.output_parsers import StrOutputParser
 prompt = hub.pull("rlm/rag-prompt")
 
 # LLM
-llm = AzureChatOpenAI(model_name="gpt-4o-mini", temperature=0.3)
+# llm = AzureChatOpenAI(model_name="gpt-4o-mini", temperature=0.3)
+
 
 
 # Post-processing
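In src/llm.py the commit comments out the module-local AzureChatOpenAI import and the gpt-4o-mini instantiation (the shared model configured in src/index.py is presumably used instead), and it deletes two import-time smoke tests that invoked question_router as soon as the module loaded. If that quick check is still wanted, one option is to keep it behind a __main__ guard so importing llm.py no longer triggers LLM calls; a minimal sketch reusing the removed calls:

# Hypothetical relocation of the removed smoke test; question_router is the
# route_prompt | structured_llm_router chain defined earlier in src/llm.py.
if __name__ == "__main__":
    print(
        question_router.invoke(
            {"question": "Who will the Bears draft first in the NFL draft?"}
        )
    )
    print(question_router.invoke({"question": "What are the types of agent memory?"}))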
vectordb/08d73b15-e800-45c5-a450-5b9d696166f3/length.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c46bdbd91f9ce31abb4f696e2ccd22cf2238401c3164a6636e356a2b808009fc
 size 4000
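The last change updates the Git LFS pointer for length.bin: an LFS pointer records the SHA-256 of the tracked file as its oid, and while the old pointer's hash is not shown in full in this view, the new one carries the complete digest. A small sketch of how that oid relates to the file contents; the path is the one from this diff, and recomputing it locally assumes the actual binary has been pulled with git lfs.

# The "oid sha256:<hash>" line in an LFS pointer is the SHA-256 of the tracked
# file's contents; this recomputes it for a local copy of length.bin.
import hashlib

def lfs_oid(path: str) -> str:
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(8192), b""):
            digest.update(chunk)
    return digest.hexdigest()

print("oid sha256:" + lfs_oid("vectordb/08d73b15-e800-45c5-a450-5b9d696166f3/length.bin"))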