Commit 275d03f by Damien Benveniste
Parent: 196de9f

corrected
Files changed:
- app/__pycache__/callbacks.cpython-312.pyc      +0 -0
- app/__pycache__/chains.cpython-312.pyc         +0 -0
- app/__pycache__/crud.cpython-312.pyc           +0 -0
- app/__pycache__/data_indexing.cpython-312.pyc  +0 -0
- app/__pycache__/database.cpython-312.pyc       +0 -0
- app/__pycache__/main.cpython-312.pyc           +0 -0
- app/__pycache__/models.cpython-312.pyc         +0 -0
- app/__pycache__/prompts.cpython-312.pyc        +0 -0
- app/chains.py        +8 -9
- app/crud.py          +1 -2
- app/data_indexing.py +2 -2
- app/main.py          +1 -1
- app/prompts.py       +4 -4
app/__pycache__/callbacks.cpython-312.pyc ADDED: binary file (1.9 kB)
app/__pycache__/chains.cpython-312.pyc ADDED: binary file (2.1 kB)
app/__pycache__/crud.cpython-312.pyc ADDED: binary file (1.94 kB)
app/__pycache__/data_indexing.cpython-312.pyc ADDED: binary file (7.39 kB)
app/__pycache__/database.cpython-312.pyc ADDED: binary file (622 Bytes)
app/__pycache__/main.cpython-312.pyc ADDED: binary file (6.69 kB)
app/__pycache__/models.cpython-312.pyc ADDED: binary file (1.37 kB)
app/__pycache__/prompts.cpython-312.pyc ADDED: binary file (2.46 kB)
app/chains.py CHANGED

@@ -6,8 +6,8 @@ from prompts import (
     raw_prompt,
     raw_prompt_formatted,
     history_prompt_formatted,
-
-
+    standalone_prompt_formatted,
+    rag_prompt_formatted,
     format_context,
     tokenizer
 )
@@ -16,6 +16,7 @@ from data_indexing import DataIndexer
 
 data_indexer = DataIndexer()
 
+
 llm = HuggingFaceEndpoint(
     repo_id="meta-llama/Meta-Llama-3-8B-Instruct",
     huggingfacehub_api_token=os.environ['HF_TOKEN'],
@@ -37,15 +38,13 @@ history_chain = (
 ).with_types(input_type=schemas.HistoryInput)
 
 rag_chain = (
-    {
-        'question': question_prompt_formatted | llm,
-        'hybrid_search': RunnablePassthrough()
-    }
+    RunnablePassthrough.assign(new_question=standalone_prompt_formatted | llm)
     | {
-        'context': lambda x: format_context(data_indexer.search(x['
-        'standalone_question': lambda x: x['
+        'context': lambda x: format_context(data_indexer.search(x['new_question'], hybrid_search=x['hybrid_search'])),
+        'standalone_question': lambda x: x['new_question'],
+        'test': lambda x: print(x)
     }
-
+    | rag_prompt_formatted
     | llm
 ).with_types(input_type=schemas.RagInput)
 
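The reworked rag_chain is easier to follow with a runnable stand-in. The sketch below reproduces the same LCEL shape with a dummy LLM and retriever so it runs offline; the input keys ('question', 'hybrid_search'), the stand-in bodies, and the prompt texts are assumptions, and the debug 'test' branch and .with_types() typing are omitted.

from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import RunnableLambda, RunnablePassthrough

# Hypothetical prompt texts standing in for standalone_prompt_formatted
# and rag_prompt_formatted from prompts.py.
standalone_prompt_formatted = PromptTemplate.from_template(
    "Rephrase as a standalone question: {question}"
)
rag_prompt_formatted = PromptTemplate.from_template(
    "Answer using only this context:\n{context}\n\nQuestion: {standalone_question}"
)

# Dummy LLM: echoes a prefix of the prompt instead of calling HuggingFaceEndpoint.
llm = RunnableLambda(lambda p: f"[llm: {p.to_string()[:40]}...]")

def search(query: str, hybrid_search: bool = False):
    # Stand-in for data_indexer.search(); returns canned documents.
    return ["doc A", "doc B"]

def format_context(docs):
    return "\n".join(docs)

rag_chain = (
    # Keep the original input dict and add the rephrased question to it.
    RunnablePassthrough.assign(new_question=standalone_prompt_formatted | llm)
    | {
        'context': lambda x: format_context(search(x['new_question'], hybrid_search=x['hybrid_search'])),
        'standalone_question': lambda x: x['new_question'],
    }
    | rag_prompt_formatted
    | llm
)

print(rag_chain.invoke({'question': 'What changed in this commit?', 'hybrid_search': False}))

The key fix is RunnablePassthrough.assign: unlike the old dict, which replaced the input wholesale, assign keeps 'hybrid_search' available downstream alongside the new 'new_question' key, and the previously missing rag_prompt_formatted step now sits between the parallel block and the LLM.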
app/crud.py CHANGED

@@ -1,6 +1,5 @@
 from sqlalchemy.orm import Session
 import models, schemas
-from fastapi import HTTPException
 
 
 def get_or_create_user(db: Session, username: str):
@@ -25,4 +24,4 @@ def get_user_chat_history(db: Session, username: str):
     user = db.query(models.User).filter(models.User.username == username).first()
     if not user:
         return []
-    return
+    return user.messages
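Read together, the two hunks suggest the corrected helper now looks like the sketch below; the models.User.messages relationship is implied by the new return line but not shown in this diff.

from sqlalchemy.orm import Session
import models

def get_user_chat_history(db: Session, username: str):
    user = db.query(models.User).filter(models.User.username == username).first()
    if not user:
        # No such user: return an empty history instead of raising
        # (consistent with dropping the HTTPException import above).
        return []
    # Assumed: models.User defines a 'messages' relationship.
    return user.messages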
app/data_indexing.py CHANGED

@@ -38,8 +38,8 @@ class DataIndexer:
         )
 
         self.index = self.pinecone_client.Index(self.index_name)
-        self.source_index = self.get_source_index()
-
+        # self.source_index = self.get_source_index()
+        self.source_index = None
 
     def get_source_index(self):
         if not os.path.isfile(self.source_file):
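This change disables get_source_index() at construction time, presumably because it failed on startup. If the goal is graceful degradation rather than removal, a guarded variant keeps the call; in the sketch below, everything except the method name, the os.path.isfile guard, and the None fallback is an assumption.

import os

class DataIndexer:
    source_file = "sources.txt"  # hypothetical path; the real value is not in the diff

    def __init__(self):
        # Fall back to None on failure instead of commenting the call out,
        # so the index is still built whenever the environment allows it.
        try:
            self.source_index = self.get_source_index()
        except Exception:
            self.source_index = None

    def get_source_index(self):
        if not os.path.isfile(self.source_file):
            return None
        with open(self.source_file) as f:
            return [line.strip() for line in f if line.strip()]

if __name__ == "__main__":
    print(DataIndexer().source_index)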
app/main.py CHANGED

@@ -7,7 +7,7 @@ from langserve.serialization import WellKnownLCSerializer
 from typing import Any, List
 import crud, models, schemas
 from database import SessionLocal, engine
-from chains import
+from chains import simple_chain, formatted_chain, history_chain, rag_chain
 from prompts import format_chat_history
 from callbacks import LogResponseCallback
 
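The old line was truncated to a bare `from chains import`, which is a SyntaxError at import time and by itself would prevent the app from starting. Once the four chains import cleanly, a LangServe app typically exposes them with add_routes; the mount points below are hypothetical, since only the import line appears in this diff.

from fastapi import FastAPI
from langserve import add_routes

from chains import simple_chain, formatted_chain, history_chain, rag_chain

app = FastAPI()

# Hypothetical route paths; one endpoint per chain.
add_routes(app, simple_chain, path="/simple")
add_routes(app, formatted_chain, path="/formatted")
add_routes(app, history_chain, path="/history")
add_routes(app, rag_chain, path="/rag")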
app/prompts.py CHANGED

@@ -17,7 +17,7 @@ Follow Up question: {question}
 helpful answer:
 """
 
-
+standalone_prompt = """
 Given the following conversation and a follow up question, rephrase the
 follow up question to be a standalone question, in its original language.
 
@@ -29,7 +29,7 @@ Follow Up Input: {question}
 Standalone question:
 """
 
-
+rag_prompt = """
 Answer the question based only on the following context:
 {context}
 
@@ -65,6 +65,6 @@ def format_context(docs: List[str]):
 raw_prompt_formatted = format_prompt(raw_prompt)
 raw_prompt = PromptTemplate.from_template(raw_prompt)
 history_prompt_formatted = format_prompt(history_prompt)
-
-
+standalone_prompt_formatted = format_prompt(standalone_prompt)
+rag_prompt_formatted = format_prompt(rag_prompt)
 
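Both new templates flow through format_prompt(), whose body is outside this diff. Since prompts.py exports a tokenizer and chains.py targets Meta-Llama-3-8B-Instruct, a plausible implementation (an assumption, not the repo's code) wraps each raw template in the model's chat template before building a PromptTemplate:

from langchain_core.prompts import PromptTemplate
from transformers import AutoTokenizer

# Same repo_id as chains.py; the model is gated, so this needs HF access.
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3-8B-Instruct")

def format_prompt(prompt: str) -> PromptTemplate:
    # Assumed behavior: wrap the raw template in the instruct chat format,
    # leaving {placeholders} intact for PromptTemplate to fill later.
    chat = [{"role": "user", "content": prompt}]
    formatted = tokenizer.apply_chat_template(
        chat, tokenize=False, add_generation_prompt=True
    )
    return PromptTemplate.from_template(formatted)

standalone_prompt_formatted = format_prompt("Standalone question: {question}")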