Update app.py
app.py CHANGED

@@ -38,11 +38,17 @@ if uploaded_file:
     st.write("Preview of uploaded data:")
     st.dataframe(data)

-    #
+    # Save the uploaded file to a temporary location
     with tempfile.NamedTemporaryFile(delete=False, suffix=".csv", mode="w", encoding="utf-8") as temp_file:
         temp_file_path = temp_file.name
         data.to_csv(temp_file.name, index=False, encoding="utf-8")
-        temp_file.flush()  # Ensure data is written
+        temp_file.flush()  # Ensure all data is written to the file
+
+    # Debugging: Verify the temporary file
+    st.write("Temporary file path:", temp_file_path)
+    with open(temp_file_path, "r") as f:
+        st.write("Temporary file content:")
+        st.text(f.read())

     # Tabs for LangChain and LlamaIndex
     tab1, tab2 = st.tabs(["LangChain", "LlamaIndex"])

@@ -52,12 +58,13 @@ if uploaded_file:
         st.subheader("LangChain Query")
         try:
             # Use CSVLoader with the temporary file path
+            st.write("Loading file with LangChain CSVLoader...")
             loader = CSVLoader(file_path=temp_file_path)
             docs = loader.load_and_split()

-            # Preview
+            # Debugging: Preview loaded documents
+            st.write("Successfully loaded documents:")
             if docs:
-                st.write("Preview of a document chunk (LangChain):")
                 st.text(docs[0].page_content)

             # Create FAISS VectorStore

@@ -96,12 +103,13 @@ if uploaded_file:
         st.subheader("LlamaIndex Query")
         try:
             # Use PagedCSVReader directly on the uploaded file
+            st.write("Loading file with LlamaIndex PagedCSVReader...")
             csv_reader = PagedCSVReader()
             docs = csv_reader.load_from_file(temp_file_path)

-            # Preview
+            # Debugging: Preview loaded documents
+            st.write("Successfully loaded documents:")
             if docs:
-                st.write("Preview of a document chunk (LlamaIndex):")
                 st.text(docs[0].text)

             # Initialize FAISS Vector Store
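For quick local verification of the flow this commit instruments, the snippet below is a minimal, self-contained sketch rather than part of app.py: it assumes pandas and langchain-community are installed, substitutes a small hand-built DataFrame (the illustrative name sample_data) for the uploaded file, uses print in place of the st.write/st.text calls, and only exercises the temp-file save, the read-back check, and the LangChain CSVLoader path.

# Minimal local sketch of the flow instrumented in this commit (assumption:
# sample_data and print stand in for the uploaded DataFrame and the Streamlit calls).
import tempfile

import pandas as pd
from langchain_community.document_loaders import CSVLoader

# Stand-in for the DataFrame parsed from the uploaded CSV.
sample_data = pd.DataFrame({"name": ["Ada", "Grace"], "year": [1843, 1952]})

# Save the DataFrame to a temporary CSV; delete=False keeps the file on disk
# after the context manager exits, as in the diff above.
with tempfile.NamedTemporaryFile(delete=False, suffix=".csv", mode="w", encoding="utf-8") as temp_file:
    temp_file_path = temp_file.name
    sample_data.to_csv(temp_file.name, index=False, encoding="utf-8")
    temp_file.flush()  # Ensure all data is written to the file

# Debugging: verify the temporary file by reading it back.
print("Temporary file path:", temp_file_path)
with open(temp_file_path, "r", encoding="utf-8") as f:
    print("Temporary file content:")
    print(f.read())

# Load the temporary CSV with LangChain's CSVLoader and preview the first chunk.
loader = CSVLoader(file_path=temp_file_path)
docs = loader.load_and_split()
if docs:
    print("Successfully loaded documents:")
    print(docs[0].page_content)

The LlamaIndex branch is left out of the sketch; it would follow the same save-then-load pattern around PagedCSVReader shown in the diff.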