Starberry15 committed

Commit 42984b6 · verified · 1 Parent(s): 9ee32a9

Update src/streamlit_app.py

Files changed (1)
  1. src/streamlit_app.py +20 -3
src/streamlit_app.py CHANGED
@@ -161,7 +161,7 @@ def search_index(query: str, index, meta, top_k: int, threshold: float) -> List[
     return results
 
 def generate_answer(context: str, query: str) -> str:
-    """Generate robust answer with explicit citations."""
+    """Generate robust answer with explicit citations — auto-switches between endpoints."""
     prompt = f"""
 You are a precise academic assistant specialized in university policies.
 Use only the provided *USTP Student Handbook 2023 Edition* content as reference.
@@ -181,6 +181,10 @@ If the answer is not explicitly found, respond with:
 - Do NOT invent or assume any information not in the handbook.
 """
 
+    if not hf_client:
+        return "❌ Hugging Face client not initialized."
+
+    # Try standard text-generation first
     try:
         response = hf_client.text_generation(
             model=DEFAULT_MODEL,
@@ -190,8 +194,21 @@ If the answer is not explicitly found, respond with:
             repetition_penalty=1.1,
         )
         return response
-    except Exception as e:
-        return f"⚠️ Error generating answer: {e}"
+    except Exception as e1:
+        # If it fails, automatically switch to conversational API
+        try:
+            chat_response = hf_client.chat.completions.create(
+                model=DEFAULT_MODEL,
+                messages=[
+                    {"role": "system", "content": "You are a precise and factual handbook assistant."},
+                    {"role": "user", "content": prompt},
+                ],
+                max_tokens=400,
+                temperature=0.25,
+            )
+            return chat_response.choices[0].message["content"]
+        except Exception as e2:
+            return f"⚠️ Error generating answer: {e2}"
 
 # =============================================================
 # 🔍 Index Handling
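
Below is a minimal, self-contained sketch of the fallback path this commit introduces, for readers who want to try it outside the app. The InferenceClient construction, the token placeholder, the DEFAULT_MODEL value, and the shortened prompt are assumptions for illustration only; the diff above does not show how hf_client, DEFAULT_MODEL, or the full handbook prompt are defined elsewhere in src/streamlit_app.py.

    # Minimal sketch; model id, token source, and prompt are placeholders, not the app's real values.
    from huggingface_hub import InferenceClient

    DEFAULT_MODEL = "mistralai/Mistral-7B-Instruct-v0.3"   # hypothetical model id
    hf_client = InferenceClient(token="hf_...")            # token would normally come from st.secrets or an env var

    def generate_answer(context: str, query: str) -> str:
        prompt = f"Context:\n{context}\n\nQuestion: {query}"  # simplified stand-in for the handbook prompt
        if not hf_client:
            return "❌ Hugging Face client not initialized."
        try:
            # First attempt: the plain text-generation endpoint
            return hf_client.text_generation(
                prompt,
                model=DEFAULT_MODEL,
                max_new_tokens=400,
                temperature=0.25,
                repetition_penalty=1.1,
            )
        except Exception:
            # Fallback: OpenAI-style chat completions, for models served only under the conversational task
            chat_response = hf_client.chat.completions.create(
                model=DEFAULT_MODEL,
                messages=[{"role": "user", "content": prompt}],
                max_tokens=400,
                temperature=0.25,
            )
            return chat_response.choices[0].message.content

    print(generate_answer("Section 5: Attendance policy ...", "What is the attendance policy?"))

The point of the two-step call is that some hosted models only expose the conversational (chat) task, in which case a plain text_generation request errors out; retrying through chat.completions.create keeps the app answering without the user having to pick an endpoint.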