prernajeet01 committed on
Commit
22f579b
·
verified ·
1 Parent(s): f3fa727

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -15
app.py CHANGED
@@ -10,7 +10,6 @@ from langchain_community.vectorstores import FAISS
10
  from langchain.chains import RetrievalQA
11
  from langchain_community.chat_models import BedrockChat
12
  from langchain_openai import ChatOpenAI
13
- from langchain_community.llms import Ollama
14
  from langchain.schema import Document
15
  from pathlib import Path
16
  from typing import List, Union
@@ -96,11 +95,6 @@ class AuditAgent:
96
  openai_api_key=api_keys["openai_key"],
97
  temperature=0.2
98
  )
99
- elif provider == "ollama":
100
- try:
101
- self.llm = Ollama(model=model_name)
102
- except Exception as e:
103
- raise ValueError(f"Failed to initialize Ollama model: {str(e)}")
104
  else:
105
  raise ValueError(f"Unsupported provider: {provider}")
106
 
@@ -128,10 +122,8 @@ class AuditAgent:
128
  ]
129
  )
130
  return response.content
131
- else: # Ollama
132
- full_prompt = f"{system_prompt}\n\nUser: {query}\nAssistant:"
133
- response = self.llm.invoke(full_prompt)
134
- return response
135
  except Exception as e:
136
  return f"Error processing query: {str(e)}"
137
 
@@ -287,7 +279,7 @@ class AuditAgent:
287
  except Exception as e:
288
  return f"Error querying documents: {str(e)}"
289
 
290
- # Available LLM configurations
291
  llm_configs = {
292
  "claude-3-sonnet": {
293
  "name": "anthropic.claude-3-sonnet-20240229-v1:0",
@@ -304,10 +296,10 @@ llm_configs = {
304
  "provider": "openai",
305
  "description": "Fast responses"
306
  },
307
- "openorca-mini": {
308
- "name": "openorca-mini",
309
- "provider": "ollama",
310
- "description": "Local lightweight model"
311
  }
312
  }
313
 
 
10
  from langchain.chains import RetrievalQA
11
  from langchain_community.chat_models import BedrockChat
12
  from langchain_openai import ChatOpenAI
 
13
  from langchain.schema import Document
14
  from pathlib import Path
15
  from typing import List, Union
 
95
  openai_api_key=api_keys["openai_key"],
96
  temperature=0.2
97
  )
 
 
 
 
 
98
  else:
99
  raise ValueError(f"Unsupported provider: {provider}")
100
 
 
122
  ]
123
  )
124
  return response.content
125
+ else:
126
+ raise ValueError(f"Unsupported provider: {self.provider}")
 
 
127
  except Exception as e:
128
  return f"Error processing query: {str(e)}"
129
 
 
279
  except Exception as e:
280
  return f"Error querying documents: {str(e)}"
281
 
282
+ # Updated LLM configurations - replaced openorca-mini with o3-mini
283
  llm_configs = {
284
  "claude-3-sonnet": {
285
  "name": "anthropic.claude-3-sonnet-20240229-v1:0",
 
296
  "provider": "openai",
297
  "description": "Fast responses"
298
  },
299
+ "o3-mini": {
300
+ "name": "o3-mini",
301
+ "provider": "openai",
302
+ "description": "Compact OpenAI model"
303
  }
304
  }
305