Upload 14 files
Browse files
- orchestrator.py +1 -1
- rag_pipeline.py +2 -2
- test_enhanced_responses.py +2 -2
- test_optimizer.py +2 -2
orchestrator.py
CHANGED
|
@@ -33,7 +33,7 @@ from tax_optimizer import TaxOptimizer
|
|
| 33 |
RULES_PATH = "rules/rules_all.yaml" # adjust if yours is different
|
| 34 |
PDF_SOURCE = "data" # folder or a single PDF
|
| 35 |
EMBED_MODEL = "sentence-transformers/all-MiniLM-L6-v2"
|
| 36 |
-
GROQ_MODEL = "llama-3.1-70b-versatile"
|
| 37 |
|
| 38 |
# Use /tmp for vector store in Hugging Face Spaces (writable directory)
|
| 39 |
VECTOR_STORE_DIR = os.getenv('VECTOR_STORE_DIR', '/tmp/vector_store')
|
|
|
|
| 33 |
RULES_PATH = "rules/rules_all.yaml" # adjust if yours is different
|
| 34 |
PDF_SOURCE = "data" # folder or a single PDF
|
| 35 |
EMBED_MODEL = "sentence-transformers/all-MiniLM-L6-v2"
|
| 36 |
+
GROQ_MODEL = "llama-3.3-70b-versatile"
|
| 37 |
|
| 38 |
# Use /tmp for vector store in Hugging Face Spaces (writable directory)
|
| 39 |
VECTOR_STORE_DIR = os.getenv('VECTOR_STORE_DIR', '/tmp/vector_store')
|
rag_pipeline.py
CHANGED
|
@@ -344,7 +344,7 @@ class RAGPipeline:
|
|
| 344 |
def __init__(
|
| 345 |
self,
|
| 346 |
doc_store: DocumentStore,
|
| 347 |
-
model: str = "llama-3.1-70b-versatile",
|
| 348 |
temperature: float = 0.1,
|
| 349 |
max_tokens: int = 4096,
|
| 350 |
top_k: int = 8,
|
|
@@ -1106,7 +1106,7 @@ def main():
|
|
| 1106 |
parser.add_argument(
|
| 1107 |
"--model",
|
| 1108 |
type=str,
|
| 1109 |
-
default="llama-3.1-70b-versatile",
|
| 1110 |
help="Groq model name"
|
| 1111 |
)
|
| 1112 |
parser.add_argument(
|
|
|
|
| 344 |
def __init__(
|
| 345 |
self,
|
| 346 |
doc_store: DocumentStore,
|
| 347 |
+
model: str = "llama-3.3-70b-versatile",
|
| 348 |
temperature: float = 0.1,
|
| 349 |
max_tokens: int = 4096,
|
| 350 |
top_k: int = 8,
|
|
|
|
| 1106 |
parser.add_argument(
|
| 1107 |
"--model",
|
| 1108 |
type=str,
|
| 1109 |
+
default="llama-3.3-70b-versatile",
|
| 1110 |
help="Groq model name"
|
| 1111 |
)
|
| 1112 |
parser.add_argument(
|
test_enhanced_responses.py
CHANGED
|
@@ -28,7 +28,7 @@ def test_student_question():
|
|
| 28 |
# Initialize RAG
|
| 29 |
rag = RAGPipeline(
|
| 30 |
doc_store=doc_store,
|
| 31 |
-
model="llama-3.1-70b-versatile",
|
| 32 |
temperature=0.1,
|
| 33 |
top_k=8
|
| 34 |
)
|
|
@@ -74,7 +74,7 @@ def test_multiple_personas():
|
|
| 74 |
|
| 75 |
rag = RAGPipeline(
|
| 76 |
doc_store=doc_store,
|
| 77 |
-
model="llama-3.1-70b-versatile",
|
| 78 |
temperature=0.1,
|
| 79 |
top_k=6
|
| 80 |
)
|
|
|
|
| 28 |
# Initialize RAG
|
| 29 |
rag = RAGPipeline(
|
| 30 |
doc_store=doc_store,
|
| 31 |
+
model="llama-3.3-70b-versatile",
|
| 32 |
temperature=0.1,
|
| 33 |
top_k=8
|
| 34 |
)
|
|
|
|
| 74 |
|
| 75 |
rag = RAGPipeline(
|
| 76 |
doc_store=doc_store,
|
| 77 |
+
model="llama-3.3-70b-versatile",
|
| 78 |
temperature=0.1,
|
| 79 |
top_k=6
|
| 80 |
)
|
test_optimizer.py
CHANGED
|
@@ -200,7 +200,7 @@ def test_with_rag():
|
|
| 200 |
)
|
| 201 |
pdfs = doc_store.discover_pdfs(pdf_source)
|
| 202 |
doc_store.build_vector_store(pdfs, force_rebuild=False)
|
| 203 |
-
rag = RAGPipeline(doc_store=doc_store, model="llama-3.1-70b-versatile", temperature=0.1)
|
| 204 |
|
| 205 |
# Initialize tax engine
|
| 206 |
catalog = RuleCatalog.from_yaml_files(["rules/rules_all.yaml"])
|
|
@@ -293,7 +293,7 @@ def test_high_earner():
|
|
| 293 |
)
|
| 294 |
pdfs = doc_store.discover_pdfs(pdf_source)
|
| 295 |
doc_store.build_vector_store(pdfs, force_rebuild=False)
|
| 296 |
-
rag = RAGPipeline(doc_store=doc_store, model="llama-3.1-70b-versatile", temperature=0.1)
|
| 297 |
|
| 298 |
# Initialize tax engine
|
| 299 |
catalog = RuleCatalog.from_yaml_files(["rules/rules_all.yaml"])
|
|
|
|
| 200 |
)
|
| 201 |
pdfs = doc_store.discover_pdfs(pdf_source)
|
| 202 |
doc_store.build_vector_store(pdfs, force_rebuild=False)
|
| 203 |
+
rag = RAGPipeline(doc_store=doc_store, model="llama-3.3-70b-versatile", temperature=0.1)
|
| 204 |
|
| 205 |
# Initialize tax engine
|
| 206 |
catalog = RuleCatalog.from_yaml_files(["rules/rules_all.yaml"])
|
|
|
|
| 293 |
)
|
| 294 |
pdfs = doc_store.discover_pdfs(pdf_source)
|
| 295 |
doc_store.build_vector_store(pdfs, force_rebuild=False)
|
| 296 |
+
rag = RAGPipeline(doc_store=doc_store, model="llama-3.3-70b-versatile", temperature=0.1)
|
| 297 |
|
| 298 |
# Initialize tax engine
|
| 299 |
catalog = RuleCatalog.from_yaml_files(["rules/rules_all.yaml"])
|