Aqs-shispare committed on
Commit
18a03f1
·
1 Parent(s): b357a2f

deploy on HF

Browse files
Dockerfile ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
# Backend image for deployment on a Hugging Face Space (Docker SDK).
FROM python:3.10

# Don't buffer stdout/stderr so container logs appear immediately.
ENV PYTHONUNBUFFERED=1

WORKDIR /app

# Install dependencies first so this layer is cached across code-only changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

COPY . .

# Run as a non-root user (uid 1000), as recommended for HF Spaces Docker images.
RUN useradd -m -u 1000 appuser && chown -R appuser /app
USER appuser

# HF Spaces routes external traffic to port 7860.
CMD ["uvicorn", "api.main:app", "--host", "0.0.0.0", "--port", "7860"]
api/main.py CHANGED
@@ -1,23 +1,11 @@
1
  # main.py
2
  import os
3
- from pathlib import Path
4
  from contextlib import asynccontextmanager
5
  import logging
6
-
7
- # Load .env from backend root first (so OPENROUTER_API_KEY etc. are set regardless of cwd)
8
- _backend_root = Path(__file__).resolve().parent.parent
9
- _env_file = _backend_root / ".env"
10
- if _env_file.exists():
11
- from dotenv import load_dotenv
12
- load_dotenv(_env_file)
13
- logging.getLogger(__name__).info(f"Loaded env from {_env_file}")
14
- else:
15
- logging.getLogger(__name__).warning(f"No .env file at {_env_file}")
16
-
17
- from fastapi import FastAPI
18
  from fastapi.middleware.cors import CORSMiddleware
19
 
20
- from utils.config import get_settings, get_environment
21
  from api.routes import router
22
  from services.langchain_service import LangChainService
23
  from services.memory_service import MemoryService
@@ -30,8 +18,8 @@ logging.basicConfig(
30
  )
31
  logger = logging.getLogger(__name__)
32
 
33
- # Get settings
34
- settings = get_settings()
35
 
36
  # Service instances (will be initialized in lifespan)
37
  langchain_service: LangChainService = None
@@ -82,8 +70,8 @@ async def lifespan(app: FastAPI):
82
 
83
  logger.info("=" * 60)
84
  logger.info("✅ All services initialized successfully")
85
- logger.info(f"📡 Server running on http://{settings.host}:{settings.port}")
86
- logger.info(f"📚 API docs available at http://{settings.host}:{settings.port}/docs")
87
  logger.info("=" * 60)
88
 
89
  yield
@@ -109,15 +97,13 @@ app = FastAPI(
109
  app.add_middleware(
110
  CORSMiddleware,
111
  allow_origins=[
112
- "http://localhost:8000",
113
- "http://127.0.0.1:8000",
114
  "vscode-webview://*",
115
- "*" # Allow all origins for development (restrict in production)
116
  ],
117
  allow_credentials=True,
118
  allow_methods=["*"], # Allow all HTTP methods (GET, POST, PUT, DELETE, etc.)
119
  allow_headers=["*"], # Allow all headers including Authorization
120
- expose_headers=["*"], # Expose all response headers
121
  )
122
 
123
  # Log CORS configuration
@@ -126,6 +112,14 @@ logger.info("✅ CORS middleware configured for VS Code extension compatibility"
126
  # Include routes with /api prefix
127
  app.include_router(router, prefix="/api")
128
 
 
 
 
 
 
 
 
 
129
 
130
  @app.get("/")
131
  async def root():
@@ -156,14 +150,14 @@ if __name__ == "__main__":
156
  import uvicorn
157
 
158
  logger.info("Starting Augmas Backend...")
159
- logger.info(f"Host: {settings.host}")
160
- logger.info(f"Port: {settings.port}")
161
  logger.info(f"Reload: {settings.reload}")
162
 
163
  uvicorn.run(
164
- "api.main:app",
165
- host=settings.host,
166
- port=settings.port,
167
- reload=settings.reload,
168
- log_level=settings.log_level.lower()
169
- )
 
1
  # main.py
2
  import os
 
3
  from contextlib import asynccontextmanager
4
  import logging
5
+ from fastapi import FastAPI, Request
 
 
 
 
 
 
 
 
 
 
 
6
  from fastapi.middleware.cors import CORSMiddleware
7
 
8
+ from utils.config import get_settings_for_environment, get_environment
9
  from api.routes import router
10
  from services.langchain_service import LangChainService
11
  from services.memory_service import MemoryService
 
18
  )
19
  logger = logging.getLogger(__name__)
20
 
21
+ # Get settings based on environment
22
+ settings = get_settings_for_environment()
23
 
24
  # Service instances (will be initialized in lifespan)
25
  langchain_service: LangChainService = None
 
70
 
71
  logger.info("=" * 60)
72
  logger.info("✅ All services initialized successfully")
73
+ logger.info("Server running on Hugging Face Space")
74
+ logger.info("API docs available at /docs")
75
  logger.info("=" * 60)
76
 
77
  yield
 
97
  app.add_middleware(
98
  CORSMiddleware,
99
  allow_origins=[
 
 
100
  "vscode-webview://*",
101
+ "https://*.hf.space"
102
  ],
103
  allow_credentials=True,
104
  allow_methods=["*"], # Allow all HTTP methods (GET, POST, PUT, DELETE, etc.)
105
  allow_headers=["*"], # Allow all headers including Authorization
106
+
107
  )
108
 
109
  # Log CORS configuration
 
112
  # Include routes with /api prefix
113
  app.include_router(router, prefix="/api")
114
 
115
@app.middleware("http")
async def add_security_headers(request: Request, call_next):
    """Attach standard security headers to every HTTP response."""
    response = await call_next(request)
    security_headers = {
        "X-Content-Type-Options": "nosniff",
        "X-Frame-Options": "DENY",
        "X-XSS-Protection": "1; mode=block",
    }
    for header_name, header_value in security_headers.items():
        response.headers[header_name] = header_value
    return response
122
+
123
 
124
  @app.get("/")
125
  async def root():
 
150
  import uvicorn
151
 
152
  logger.info("Starting Augmas Backend...")
153
+ logger.info("Server running on Hugging Face Space")
154
+ logger.info("API docs available at /docs")
155
  logger.info(f"Reload: {settings.reload}")
156
 
157
  uvicorn.run(
158
+ "api.main:app",
159
+ host="0.0.0.0",
160
+ port=7860,
161
+ reload=settings.reload,
162
+ log_level=settings.log_level.lower()
163
+ )
api/routes.py CHANGED
@@ -64,16 +64,19 @@ class FileReference(BaseModel):
64
  class RAGQueryRequest(BaseModel):
65
  query: str
66
  workspace_id: str
 
67
  max_chunks: int = 5
68
 
69
 
70
  class IndexWorkspaceRequest(BaseModel):
71
  workspace_id: str
 
72
  files: List[Dict[str, str]] # List of {path: str, content: str}
73
 
74
 
75
  class IndexFileRequest(BaseModel):
76
  workspace_id: str
 
77
  file_path: str
78
  content: str
79
 
@@ -257,11 +260,15 @@ async def get_current_user(
257
  @router.post("/chat", response_model=ChatResponse)
258
  async def chat(
259
  request: ChatRequest,
 
260
  langchain: LangChainService = Depends(get_langchain_service),
261
- rag: RAGServiceSupabase = Depends(get_rag_service)
 
262
  ):
263
  """Process chat message"""
264
  try:
 
 
265
  # Build code context
266
  context = CodeContext()
267
 
@@ -329,6 +336,7 @@ async def chat(
329
  @router.post("/agent/step", response_model=AgentStepResponse)
330
  async def agent_step(
331
  request: AgentStepRequest,
 
332
  langchain: LangChainService = Depends(get_langchain_service),
333
  ):
334
  """Agent mode: get next JSON action from the model (thought, action, input)."""
@@ -350,6 +358,7 @@ async def agent_step(
350
  @router.post("/rag/query")
351
  async def rag_query(
352
  request: RAGQueryRequest,
 
353
  rag: RAGServiceSupabase = Depends(get_rag_service)
354
  ):
355
  """Query RAG for relevant context (workspace-scoped)"""
@@ -360,6 +369,13 @@ async def rag_query(
360
  detail="RAG not ready. Please index workspace first."
361
  )
362
 
 
 
 
 
 
 
 
363
  context = await rag.get_relevant_context(
364
  request.workspace_id,
365
  request.query,
@@ -376,12 +392,22 @@ async def rag_query(
376
  @router.post("/rag/index/workspace")
377
  async def index_workspace(
378
  request: IndexWorkspaceRequest,
 
379
  rag: RAGServiceSupabase = Depends(get_rag_service)
380
  ):
381
  """Index workspace files"""
382
  try:
 
 
 
 
 
 
 
383
  result = await rag.index_workspace(request.workspace_id, request.files)
384
  return result
 
 
385
  except Exception as e:
386
  raise HTTPException(status_code=500, detail=str(e))
387
 
@@ -389,15 +415,25 @@ async def index_workspace(
389
  @router.post("/rag/index/file")
390
  async def index_file(
391
  request: IndexFileRequest,
 
392
  rag: RAGServiceSupabase = Depends(get_rag_service)
393
  ):
394
  """Index a single file"""
395
  try:
 
 
 
 
 
 
 
396
  result = await rag.index_workspace(
397
  request.workspace_id,
398
  [{'path': request.file_path, 'content': request.content}]
399
  )
400
  return result
 
 
401
  except Exception as e:
402
  raise HTTPException(status_code=500, detail=str(e))
403
 
@@ -405,13 +441,24 @@ async def index_file(
405
  @router.delete("/rag/index/file")
406
  async def delete_file(
407
  workspace_id: str,
 
408
  file_path: str,
 
409
  rag: RAGServiceSupabase = Depends(get_rag_service)
410
  ):
411
  """Delete embeddings for a file"""
412
  try:
 
 
 
 
 
 
 
413
  result = await rag.delete_file(workspace_id, file_path)
414
  return result
 
 
415
  except Exception as e:
416
  raise HTTPException(status_code=500, detail=str(e))
417
 
@@ -419,17 +466,30 @@ async def delete_file(
419
  @router.get("/rag/stats")
420
  async def get_rag_stats(
421
  workspace_id: str,
 
 
422
  rag: RAGServiceSupabase = Depends(get_rag_service)
423
  ):
424
  """Get RAG indexing statistics for a workspace"""
425
  try:
 
 
 
 
 
 
 
426
  return await rag.get_index_stats(workspace_id)
 
 
427
  except Exception as e:
428
  raise HTTPException(status_code=500, detail=str(e))
429
 
430
 
431
  @router.get("/rag/status")
432
- async def get_rag_status(rag: RAGServiceSupabase = Depends(get_rag_service)):
 
 
433
  """Get RAG service status"""
434
  return {
435
  "ready": rag.is_ready()
@@ -440,6 +500,7 @@ async def get_rag_status(rag: RAGServiceSupabase = Depends(get_rag_service)):
440
 
441
  @router.get("/models")
442
  async def get_available_models(
 
443
  langchain: LangChainService = Depends(get_langchain_service)
444
  ):
445
  """Get available models"""
@@ -463,6 +524,7 @@ async def get_available_models(
463
  @router.post("/models/switch")
464
  async def switch_model(
465
  request: ModelSwitchRequest,
 
466
  langchain: LangChainService = Depends(get_langchain_service)
467
  ):
468
  """Switch to a different model"""
@@ -478,6 +540,7 @@ async def switch_model(
478
 
479
  @router.get("/models/ollama/status")
480
  async def check_ollama_status(
 
481
  langchain: LangChainService = Depends(get_langchain_service)
482
  ):
483
  """Check Ollama service status"""
@@ -494,6 +557,7 @@ async def check_ollama_status(
494
 
495
  @router.get("/models/test")
496
  async def test_model_connection(
 
497
  langchain: LangChainService = Depends(get_langchain_service)
498
  ):
499
  """Test LLM connection"""
@@ -507,6 +571,7 @@ async def test_model_connection(
507
  @router.post("/workspace/set")
508
  async def set_workspace(
509
  request: SetWorkspaceRequest,
 
510
  langchain: LangChainService = Depends(get_langchain_service)
511
  ):
512
  """Set the workspace root path for LangChainService"""
@@ -663,6 +728,14 @@ async def delete_chat_session(
663
  ):
664
  """Delete a chat session (requires JWT authentication)"""
665
  try:
 
 
 
 
 
 
 
 
666
  await memory.delete_chat_session(chat_id)
667
  return {"message": "Chat deleted successfully"}
668
  except ValueError as e:
@@ -678,6 +751,7 @@ async def get_user_stats(
678
  ):
679
  """Get user statistics (requires JWT authentication)"""
680
  try:
 
681
  stats = await memory.get_user_stats()
682
  return stats
683
  except HTTPException:
@@ -686,29 +760,4 @@ async def get_user_stats(
686
  raise HTTPException(status_code=500, detail=str(e))
687
 
688
 
689
- @router.get("/test-openrouter")
690
- async def test_openrouter():
691
- """Test OpenRouter connectivity from backend (diagnostic endpoint)"""
692
- import os
693
- import httpx
694
-
695
- try:
696
- api_key = os.getenv("OPENROUTER_API_KEY")
697
- if not api_key:
698
- return {"status": "error", "message": "OPENROUTER_API_KEY not found in environment"}
699
-
700
- async with httpx.AsyncClient(verify=False) as client:
701
- response = await client.get(
702
- "https://openrouter.ai/api/v1/models",
703
- headers={"Authorization": f"Bearer {api_key}"},
704
- timeout=10.0
705
- )
706
-
707
- return {
708
- "status": "success" if response.status_code == 200 else "error",
709
- "status_code": response.status_code,
710
- "message": "OpenRouter is reachable and API key is valid" if response.status_code == 200 else "OpenRouter returned an error"
711
- }
712
- except Exception as e:
713
- logger.error(f"OpenRouter test error: {e}")
714
- return {"status": "error", "message": str(e)}
 
64
class RAGQueryRequest(BaseModel):
    """Payload for /rag/query: a workspace-scoped retrieval request."""
    query: str
    workspace_id: str
    workspace_path: str  # Required for ownership validation
    max_chunks: int = 5
69
 
70
 
71
class IndexWorkspaceRequest(BaseModel):
    """Payload for /rag/index/workspace: full file set to (re)index."""
    workspace_id: str
    workspace_path: str  # Required for ownership validation
    files: List[Dict[str, str]]  # List of {path: str, content: str}
75
 
76
 
77
class IndexFileRequest(BaseModel):
    """Payload for /rag/index/file: a single file's path and content."""
    workspace_id: str
    workspace_path: str  # Required for ownership validation
    file_path: str
    content: str
82
 
 
260
  @router.post("/chat", response_model=ChatResponse)
261
  async def chat(
262
  request: ChatRequest,
263
+ user_id: str = Depends(get_current_user_id),
264
  langchain: LangChainService = Depends(get_langchain_service),
265
+ rag: RAGServiceSupabase = Depends(get_rag_service),
266
+ memory: MemoryService = Depends(get_memory_service)
267
  ):
268
  """Process chat message"""
269
  try:
270
+ # Set the user in memory service from JWT
271
+ memory.set_current_user(user_id)
272
  # Build code context
273
  context = CodeContext()
274
 
 
336
  @router.post("/agent/step", response_model=AgentStepResponse)
337
  async def agent_step(
338
  request: AgentStepRequest,
339
+ user_id: str = Depends(get_current_user_id),
340
  langchain: LangChainService = Depends(get_langchain_service),
341
  ):
342
  """Agent mode: get next JSON action from the model (thought, action, input)."""
 
358
  @router.post("/rag/query")
359
  async def rag_query(
360
  request: RAGQueryRequest,
361
+ user_id: str = Depends(get_current_user_id),
362
  rag: RAGServiceSupabase = Depends(get_rag_service)
363
  ):
364
  """Query RAG for relevant context (workspace-scoped)"""
 
369
  detail="RAG not ready. Please index workspace first."
370
  )
371
 
372
+ # Validate workspace ownership
373
+ if not rag._validate_workspace_ownership(request.workspace_id, user_id, request.workspace_path):
374
+ raise HTTPException(
375
+ status_code=403,
376
+ detail="Access denied: Workspace does not belong to user"
377
+ )
378
+
379
  context = await rag.get_relevant_context(
380
  request.workspace_id,
381
  request.query,
 
392
@router.post("/rag/index/workspace")
async def index_workspace(
    request: IndexWorkspaceRequest,
    user_id: str = Depends(get_current_user_id),
    rag: RAGServiceSupabase = Depends(get_rag_service)
):
    """Index workspace files"""
    try:
        # Reject requests whose workspace_id was not derived for this user.
        owns_workspace = rag._validate_workspace_ownership(
            request.workspace_id, user_id, request.workspace_path
        )
        if not owns_workspace:
            raise HTTPException(
                status_code=403,
                detail="Access denied: Workspace does not belong to user"
            )
        return await rag.index_workspace(request.workspace_id, request.files)
    except HTTPException:
        # Preserve deliberate HTTP errors (e.g. the 403 above).
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
413
 
 
415
@router.post("/rag/index/file")
async def index_file(
    request: IndexFileRequest,
    user_id: str = Depends(get_current_user_id),
    rag: RAGServiceSupabase = Depends(get_rag_service)
):
    """Index a single file"""
    try:
        # Ownership gate: the workspace_id must match the user-scoped derivation.
        if not rag._validate_workspace_ownership(
            request.workspace_id, user_id, request.workspace_path
        ):
            raise HTTPException(
                status_code=403,
                detail="Access denied: Workspace does not belong to user"
            )
        # Reuse the bulk indexer with a one-element file list.
        single_file = [{'path': request.file_path, 'content': request.content}]
        return await rag.index_workspace(request.workspace_id, single_file)
    except HTTPException:
        # Let deliberate HTTP errors pass through unchanged.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
439
 
 
441
@router.delete("/rag/index/file")
async def delete_file(
    workspace_id: str,
    workspace_path: str,  # Required for ownership validation
    file_path: str,
    user_id: str = Depends(get_current_user_id),
    rag: RAGServiceSupabase = Depends(get_rag_service)
):
    """Delete embeddings for a file"""
    try:
        # Only the owner of the workspace may remove its embeddings.
        if not rag._validate_workspace_ownership(workspace_id, user_id, workspace_path):
            raise HTTPException(
                status_code=403,
                detail="Access denied: Workspace does not belong to user"
            )
        return await rag.delete_file(workspace_id, file_path)
    except HTTPException:
        # Re-raise intentional HTTP errors untouched.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
464
 
 
466
@router.get("/rag/stats")
async def get_rag_stats(
    workspace_id: str,
    workspace_path: str,  # Required for ownership validation
    user_id: str = Depends(get_current_user_id),
    rag: RAGServiceSupabase = Depends(get_rag_service)
):
    """Get RAG indexing statistics for a workspace"""
    try:
        # Stats are private to the owning user; verify before reading.
        if not rag._validate_workspace_ownership(workspace_id, user_id, workspace_path):
            raise HTTPException(
                status_code=403,
                detail="Access denied: Workspace does not belong to user"
            )
        return await rag.get_index_stats(workspace_id)
    except HTTPException:
        # Keep the 403 (and any other deliberate HTTP error) intact.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
487
 
488
 
489
  @router.get("/rag/status")
490
+ async def get_rag_status(
491
+ user_id: str = Depends(get_current_user_id),
492
+ rag: RAGServiceSupabase = Depends(get_rag_service)):
493
  """Get RAG service status"""
494
  return {
495
  "ready": rag.is_ready()
 
500
 
501
  @router.get("/models")
502
  async def get_available_models(
503
+ user_id: str = Depends(get_current_user_id),
504
  langchain: LangChainService = Depends(get_langchain_service)
505
  ):
506
  """Get available models"""
 
524
  @router.post("/models/switch")
525
  async def switch_model(
526
  request: ModelSwitchRequest,
527
+ user_id: str = Depends(get_current_user_id),
528
  langchain: LangChainService = Depends(get_langchain_service)
529
  ):
530
  """Switch to a different model"""
 
540
 
541
  @router.get("/models/ollama/status")
542
  async def check_ollama_status(
543
+ user_id: str = Depends(get_current_user_id),
544
  langchain: LangChainService = Depends(get_langchain_service)
545
  ):
546
  """Check Ollama service status"""
 
557
 
558
  @router.get("/models/test")
559
  async def test_model_connection(
560
+ user_id: str = Depends(get_current_user_id),
561
  langchain: LangChainService = Depends(get_langchain_service)
562
  ):
563
  """Test LLM connection"""
 
571
  @router.post("/workspace/set")
572
  async def set_workspace(
573
  request: SetWorkspaceRequest,
574
+ user_id: str = Depends(get_current_user_id),
575
  langchain: LangChainService = Depends(get_langchain_service)
576
  ):
577
  """Set the workspace root path for LangChainService"""
 
728
  ):
729
  """Delete a chat session (requires JWT authentication)"""
730
  try:
731
+ memory.set_current_user(user_id)
732
+
733
+ # Verify the chat belongs to the user (security check)
734
+ sessions = await memory.get_chat_sessions()
735
+ chat_exists = any(s["id"] == chat_id for s in sessions)
736
+ if not chat_exists:
737
+ raise HTTPException(status_code=403, detail="Chat not found or access denied")
738
+
739
  await memory.delete_chat_session(chat_id)
740
  return {"message": "Chat deleted successfully"}
741
  except ValueError as e:
 
751
  ):
752
  """Get user statistics (requires JWT authentication)"""
753
  try:
754
+ memory.set_current_user(user_id)
755
  stats = await memory.get_user_stats()
756
  return stats
757
  except HTTPException:
 
760
  raise HTTPException(status_code=500, detail=str(e))
761
 
762
 
763
+ # Diagnostic endpoint removed for security - use /api/models/test instead
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
services/langchain_service.py CHANGED
@@ -377,9 +377,10 @@ class MCPModelServer:
377
 
378
  try:
379
  async with httpx.AsyncClient() as client:
 
380
  headers = {
381
  "Authorization": f"Bearer {self.openrouter_api_key}",
382
- "HTTP-Referer": "http://localhost:8000",
383
  "X-Title": "Code Assistant"
384
  }
385
  response = await client.get(
@@ -483,7 +484,7 @@ class LangChainService:
483
  client=openai_client.chat.completions,
484
  async_client=async_openai_client.chat.completions,
485
  default_headers={
486
- "HTTP-Referer": "http://localhost:8000",
487
  "X-Title": "Code Assistant"
488
  },
489
  )
@@ -538,24 +539,14 @@ class LangChainService:
538
 
539
  async def check_ollama_status(self) -> bool:
540
  """Check if Ollama service is running"""
541
- try:
542
- async with httpx.AsyncClient() as client:
543
- response = await client.get('http://localhost:11434/api/tags', timeout=5.0)
544
- return response.status_code == 200
545
- except Exception as e:
546
- logger.warning(f"Ollama status check failed: {e}")
547
- return False
548
-
549
  async def get_ollama_models(self) -> List[str]:
550
  """Get available Ollama models"""
551
- try:
552
- async with httpx.AsyncClient() as client:
553
- response = await client.get('http://localhost:11434/api/tags', timeout=5.0)
554
- if response.status_code == 200:
555
- data = response.json()
556
- return [model['name'] for model in data.get('models', [])]
557
- except Exception as e:
558
- logger.warning(f"Error fetching Ollama models: {e}")
559
  return []
560
 
561
  async def test_connection(self) -> bool:
 
377
 
378
  try:
379
  async with httpx.AsyncClient() as client:
380
+ app_url = os.getenv("APP_URL", "https://huggingface.co")
381
  headers = {
382
  "Authorization": f"Bearer {self.openrouter_api_key}",
383
+ "HTTP-Referer": app_url,
384
  "X-Title": "Code Assistant"
385
  }
386
  response = await client.get(
 
484
  client=openai_client.chat.completions,
485
  async_client=async_openai_client.chat.completions,
486
  default_headers={
487
+ "HTTP-Referer": os.getenv("APP_URL", "https://huggingface.co"),
488
  "X-Title": "Code Assistant"
489
  },
490
  )
 
539
 
540
  async def check_ollama_status(self) -> bool:
541
  """Check if Ollama service is running"""
542
+ # Ollama is not available in Hugging Face Spaces container
543
+ # Return False to indicate Ollama is not available
544
+ return False
545
+
 
 
 
 
546
  async def get_ollama_models(self) -> List[str]:
547
  """Get available Ollama models"""
548
+ # Ollama is not available in Hugging Face Spaces container
549
+ # Return empty list
 
 
 
 
 
 
550
  return []
551
 
552
  async def test_connection(self) -> bool:
services/rag_service_supabase.py CHANGED
@@ -116,6 +116,19 @@ class RAGServiceSupabase:
116
  def _hash_workspace_path(path: str) -> str:
117
  """Create stable hash of workspace path"""
118
  return hashlib.md5(path.encode()).hexdigest()
 
 
 
 
 
 
 
 
 
 
 
 
 
119
 
120
  @staticmethod
121
  def _content_hash(content: str) -> str:
 
116
  def _hash_workspace_path(path: str) -> str:
117
  """Create stable hash of workspace path"""
118
  return hashlib.md5(path.encode()).hexdigest()
119
+
120
+ @staticmethod
121
+ def _generate_user_scoped_workspace_id(user_id: str, workspace_path: str) -> str:
122
+ """Generate user-scoped workspace ID to ensure isolation"""
123
+ # Combine user_id and workspace_path, then hash
124
+ combined = f"{user_id}:{workspace_path}"
125
+ return hashlib.sha256(combined.encode('utf-8')).hexdigest()
126
+
127
+ @staticmethod
128
+ def _validate_workspace_ownership(workspace_id: str, user_id: str, workspace_path: str) -> bool:
129
+ """Validate that workspace_id belongs to the user"""
130
+ expected_id = RAGServiceSupabase._generate_user_scoped_workspace_id(user_id, workspace_path)
131
+ return workspace_id == expected_id
132
 
133
  @staticmethod
134
  def _content_hash(content: str) -> str:
utils/config.py CHANGED
@@ -31,7 +31,7 @@ class Settings(BaseSettings):
31
 
32
  # Workspace Settings
33
  workspace_root: str = Field(
34
- default_factory=lambda: os.getcwd(),
35
  description="Path to workspace root directory"
36
  )
37
 
@@ -169,6 +169,18 @@ class Settings(BaseSettings):
169
  description="Secret key for JWT tokens"
170
  )
171
 
 
 
 
 
 
 
 
 
 
 
 
 
172
  access_token_expire_minutes: int = Field(
173
  default=60 * 24 * 7, # 7 days
174
  description="Access token expiration time in minutes",
 
31
 
32
  # Workspace Settings
33
  workspace_root: str = Field(
34
+ default_factory=lambda: os.getenv("WORKSPACE_ROOT", "/app"),
35
  description="Path to workspace root directory"
36
  )
37
 
 
169
  description="Secret key for JWT tokens"
170
  )
171
 
172
+ @validator('secret_key')
173
+ def validate_secret_key(cls, v):
174
+ """Ensure secret key is changed from default in production/staging"""
175
+ if v == "your-secret-key-change-in-production":
176
+ env = os.getenv("ENVIRONMENT", "development").lower()
177
+ if env in ("production", "staging"):
178
+ raise ValueError(
179
+ "SECRET_KEY environment variable must be set in production/staging! "
180
+ "Set SECRET_KEY environment variable to a secure random string."
181
+ )
182
+ return v
183
+
184
  access_token_expire_minutes: int = Field(
185
  default=60 * 24 * 7, # 7 days
186
  description="Access token expiration time in minutes",