From 2896f454202733ad487ec4efd60fd2503e568661 Mon Sep 17 00:00:00 2001
From: vizsatiz
Date: Tue, 24 Feb 2026 17:26:39 +0530
Subject: [PATCH] feature to export dynamic queries

---
 .../controllers/datasource_controller.py      | 128 ++++++++++++++++++
 1 file changed, 128 insertions(+)

diff --git a/wavefront/server/modules/plugins_module/plugins_module/controllers/datasource_controller.py b/wavefront/server/modules/plugins_module/plugins_module/controllers/datasource_controller.py
index c0fc1a2c..fe0b1ca0 100644
--- a/wavefront/server/modules/plugins_module/plugins_module/controllers/datasource_controller.py
+++ b/wavefront/server/modules/plugins_module/plugins_module/controllers/datasource_controller.py
@@ -8,6 +8,7 @@
 from fastapi import Request
 from fastapi import status
 from fastapi.responses import JSONResponse
+from fastapi.responses import StreamingResponse
 from fastapi.routing import APIRouter
 
 from common_module.common_container import CommonContainer
@@ -38,6 +39,8 @@
 from plugins_module.services.dynamic_query_service import DynamicQueryService
 from db_repo_module.cache.cache_manager import CacheManager
 from ..utils.helper import generate_cache_key, validate_yaml_query
+import csv
+import io
 import yaml
 from ..utils.helper import DynamicQueryRequest
 from ..utils.helper import DynamicQueryExecuteRequest
@@ -47,6 +50,29 @@
 
 datasource_router = APIRouter()
 
+def _serialized_rows_to_csv(rows: list) -> bytes:
+    """Convert a list of serialized dicts (e.g. from execute_dynamic_query) to CSV bytes."""
+    if not rows:
+        return b''
+    out = io.StringIO()
+    fieldnames = list(rows[0].keys())
+    for row in rows[1:]:
+        for k in row:
+            if k not in fieldnames:
+                fieldnames.append(k)
+    writer = csv.DictWriter(out, fieldnames=fieldnames, extrasaction='ignore')
+
+    def _cell_value(v):
+        if isinstance(v, (dict, list)):
+            return json.dumps(v)
+        return v if v is None or isinstance(v, str) else str(v)
+
+    writer.writeheader()
+    for row in rows:
+        writer.writerow({k: _cell_value(row.get(k)) for k in fieldnames})
+    return out.getvalue().encode('utf-8-sig')
+
+
 @datasource_router.post('/v1/datasources')
 @inject
 async def add_datasource(
@@ -718,6 +744,108 @@ async def execute_dynamic_query(
     )
 
 
+@datasource_router.post('/v1/{datasource_id}/dynamic-queries/{query_id}/export')
+@inject
+async def export_dynamic_query_csv(
+    request: Request,
+    datasource_id: str,
+    query_id: str,
+    filter: str | None = Query(None, alias='$filter'),
+    offset: int | None = 0,
+    limit: int | None = 100,
+    dynamic_query_params: DynamicQueryExecuteRequest = None,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    dynamic_query_yaml_service: DynamicQueryService = Depends(
+        Provide[PluginsContainer.dynamic_query_service]
+    ),
+    user_service: UserService = Depends(Provide[UserContainer.user_service]),
+    cache_manager: CacheManager = Depends(Provide[PluginsContainer.cache_manager]),
+    force_fetch: int = Query(0),
+):
+    """Execute the dynamic query and return results as a downloadable CSV file."""
+    role_id, user_id, _ = get_current_user(request)
+    datasource_type, datasource_config = await get_datasource_config(datasource_id)
+    if not datasource_config:
+        return JSONResponse(
+            status_code=status.HTTP_404_NOT_FOUND,
+            content=response_formatter.buildErrorResponse(
+                f'Datasource not found: {datasource_id}'
+            ),
+        )
+    yaml_query, _ = await dynamic_query_yaml_service.get_dynamic_yaml_query(query_id)
+    if not yaml_query:
+        return JSONResponse(
+            status_code=status.HTTP_404_NOT_FOUND,
+            content=response_formatter.buildErrorResponse(
+                f'Dynamic query not found: {query_id}'
+            ),
+        )
+
+    rls_filter_str = None
+    is_admin = await check_admin(role_id)
+    if not is_admin:
+        rls_filters = await user_service.get_user_resources(
+            user_id=user_id, scope=ResourceScope.DATA
+        )
+        if len(rls_filters) == 0:
+            return JSONResponse(
+                status_code=status.HTTP_403_FORBIDDEN,
+                content=response_formatter.buildErrorResponse(
+                    'Data access not set for non-admin user'
+                ),
+            )
+        rls_filters = fetch_data_filters(rls_filters)
+        rls_filter_str = f"{ ' $and '.join(rls_filters)}"
+
+    datasource_plugin = DatasourcePlugin(datasource_type, datasource_config)
+    cache_key = generate_cache_key(
+        query_id,
+        filter,
+        rls_filter_str,
+        limit,
+        offset,
+        dynamic_query_params.params if dynamic_query_params else None,
+    )
+    if not force_fetch:
+        cached_result = cache_manager.get_str(cache_key)
+        if cached_result:
+            serialized_res = json.loads(cached_result)
+        else:
+            res = await datasource_plugin.execute_dynamic_query(
+                yaml_query,
+                rls_filter_str,
+                filter,
+                offset,
+                limit,
+                dynamic_query_params.params if dynamic_query_params else None,
+            )
+            serialized_res = serialize_values(res)
+            cache_manager.add(cache_key, json.dumps(serialized_res), expiry=60 * 2)
+    else:
+        res = await datasource_plugin.execute_dynamic_query(
+            yaml_query,
+            rls_filter_str,
+            filter,
+            offset,
+            limit,
+            dynamic_query_params.params if dynamic_query_params else None,
+        )
+        serialized_res = serialize_values(res)
+        cache_manager.add(cache_key, json.dumps(serialized_res), expiry=60 * 2)
+
+    csv_bytes = _serialized_rows_to_csv(serialized_res)
+    filename = f'export_{query_id}.csv'
+    return StreamingResponse(
+        iter([csv_bytes]),
+        media_type='text/csv',
+        headers={
+            'Content-Disposition': f'attachment; filename="{filename}"',
+        },
+    )
+
+
 @datasource_router.delete('/v1/{datasource_id}/dynamic-queries/{query_id}')
 @inject
 async def delete_dynamic_query(