Alexey
2025-09-18 18:01:06 +02:00
parent d2db261152
commit b8885d7d73
5 changed files with 45 additions and 5 deletions


@@ -1,6 +1,6 @@
# contains the business logic for aggregates
from typing import List
from typing import List, Dict
from pprint import pprint
from fastapi import Request
from src.aggregate.aggregate_schema import AggregateSchema, MetricEnum
@@ -10,12 +10,34 @@ from src.utils import round_bytes, get_data_from_ontap
logger = getLogger("uvicorn")
logger.setLevel("DEBUG")
# TAG2REST = {
# 'worm_compliance': { 'snaplock_type': 'compliance' },
# 'worm_enterprise': { 'snaplock_type': 'enterprise' },
# 'flash': { 'block_storage.storage_type': 'ssd' },
# 'hdd': { 'block_storage.storage_type': 'hdd' },
# 'mcc': { 'block_storage.mirror.enabled': 'true' }
# }
async def get_aggregates(request: Request, metric: str = "relative") -> List[AggregateSchema]:
# {
# "flash": "production",
# "performance": "gold",
# "worm": "compliance"
# }
async def get_aggregates(request: Request, metric: str = "relative", tags: Dict[str, str] = None) -> List[AggregateSchema]:
# Dummy data for demonstration
# You can use the metric parameter to filter or modify results as needed
# For now, just return the same data and show metric usage
logger.debug(f"Metric used: {metric}")
logger.debug(f"Tags used: {tags}")
# convert tags to ONTAP filter
# filter_str = ""
# if tags:
# str_filter_parts = [f"tag.{key} eq '{value}'" for key, value in tags.items()]
# param_str = "&".join([f"{TAG2REST[key]}" for key, value in tags.items()])
__aggregates = await get_data_from_ontap(request, logger, "storage/aggregates", "fields=*")
pprint(__aggregates)
if metric == MetricEnum.relative:
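
The commented-out TAG2REST mapping and the filter-building lines above point at translating the new tags parameter into ONTAP REST query filters. Below is a minimal sketch of how that conversion could look, assuming the incoming tag keys match the TAG2REST keys and that the resulting filters can simply be appended to the fields=* query already passed to get_data_from_ontap; the helper name tags_to_ontap_query is illustrative and not part of this commit.

from typing import Dict

TAG2REST = {
    "worm_compliance": {"snaplock_type": "compliance"},
    "worm_enterprise": {"snaplock_type": "enterprise"},
    "flash": {"block_storage.storage_type": "ssd"},
    "hdd": {"block_storage.storage_type": "hdd"},
    "mcc": {"block_storage.mirror.enabled": "true"},
}

def tags_to_ontap_query(tags: Dict[str, str]) -> str:
    # Map each known tag to its ONTAP REST field/value pair and join the
    # pairs into a query-string fragment, e.g. {"flash": "production"}
    # -> "block_storage.storage_type=ssd". Unknown tags are ignored.
    parts = []
    for key in (tags or {}):
        for field, value in TAG2REST.get(key, {}).items():
            parts.append(f"{field}={value}")
    return "&".join(parts)

# Possible wiring inside get_aggregates (hypothetical, not in this commit):
# query = "fields=*"
# if tags:
#     query += "&" + tags_to_ontap_query(tags)
# __aggregates = await get_data_from_ontap(request, logger, "storage/aggregates", query)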