Compare commits

19 Commits

Author SHA1 Message Date
Magel, Denis
362c470b3c fix: API does not suspend after 1st request 2025-09-18 18:36:51 +02:00
Alexey
b8885d7d73 tags dev 2025-09-18 18:01:06 +02:00
Magel, Denis
d2db261152 docs: add drawio 2025-09-18 17:05:10 +02:00
Magel, Denis
e73a18e981 docs: update readme 2025-09-18 16:39:54 +02:00
Magel, Denis
7d4b42df11 fix: now really use multiple clusters in the loop 2025-09-18 15:57:35 +02:00
Magel, Denis
e8aa7d7df5 feat: get cluster information from redis
feat: loop over all clusters
2025-09-18 15:36:39 +02:00
Magel, Denis
b60383071a make shared connections available via request.shared.XXX 2025-09-18 15:28:50 +02:00
58f7c5c393 Merge pull request 'Refactoring config to config_upload, making important vars global' (#6) from feature/config_features into main
Reviewed-on: #6
2025-09-18 13:01:16 +00:00
Pascal Scheiben
5dfba7416b Refactoring config to config_upload, making important vars global 2025-09-18 15:00:03 +02:00
Magel, Denis
fc3f39c6ae fix: duplicate API startup 2025-09-18 14:48:46 +02:00
1ee40b6647 Merge pull request 'feature/config_upload' (#5) from feature/config_upload into main
Reviewed-on: #5
2025-09-18 12:34:38 +00:00
Magel, Denis
60008fa947 fix: adjusted paths to run from root dir
updated inventory.yaml
2025-09-18 14:33:30 +02:00
Magel, Denis
767f43551e rebased 2025-09-18 14:23:19 +02:00
Pascal Scheiben
9d12045b81 Arranging imports 2025-09-18 14:14:53 +02:00
Pascal Scheiben
72992d651d Fixing typos 2025-09-18 14:13:50 +02:00
Pascal Scheiben
ab52169987 Enhancing comments, adding stub for business logic 2025-09-18 14:13:50 +02:00
Pascal Scheiben
1a4e2ff688 Rewriting comments 2025-09-18 14:13:50 +02:00
Pascal Scheiben
579c62319c Adding config endpoint 2025-09-18 14:13:50 +02:00
2a165c91b6 Merge pull request 'backend' (#4) from backend into main
Reviewed-on: #4
2025-09-18 12:09:49 +00:00
21 changed files with 213 additions and 82 deletions

11
.env

@@ -1,8 +1,3 @@
-# Environment variables for NetApp ONTAP clusters
-CLUSTER1_HOSTNAME=172.16.57.2
-CLUSTER1_USERNAME=admin
-CLUSTER1_PASSWORD=Netapp12
-CLUSTER2_HOSTNAME=172.16.56.2
-CLUSTER2_USERNAME=admin
-CLUSTER2_PASSWORD=Netapp12
+cluster_inventory_path = config/inventory.yml
+redis_host = '172.16.0.208'
+redis_port = '6379'
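
The per-cluster credentials move out of .env into the YAML inventory; .env now only points at that inventory and at Redis. A minimal sketch of how keys like these are read with python-dotenv, the same pattern initialize.py below uses:

import os
from dotenv import load_dotenv

# load_dotenv() copies .env entries into the process environment;
# python-dotenv tolerates spaces around '=' and strips matching quotes.
load_dotenv()
inventory_path = os.getenv("cluster_inventory_path")  # -> "config/inventory.yml"
redis_host = os.getenv("redis_host")                  # -> "172.16.0.208"
redis_port = os.getenv("redis_port")                  # -> "6379"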


@@ -1,3 +1,19 @@
 # generic_api_endpoint
 Hackathon API endpoint
+## management summary // usecase
+This API acts as middleware for service portals and frontends (like SNOW) that retrieve data via its REST API. It manages metadata.
+## ideas for future
+- store the data in redis on initialization or on first request
+- also query redis first instead of going directly to ONTAP
+- documentation -> make it understandable, so that users will use it!
+- add capability to apply filters/conditions on the return
+- Alexey's:
+  - performance-based filtering
+  - add capability for finding best clusters, volumes
+  - get credentials from credential-mgmt-system

BIN concept.drawio.png (new binary file, 71 KiB, not shown)


@@ -1,8 +1,6 @@
-1:
-  hostname: '172.16.57.2'
-  username: 'admin'
-  password: 'Netapp12'
-2:
-  hostname: '172.16.56.2'
-  username: 'admin'
-  password: 'Netapp12'
+- hostname: "172.16.57.2"
+  username: "admin"
+  password: "Netapp12"
+- hostname: "172.16.56.2"
+  username: "admin"
+  password: "Netapp12"
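
The inventory drops the numeric keys in favor of a plain YAML list, which maps cleanly onto the pydantic validation initialize.py performs below. A hypothetical standalone sketch; the real field names live in src/schema.py, which this compare does not show, so the schema here is assumed:

from typing import List
import yaml
from pydantic import BaseModel, TypeAdapter

class ConfigSchema(BaseModel):  # assumed shape of src.schema.ConfigSchema
    hostname: str
    username: str
    password: str

with open("config/inventory.yml") as f:
    inv = yaml.safe_load(f)  # list-style YAML -> list[dict]

clusters = TypeAdapter(List[ConfigSchema]).validate_python(inv)
print(clusters[0].hostname)  # -> "172.16.57.2"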

3
src/.env Normal file

@@ -0,0 +1,3 @@
+cluster_inventory_path = ./config/inventory.yml
+redis_host = '172.16.0.208'
+redis_port = '6379'


@@ -1,4 +1,5 @@
 from src.example.router import router as example_router
-from .aggregate_router import router as aggregate_router
+from src.aggregate.aggregate_router import router as aggregate_router
 __all__ = ["example_router", "aggregate_router"]


@@ -1,6 +1,6 @@
 # contains the router for the aggregates endpoint
 from fastapi import APIRouter, Query, Request
-from typing import List
+from typing import List, Dict
 from .aggregate_schema import AggregateSchema, MetricEnum
 from .aggregate_service import get_aggregates
@@ -13,4 +13,11 @@ async def aggregates_endpoint(
     request: Request,
     metric: MetricEnum = Query(MetricEnum.relative, description="Metric type"),
 ):
-    return await get_aggregates(request, metric)
+    # Extract tag parameters from query string
+    tags: Dict[str, str] = {}
+    for param_name, param_value in request.query_params.items():
+        if param_name.startswith("tag."):
+            tag_key = param_name[4:]
+            tags[tag_key] = param_value
+    return await get_aggregates(request, metric, tags)
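
The loop turns every tag.<key> query parameter into an entry of the tags dict. A hypothetical client call; the route path is not visible in this hunk, so /aggregates is assumed:

import httpx

resp = httpx.get(
    "http://127.0.0.1:8000/aggregates",
    params={"metric": "relative", "tag.flash": "true", "tag.worm_compliance": "true"},
)
# inside the endpoint: tags == {"flash": "true", "worm_compliance": "true"}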


@@ -13,3 +13,11 @@ class AggregateSchema(BaseModel):
 class MetricEnum(str, Enum):
     relative = "relative"
     absolute = "absolute"
+
+TAG2REST = {
+    'worm_compliance': {'snaplock_type': 'compliance'},
+    'worm_enterprise': {'snaplock_type': 'enterprise'},
+    'flash': {'block_storage.storage_type': 'ssd'},
+    'hdd': {'block_storage.storage_type': 'hdd'},
+    'mcc': {'block_storage.mirror.enabled': 'true'}
+}
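
TAG2REST maps friendly tag names to ONTAP REST field filters. The commented-out block in aggregate_service.py below hints at the conversion; one possible sketch of it, not the repo's implementation:

from src.aggregate.aggregate_schema import TAG2REST

def tags_to_query(tags: dict[str, str]) -> str:
    # Translate each known tag into its ONTAP field=value filter and
    # join them into a query-string fragment; unknown tags are ignored.
    parts = []
    for key in tags:
        for field, value in TAG2REST.get(key, {}).items():
            parts.append(f"{field}={value}")
    return "&".join(parts)

# tags_to_query({"flash": "1", "mcc": "1"})
# -> "block_storage.storage_type=ssd&block_storage.mirror.enabled=true"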


@@ -1,25 +1,45 @@
 # contains the business logic for aggregates
-from typing import List
+from typing import List, Dict
+from pprint import pprint
 from fastapi import Request
-from .aggregate_schema import AggregateSchema, MetricEnum
+from src.aggregate.aggregate_schema import AggregateSchema, MetricEnum
 from logging import getLogger
-from ..utils import round_bytes, get_data_from_ontap
+from src.utils import round_bytes, get_data_from_ontap
 logger = getLogger("uvicorn")
 logger.setLevel("DEBUG")
+# TAG2REST = {
+#     'worm_compliance': {'snaplock_type': 'compliance'},
+#     'worm_enterprise': {'snaplock_type': 'enterprise'},
+#     'flash': {'block_storage.storage_type': 'ssd'},
+#     'hdd': {'block_storage.storage_type': 'hdd'},
+#     'mcc': {'block_storage.mirror.enabled': 'true'}
+# }
-async def get_aggregates(request: Request, metric: str = "relative") -> List[AggregateSchema]:
+# {
+#     "flash": "production",
+#     "performance": "gold",
+#     "worm": "compliance"
+# }
+async def get_aggregates(request: Request, metric: str = "relative", tags: Dict[str, str] = None) -> List[AggregateSchema]:
     # Dummy data for demonstration
     # You can use the metric parameter to filter or modify results as needed
     # For now, just return the same data and show metric usage
     logger.debug(f"Metric used: {metric}")
-    client = request.app.requests_client
-    __aggregates = await get_data_from_ontap(client, logger, "172.16.57.2", "admin", "Netapp12", "storage/aggregates", "fields=name,uuid,space,node,home_node")
-    logger.debug(__aggregates)
-    __aggregates = __aggregates.get("records")
+    logger.debug(f"Tags used: {tags}")
+    # convert tags to ONTAP filter
+    # filter_str = ""
+    # if tags:
+    #     str_filter_parts = [f"tag.{key} eq '{value}'" for key, value in tags.items()]
+    #     param_str = "&".join([f"{TAG2REST[key]}" for key, value in tags.items()])
+    __aggregates = await get_data_from_ontap(request, logger, "storage/aggregates", "fields=*")
+    pprint(__aggregates)
     if metric == MetricEnum.relative:
         __aggregates = sorted(__aggregates, key=lambda r: r["space"]["block_storage"].get("used_percent"), reverse=True)
     elif metric == MetricEnum.absolute:
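
The hunk is cut off inside the absolute branch. A plausible continuation, mirroring the relative sort and assuming the same ONTAP space schema (space.block_storage.used holds absolute used bytes); this is a sketch, not the repo's code:

    elif metric == MetricEnum.absolute:
        __aggregates = sorted(
            __aggregates,
            key=lambda r: r["space"]["block_storage"].get("used", 0),
            reverse=True,
        )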


@@ -0,0 +1,3 @@
+from src.config_upload.router import router as config_router
+__all__ = ["config_router"]


@@ -0,0 +1,14 @@
+POST http://127.0.0.1:8000/config
+Content-Type: application/json
+
+{
+    "cluster_list": [
+        {
+            "hostname": "cluster1.demo.netapp.com",
+            "username": "admin",
+            "password": "Netapp1!"
+        }
+    ]
+}
+
+###


@@ -0,0 +1,23 @@
+import logging
+from fastapi import APIRouter
+from .schema import ConfigReturnSchema, ConfigSchema
+
+logger = logging.getLogger("uvicorn")
+router = APIRouter(tags=["config_upload"])
+
+@router.post(
+    "/config", summary="Upload a configuration", response_model=ConfigReturnSchema
+)
+async def create_config(config: ConfigSchema) -> ConfigSchema:
+    """
+    Endpoint to receive and store configuration data.
+
+    ⚠️ at this time the configuration is not stored anywhere. It's like logging to /dev/null
+    """
+    logger.info("Received configuration data")
+    return config


@@ -0,0 +1,21 @@
+# contains the schema definitions for the config_upload service
+from pydantic import BaseModel
+
+class ConfigEntrySchema(BaseModel):
+    hostname: str
+    username: str
+    password: str
+
+class ConfigOutSchema(BaseModel):
+    hostname: str
+    username: str
+
+class ConfigReturnSchema(BaseModel):
+    cluster_list: list[ConfigOutSchema]
+
+class ConfigSchema(BaseModel):
+    cluster_list: list[ConfigEntrySchema]
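
The two output schemas exist so FastAPI can strip secrets: create_config receives ConfigSchema (with passwords) but responds through response_model=ConfigReturnSchema, so passwords are filtered out of the echo. A hypothetical round-trip using the .http payload above:

import httpx

resp = httpx.post(
    "http://127.0.0.1:8000/config",
    json={"cluster_list": [{"hostname": "cluster1.demo.netapp.com",
                            "username": "admin", "password": "Netapp1!"}]},
)
print(resp.json())
# -> {"cluster_list": [{"hostname": "cluster1.demo.netapp.com", "username": "admin"}]}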


@@ -0,0 +1,2 @@
+# contains the business logic for the config_upload service
+async def save_config() -> None: ...


@@ -3,7 +3,7 @@ import logging
 from redis import Redis, ConnectionError
 from typing import List
 from pydantic import TypeAdapter
-from schema import ConfigSchema
+from src.schema import ConfigSchema

 def setup_db_conn(redishost, redisport: str):


@@ -1,6 +1,7 @@
 # contains the router for the aggregate endpoint
 from fastapi import APIRouter
-from .schema import ExampleSchema
+from src.example.schema import ExampleSchema

 router = APIRouter(tags=["aggregate"])


@@ -1,15 +1,18 @@
 # contains the schema definitions for the aggregate service
 from pydantic import BaseModel
 from pathlib import Path

 class ExampleSchema(BaseModel):
     example_field: str
     another_field: int

 class ClusterCreds(BaseModel):
     """A structure to hold basic auth cluster credentials for a cluster"""
-    username: str
-    password: str
-    hostname: str = None
+    username: str
+    password: str
+    hostname: str = None
+    cert_filepath: Path = None
-    key_filepath: Path = None
+    key_filepath: Path = None


@@ -5,35 +5,36 @@ import yaml
 from pathlib import Path
 from dotenv import load_dotenv
-from database import setup_db_conn
-from schema import ConfigSchema
+from src.database import setup_db_conn
+from src.schema import ConfigSchema
 from typing import List
 from pydantic import TypeAdapter

 def initialize_config():
     load_dotenv()
-    log = logging.getLogger('uvicorn')
-    ENV_INVENTORYPATH = os.getenv('cluster_inventory_path')
-    ENV_REDISHOST = os.getenv('redis_host')
-    ENV_REDISPORT = os.getenv('redis_port')
+    log = logging.getLogger("uvicorn")
+    ENV_INVENTORYPATH = os.getenv("cluster_inventory_path")
+    ENV_REDISHOST = os.getenv("redis_host")
+    ENV_REDISPORT = os.getenv("redis_port")
     log.info(f"Found Cluster Inventory file at: {ENV_INVENTORYPATH}")
     if not ENV_INVENTORYPATH or not Path(ENV_INVENTORYPATH).is_file():
         print(f"FATAL: Inventory file {ENV_INVENTORYPATH} is missing or not a file.")
         return False
     try:
-        with open(ENV_INVENTORYPATH, 'r') as f:
+        with open(ENV_INVENTORYPATH, "r") as f:
             inv = yaml.safe_load(f)
         inventory = json.dumps(inv)
     except Exception as e:
         print(f"FATAL: Cannot read inventory file {ENV_INVENTORYPATH}. Err: {e}")
         return False
-    print(f'[INFO] Importing configuration to DB...')
+    log.info(f"Importing configuration to DB...")
     try:
+        GLOBAL_INVENTORY_VALID = TypeAdapter(List[ConfigSchema]).validate_python(inv)
         redis_conn = setup_db_conn(ENV_REDISHOST, ENV_REDISPORT)
-        redis_conn.hset('cluster_inventory', mapping={'inventory': inventory})
+        redis_conn.hset("cluster_inventory", mapping={"inventory": inventory})
         redis_conn.close()
         log.info("Configuration has been loaded.")


@@ -1,30 +1,30 @@
 import os
 import json
 import logging
 import yaml
 from pathlib import Path
 from dotenv import load_dotenv
 from redis import Redis
-from contextlib import asynccontextmanager
-from pydantic import BaseModel, ValidationError, SecretStr, AnyHttpUrl
-from typing import Optional, Literal, List, Union
+import httpx
 from fastapi import FastAPI
 from src.aggregate import aggregate_router
+from src.config_upload import config_router
-from database import setup_db_conn, get_inventory_from_redis, get_config_from_db
+from contextlib import asynccontextmanager
+from .database import setup_db_conn, get_config_from_db
 from src.initialize import initialize_config
-from utils import setup_logging
+from .utils import setup_logging

 logger = logging.getLogger("uvicorn")
 logger.setLevel("DEBUG")
 logger.info("Starting application")

 @asynccontextmanager
 async def lifespan(app: FastAPI):
-    ''' make loading it async'''
-    log = logging.getLogger('uvicorn')
+    """make loading it async"""
+    global shared_redis_conn, requests_client
+    log = logging.getLogger("uvicorn")
     cfg_init_result = initialize_config()
-    shared_redis_conn = setup_db_conn(os.getenv('redis_host'), os.getenv('redis_port'))
+    shared_redis_conn = setup_db_conn(os.getenv("redis_host"), os.getenv("redis_port"))
     if not shared_redis_conn:
         log.error("Cannot connect to Redis DB. Exiting...")
         exit(1)
@@ -34,13 +34,16 @@ async def lifespan(app: FastAPI):
     if not cfg_init_result:
         log.error("Configuration initialization failed. Exiting...")
         # exit(1)
-    yield
+    requests_client = httpx.AsyncClient(verify=False)
+    yield {"redis_conn": shared_redis_conn, "requests_client": requests_client}
+    await requests_client.aclose()
     log.info("Shutting down FastAPI app...")

 setup_logging()
-log = logging.getLogger('uvicorn')
+log = logging.getLogger("uvicorn")
 log.info("Starting FastAPI app...")
 app = FastAPI(lifespan=lifespan)
 app.include_router(aggregate_router)
+app.include_router(config_router)
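
Yielding a dict from lifespan uses Starlette's lifespan state: the mapping is exposed on each request as request.state, which is how utils.py below reaches the shared connections without module globals. A minimal handler sketch; the route is hypothetical, for illustration only:

from fastapi import Request

@app.get("/state-demo")  # hypothetical route
async def state_demo(request: Request):
    redis_conn = request.state.redis_conn        # yielded by lifespan above
    client = request.state.requests_client      # shared httpx.AsyncClient
    return {"redis": redis_conn is not None, "client": client is not None}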


@@ -1,6 +1,7 @@
-from dotenv import dotenv_values
 import logging
+from dotenv import dotenv_values
+from src.schema import ConfigSchema

 logger = logging.getLogger("uvicorn")


@@ -1,5 +1,8 @@
 import logging
+from fastapi import Request
 import httpx
+from src.database import get_config_from_db

 def round_bytes(size_in_bytes: int) -> str:
     # Helper function to convert bytes to a human-readable format
@@ -10,24 +13,32 @@ def round_bytes(size_in_bytes: int) -> str:
     return f"{size_in_bytes:.2f}EB"

-async def get_data_from_ontap(client, logger, hostname: str, username: str, password: str, endpoint: str, query_string: str = ""):
-    url = f"https://{hostname}/api/{endpoint}"
-    if query_string:
-        url += f"?{query_string}"
-    async with client as _client:
+async def get_data_from_ontap(request: Request, logger, endpoint: str, query_string: str = ""):
+    # get clusters from redis
+    redis_conn = request.state.redis_conn
+    config = get_config_from_db(redis_conn)
+    logger.debug("Got the config from REDIS: %s", config)
+    results = []
+    client = request.state.requests_client
+    for cluster in config:
+        print(f"\n\n looping, {cluster}")
+        url = f"https://{cluster.hostname}/api/{endpoint}"
+        if query_string:
+            url += f"?{query_string}"
         try:
             logger.debug(f"Fetching data from ONTAP: {url}")
-            response = await _client.get(url, auth=(username, password))
+            response = await client.get(url, auth=(cluster.username, cluster.password))
             response.raise_for_status()
-            return response.json()
+            results.extend(response.json()["records"])
         except httpx.HTTPError as e:
             logger.error(f"HTTP error occurred: {e}")
-            return None
+    return results
 def setup_logging() -> None:
     """Configure logging for the application"""
-    logging.basicConfig(
-        level=logging.DEBUG,
-        format="[%(asctime)s] [%(levelname)5s] %(message)s"
-    )
-    print(f"Logger is initialized.")
+    logging.basicConfig(level=logging.DEBUG, format="[%(asctime)s] [%(levelname)5s] %(message)s")
+    print("Logger is initialized.")