Compare commits

..

22 Commits

Author SHA1 Message Date
Magel, Denis
b60383071a make shared connections available via request.shared.XXX 2025-09-18 15:28:50 +02:00
58f7c5c393 Merge pull request 'Refactoring config to config_upload, making important vars global' (#6) from feature/config_features into main
Reviewed-on: #6
2025-09-18 13:01:16 +00:00
Pascal Scheiben
5dfba7416b Refactoring config to config_upload, making important vars global 2025-09-18 15:00:03 +02:00
Magel, Denis
fc3f39c6ae fix: duplicate API startup 2025-09-18 14:48:46 +02:00
1ee40b6647 Merge pull request 'feature/config_upload' (#5) from feature/config_upload into main
Reviewed-on: #5
2025-09-18 12:34:38 +00:00
Magel, Denis
60008fa947 fix: adjusted paths to run from root dir
updated inventory.yaml
2025-09-18 14:33:30 +02:00
Magel, Denis
767f43551e rebased 2025-09-18 14:23:19 +02:00
Pascal Scheiben
9d12045b81 Arranging imports 2025-09-18 14:14:53 +02:00
Pascal Scheiben
72992d651d Fixing typos 2025-09-18 14:13:50 +02:00
Pascal Scheiben
ab52169987 Enhancing comments, adding stub for business logic 2025-09-18 14:13:50 +02:00
Pascal Scheiben
1a4e2ff688 Rewriting comments 2025-09-18 14:13:50 +02:00
Pascal Scheiben
579c62319c Adding config endpoint 2025-09-18 14:13:50 +02:00
2a165c91b6 Merge pull request 'backend' (#4) from backend into main
Reviewed-on: #4
2025-09-18 12:09:49 +00:00
Alexey
72f738a816 backedn dev 2025-09-18 14:09:11 +02:00
root
fe13e49172 dev schema 2025-09-18 14:09:11 +02:00
root
d90a18053f restructure 2025-09-18 14:09:11 +02:00
root
22419ecf84 read and write with readis 2025-09-18 14:09:11 +02:00
root
cf09ba6431 req file extended 2025-09-18 14:08:14 +02:00
root
774fa3484c basic creds schema 2025-09-18 14:04:06 +02:00
b0d70e2120 Merge pull request 'feat/handle-get-data-from-ONTAP' (#3) from feat/handle-get-data-from-ONTAP into main
Reviewed-on: #3
2025-09-18 12:00:54 +00:00
Magel, Denis
fc71950039 refactor: added async await to ONTAP call 2025-09-18 13:39:22 +02:00
Magel, Denis
e8efde9892 feat: added functionality inside GET/aggregates 2025-09-18 12:16:30 +02:00
21 changed files with 303 additions and 49 deletions

11
.env
View File

@@ -1,8 +1,3 @@
-# Environment variables for NetApp ONTAP clusters
-CLUSTER1_HOSTNAME=172.16.57.2
-CLUSTER1_USERNAME=admin
-CLUSTER1_PASSWORD=Netapp12
-CLUSTER2_HOSTNAME=172.16.56.2
-CLUSTER2_USERNAME=admin
-CLUSTER2_PASSWORD=Netapp12
+cluster_inventory_path = config/inventory.yml
+redis_host = '172.16.0.208'
+redis_port = '6379'

9
Dockerfile Normal file
View File

@@ -0,0 +1,9 @@
FROM python:latest
WORKDIR /usr/local/bin
COPY requirements.txt requirements.txt
RUN pip install -r requirements.txt
COPY src/start.py .
CMD ["src/start.py"]

6
config/inventory.yml Normal file
View File

@@ -0,0 +1,6 @@
- hostname: "172.16.57.2"
  username: "admin"
  password: "Netapp12"
- hostname: "172.16.56.2"
  username: "admin"
  password: "Netapp12"

0
initialize.py Normal file
View File

View File

@@ -1,4 +1,7 @@
 fastapi[standard]>=0.116.2
 httpx>=0.28.1
 redis>=6.4.0
 python-dotenv>=1.1.1
+pydantic
+redis[hiredis]
+dotenv

View File

@@ -1,4 +1,5 @@
 from src.example.router import router as example_router
-from .aggregate_router import router as aggregate_router
+from src.aggregate.aggregate_router import router as aggregate_router
 __all__ = ["example_router", "aggregate_router"]

View File

@@ -1,21 +1,16 @@
 # contains the router for the aggregates endpoint
-from fastapi import APIRouter, Query
+from fastapi import APIRouter, Query, Request
-from enum import Enum
 from typing import List
-from .aggregate_schema import AggregateSchema
+from .aggregate_schema import AggregateSchema, MetricEnum
 from .aggregate_service import get_aggregates
-class MetricEnum(str, Enum):
-    relative = "relative"
-    absolute = "absolute"
 router = APIRouter(tags=["aggregates"])
 @router.get("/aggregates", response_model=List[AggregateSchema])
 async def aggregates_endpoint(
+    request: Request,
     metric: MetricEnum = Query(MetricEnum.relative, description="Metric type"),
 ):
-    return await get_aggregates(metric)
+    return await get_aggregates(request, metric)

View File

@@ -1,8 +1,15 @@
 # contains the schema definitions for aggregates
 from pydantic import BaseModel
+from enum import Enum
 class AggregateSchema(BaseModel):
     aggregate: str
     node: str
-    available: str
+    available: int
+    available_str: str
+class MetricEnum(str, Enum):
+    relative = "relative"
+    absolute = "absolute"

View File

@@ -1,24 +1,37 @@
 # contains the business logic for aggregates
 from typing import List
-from .aggregate_schema import AggregateSchema
+from fastapi import Request
+from src.aggregate.aggregate_schema import AggregateSchema, MetricEnum
+from logging import getLogger
+from src.utils import round_bytes, get_data_from_ontap
+logger = getLogger("uvicorn")
+logger.setLevel("DEBUG")
-async def get_aggregates(metric: str = "relative") -> List[AggregateSchema]:
+async def get_aggregates(request: Request, metric: str = "relative") -> List[AggregateSchema]:
     # Dummy data for demonstration
     # You can use the metric parameter to filter or modify results as needed
     # For now, just return the same data and show metric usage
-    print(f"Metric used: {metric}")
+    logger.debug(f"Metric used: {metric}")
+    __aggregates = await get_data_from_ontap(request, logger, "172.16.57.2", "admin", "Netapp12", "storage/aggregates", "fields=name,uuid,space,node,home_node")
+    logger.debug(__aggregates)
+    __aggregates = __aggregates.get("records")
+    if metric == MetricEnum.relative:
+        __aggregates = sorted(__aggregates, key=lambda r: r["space"]["block_storage"].get("used_percent"), reverse=True)
+    elif metric == MetricEnum.absolute:
+        __aggregates = sorted(__aggregates, key=lambda r: r["space"]["block_storage"].get("available"), reverse=False)
     aggregates: list = [
         AggregateSchema(
-            aggregate="Aggregate A", node="cluster01-01", available="100.0TB"
-        ),
-        AggregateSchema(
-            aggregate="Aggregate B", node="cluster01-01", available="200.5GB"
-        ),
-        AggregateSchema(
-            aggregate="Aggregate C", node="cluster01-02", available="300.75MB"
-        ),
+            aggregate=a["name"],
+            node=a["node"]["name"],
+            available=a["space"]["block_storage"]["available"],
+            available_str=round_bytes(a["space"]["block_storage"]["available"]),
+        )
+        for a in __aggregates
     ]
     return aggregates
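Note on the field accesses above: the service assumes each entry in the records list returned by ONTAP's storage/aggregates endpoint carries at least the fields requested via the fields= query string. A rough sketch of one such record, with purely illustrative values:

# Illustrative shape of one ONTAP aggregate record as consumed by get_aggregates();
# field names follow the accesses in the diff above, values are made up.
record = {
    "name": "aggr1",
    "uuid": "11111111-2222-3333-4444-555555555555",
    "node": {"name": "cluster01-01"},
    "home_node": {"name": "cluster01-01"},
    "space": {
        "block_storage": {
            "available": 1099511627776,  # bytes
            "used_percent": 42,
        }
    },
}

With metric=relative the records are sorted by used_percent descending; with metric=absolute they are sorted by available bytes ascending.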

View File

@@ -0,0 +1,3 @@
from src.config_upload.router import router as config_router
__all__ = ["config_router"]

View File

@@ -0,0 +1,14 @@
POST http://127.0.0.1:8000/config
Content-Type: application/json

{
  "cluster_list": [
    {
      "hostname": "cluster1.demo.netapp.com",
      "username": "admin",
      "password": "Netapp1!"
    }
  ]
}
###
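The same request can also be issued from Python with httpx (already listed in requirements.txt); the host, port, and credentials below simply mirror the example payload above and are assumptions about a local dev setup, not part of the change itself.

# Sketch: POST the example configuration to a locally running instance of the API.
import httpx

payload = {
    "cluster_list": [
        {
            "hostname": "cluster1.demo.netapp.com",
            "username": "admin",
            "password": "Netapp1!",
        }
    ]
}
resp = httpx.post("http://127.0.0.1:8000/config", json=payload)
print(resp.status_code, resp.json())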

View File

@@ -0,0 +1,25 @@
import logging
from fastapi import APIRouter
from .schema import ConfigReturnSchema, ConfigSchema
from src.database import get_config_from_db
from src.main import shared_redis_conn
logger = logging.getLogger("uvicorn")
router = APIRouter(tags=["config_upload"])
@router.post(
    "/config", summary="Upload a configuration", response_model=ConfigReturnSchema
)
async def create_config(config: ConfigSchema) -> ConfigSchema:
    """
    Endpoint to receive and store configuration data.
    ⚠️ at this time the configuration is not stored anywhere. It's like logging to /dev/null
    """
    logger.info("Received configuration data")
    return config
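One behavior worth calling out: because the route declares response_model=ConfigReturnSchema, FastAPI serializes the returned ConfigSchema through that model, so password values never appear in the response body. For the example request above, the reply would look roughly like this, sketched as a Python literal:

# Approximate response body after response_model filtering; values mirror the
# example request in the .http file above.
expected_response = {
    "cluster_list": [
        {"hostname": "cluster1.demo.netapp.com", "username": "admin"}
    ]
}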

View File

@@ -0,0 +1,21 @@
# contains the schema definitions for the config_upload service
from pydantic import BaseModel
class ConfigEntrySchema(BaseModel):
    hostname: str
    username: str
    password: str
class ConfigOutSchema(BaseModel):
    hostname: str
    username: str
class ConfigReturnSchema(BaseModel):
    cluster_list: list[ConfigOutSchema]
class ConfigSchema(BaseModel):
    cluster_list: list[ConfigEntrySchema]

View File

@@ -0,0 +1,2 @@
# contains the business logic for the config_upload service
async def save_config() -> None: ...

40
src/database.py Normal file
View File

@@ -0,0 +1,40 @@
import json
import logging
from redis import Redis, ConnectionError
from typing import List
from pydantic import TypeAdapter
from src.schema import ConfigSchema
def setup_db_conn(redishost, redisport: str):
    ''' Setup Redis connection and return it open'''
    log = logging.getLogger('uvicorn')
    try:
        redisclient = Redis(host=redishost, port=redisport, decode_responses=True)
        if redisclient.ping():
            log.info(f"Connected to Redis DB {redishost} on port {redisport}")
        else:
            log.error(f"Cannot connect to Redis DB {redishost} on port {redisport}")
            exit(1)
        return redisclient
    except ConnectionError as e:
        print(f"FATAL: Redis DB {redishost} is unreachable on port {redisport}. Err: {e}")
        return None
    except Exception as e:
        print(f"FATAL: {e}")
        return None
def get_inventory_from_redis(redisclient: Redis):
    ''' Read inventory from Redis '''
    cluster_inv = redisclient.hgetall('cluster_inventory')
    if 'inventory' in cluster_inv:
        return json.loads(cluster_inv['inventory'])
    return {}
def get_config_from_db(redisclient: Redis) -> ConfigSchema:
    ''' Load inventory to global vars'''
    GLOBAL_INVENTORY = get_inventory_from_redis(redisclient)
    GLOBAL_INVENTORY_VALID = TypeAdapter(List[ConfigSchema]).validate_python(GLOBAL_INVENTORY)
    return GLOBAL_INVENTORY_VALID

View File

@@ -1,6 +1,7 @@
 # contains the router for the aggregate endpoint
 from fastapi import APIRouter
-from .schema import ExampleSchema
+from src.example.schema import ExampleSchema
 router = APIRouter(tags=["aggregate"])

View File

@@ -1,7 +1,18 @@
 # contains the schema definitions for the aggregate service
 from pydantic import BaseModel
+from pathlib import Path
 class ExampleSchema(BaseModel):
     example_field: str
     another_field: int
+class ClusterCreds(BaseModel):
+    """A structure to hold basic auth cluster credentials for a cluster"""
+    username: str
+    password: str
+    hostname: str = None
+    cert_filepath: Path = None
+    key_filepath: Path = None

45
src/initialize.py Normal file
View File

@@ -0,0 +1,45 @@
import os
import json
import logging
import yaml
from pathlib import Path
from dotenv import load_dotenv
from src.database import setup_db_conn
from src.schema import ConfigSchema
from typing import List
from pydantic import TypeAdapter
def initialize_config():
    load_dotenv()
    log = logging.getLogger("uvicorn")
    ENV_INVENTORYPATH = os.getenv("cluster_inventory_path")
    ENV_REDISHOST = os.getenv("redis_host")
    ENV_REDISPORT = os.getenv("redis_port")
    log.info(f"Found Cluster Inventory file at: {ENV_INVENTORYPATH}")
    if not ENV_INVENTORYPATH or not Path(ENV_INVENTORYPATH).is_file():
        print(f"FATAL: Inventory file {ENV_INVENTORYPATH} is missing or not a file.")
        return False
    try:
        with open(ENV_INVENTORYPATH, "r") as f:
            inv = yaml.safe_load(f)
        inventory = json.dumps(inv)
    except Exception as e:
        print(f"FATAL: Cannot read inventory file {ENV_INVENTORYPATH}. Err: {e}")
        return False
    print(f"[INFO] Importing configuration to DB...")
    try:
        GLOBAL_INVENTORY_VALID = TypeAdapter(List[ConfigSchema]).validate_python(inv)
        redis_conn = setup_db_conn(ENV_REDISHOST, ENV_REDISPORT)
        redis_conn.hset("cluster_inventory", mapping={"inventory": inventory})
        redis_conn.close()
        log.info("Configuration has been loaded.")
        return True
    except Exception as e:
        print(f"FATAL: Redis DB error: {e}")
        return False

View File

@@ -1,23 +1,52 @@
-from src.service import load_config
+import os
-from fastapi import FastAPI
 import logging
+import httpx
+from fastapi import FastAPI
+shared_redis_conn = None
+requests_client = None
 from src.aggregate import aggregate_router
+from src.config_upload import config_router
+from contextlib import asynccontextmanager
+from .database import setup_db_conn, get_config_from_db
+from src.initialize import initialize_config
+from .utils import setup_logging
 logger = logging.getLogger("uvicorn")
+logger.setLevel("DEBUG")
 logger.info("Starting application")
-config = load_config()
-app = FastAPI()
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    """make loading it async"""
+    global shared_redis_conn, requests_client
+    log = logging.getLogger("uvicorn")
+    cfg_init_result = initialize_config()
+    shared_redis_conn = setup_db_conn(os.getenv("redis_host"), os.getenv("redis_port"))
+    if not shared_redis_conn:
+        log.error("Cannot connect to Redis DB. Exiting...")
+        exit(1)
+    inv_check = get_config_from_db(shared_redis_conn)
+    log.info(f"[DEBUG] Data validity healthcheck (DEVELOPER MODE): {inv_check}")
+    if not cfg_init_result:
+        log.error("Configuration initialization failed. Exiting...")
+        # exit(1)
+    requests_client = httpx.AsyncClient(verify=False)
+    yield {"redis_conn": shared_redis_conn, "requests_client": requests_client}
+    await requests_client.aclose()
+    log.info("Shutting down FastAPI app...")
+setup_logging()
+log = logging.getLogger("uvicorn")
+log.info("Starting FastAPI app...")
+app = FastAPI(lifespan=lifespan)
 app.include_router(aggregate_router)
+app.include_router(config_router)
+@app.get("/")
+async def main():
+    return {"Hello": "World"}
-@app.get("/config")
-async def get_config():
-    """Endpoint to get the current configuration."""
-    return config.model_dump()
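The dict yielded by lifespan becomes the application's lifespan state, which is exposed to every request as request.state; that is how get_data_from_ontap in src/utils.py reaches the shared httpx.AsyncClient. A minimal sketch of the same pattern in another route (the path and response keys are illustrative, not part of this change):

# Sketch: reading the shared connections from lifespan state in a route handler.
# State keys match those yielded by lifespan() above; the route itself is hypothetical.
from fastapi import APIRouter, Request

router = APIRouter()

@router.get("/healthz")
async def healthz(request: Request):
    redis_conn = request.state.redis_conn        # shared Redis client
    http_client = request.state.requests_client  # shared httpx.AsyncClient
    return {"redis": bool(redis_conn.ping()), "http_client_open": not http_client.is_closed}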

View File

@@ -1,6 +1,7 @@
-from dotenv import dotenv_values
 import logging
+from dotenv import dotenv_values
 from src.schema import ConfigSchema
 logger = logging.getLogger("uvicorn")

33
src/utils.py Normal file
View File

@@ -0,0 +1,33 @@
import logging
from fastapi import Request
import httpx
def round_bytes(size_in_bytes: int) -> str:
    # Helper function to convert bytes to a human-readable format
    for unit in ["B", "KiB", "MiB", "GiB", "TiB", "PiB"]:
        if size_in_bytes < 1024:
            return f"{size_in_bytes:.2f}{unit}"
        size_in_bytes /= 1024
    return f"{size_in_bytes:.2f}EB"
async def get_data_from_ontap(request: Request, logger, hostname: str, username: str, password: str, endpoint: str, query_string: str = ""):
    url = f"https://{hostname}/api/{endpoint}"
    if query_string:
        url += f"?{query_string}"
    async with request.state.requests_client as _client:
        try:
            logger.debug(f"Fetching data from ONTAP: {url}")
            response = await _client.get(url, auth=(username, password))
            response.raise_for_status()
            return response.json()
        except httpx.HTTPError as e:
            logger.error(f"HTTP error occurred: {e}")
            return None
def setup_logging() -> None:
    """Configure logging for the application"""
    logging.basicConfig(level=logging.DEBUG, format="[%(asctime)s] [%(levelname)5s] %(message)s")
    print(f"Logger is initialized.")