Compare commits

...

13 Commits

Author SHA1 Message Date
Magel, Denis
b60383071a make shared connections available via request.shared.XXX 2025-09-18 15:28:50 +02:00
58f7c5c393 Merge pull request 'Refactoring config to config_upload, making important vars global' (#6) from feature/config_features into main
Reviewed-on: #6
2025-09-18 13:01:16 +00:00
Pascal Scheiben
5dfba7416b Refactoring config to config_upload, making important vars global 2025-09-18 15:00:03 +02:00
Magel, Denis
fc3f39c6ae fix: duplicate API startup 2025-09-18 14:48:46 +02:00
1ee40b6647 Merge pull request 'feature/config_upload' (#5) from feature/config_upload into main
Reviewed-on: #5
2025-09-18 12:34:38 +00:00
Magel, Denis
60008fa947 fix: adjusted paths to run from root dir
updated inventory.yaml
2025-09-18 14:33:30 +02:00
Magel, Denis
767f43551e rebased 2025-09-18 14:23:19 +02:00
Pascal Scheiben
9d12045b81 Arranging imports 2025-09-18 14:14:53 +02:00
Pascal Scheiben
72992d651d Fixing typos 2025-09-18 14:13:50 +02:00
Pascal Scheiben
ab52169987 Enhancing comments, adding stub for business logic 2025-09-18 14:13:50 +02:00
Pascal Scheiben
1a4e2ff688 Rewriting comments 2025-09-18 14:13:50 +02:00
Pascal Scheiben
579c62319c Adding config endpoint 2025-09-18 14:13:50 +02:00
2a165c91b6 Merge pull request 'backend' (#4) from backend into main
Reviewed-on: #4
2025-09-18 12:09:49 +00:00
16 changed files with 138 additions and 68 deletions

11
.env
View File

@@ -1,8 +1,3 @@
# Environment variables for NetApp ONTAP clusters cluster_inventory_path = config/inventory.yml
CLUSTER1_HOSTNAME=172.16.57.2 redis_host = '172.16.0.208'
CLUSTER1_USERNAME=admin redis_port = '6379'
CLUSTER1_PASSWORD=Netapp12
CLUSTER2_HOSTNAME=172.16.56.2
CLUSTER2_USERNAME=admin
CLUSTER2_PASSWORD=Netapp12

View File

@@ -1,8 +1,6 @@
- 1: - hostname: "172.16.57.2"
hostname: '172.16.57.2' username: "admin"
username: 'admin' password: "Netapp12"
password: 'Netapp12' - hostname: "172.16.56.2"
- 2: username: "admin"
hostname: '172.16.56.2' password: "Netapp12"
username: 'admin'
password: 'Netapp12'

View File

@@ -1,4 +1,5 @@
from src.example.router import router as example_router from src.example.router import router as example_router
from .aggregate_router import router as aggregate_router
from src.aggregate.aggregate_router import router as aggregate_router
__all__ = ["example_router", "aggregate_router"] __all__ = ["example_router", "aggregate_router"]

View File

@@ -3,9 +3,9 @@
from typing import List from typing import List
from fastapi import Request from fastapi import Request
from .aggregate_schema import AggregateSchema, MetricEnum from src.aggregate.aggregate_schema import AggregateSchema, MetricEnum
from logging import getLogger from logging import getLogger
from ..utils import round_bytes, get_data_from_ontap from src.utils import round_bytes, get_data_from_ontap
logger = getLogger("uvicorn") logger = getLogger("uvicorn")
logger.setLevel("DEBUG") logger.setLevel("DEBUG")
@@ -16,8 +16,7 @@ async def get_aggregates(request: Request, metric: str = "relative") -> List[Agg
# You can use the metric parameter to filter or modify results as needed # You can use the metric parameter to filter or modify results as needed
# For now, just return the same data and show metric usage # For now, just return the same data and show metric usage
logger.debug(f"Metric used: {metric}") logger.debug(f"Metric used: {metric}")
client = request.app.requests_client __aggregates = await get_data_from_ontap(request, logger, "172.16.57.2", "admin", "Netapp12", "storage/aggregates", "fields=name,uuid,space,node,home_node")
__aggregates = await get_data_from_ontap(client, logger, "172.16.57.2", "admin", "Netapp12", "storage/aggregates", "fields=name,uuid,space,node,home_node")
logger.debug(__aggregates) logger.debug(__aggregates)
__aggregates = __aggregates.get("records") __aggregates = __aggregates.get("records")
if metric == MetricEnum.relative: if metric == MetricEnum.relative:

View File

@@ -0,0 +1,3 @@
from src.config_upload.router import router as config_router
__all__ = ["config_router"]

View File

@@ -0,0 +1,14 @@
POST http://127.0.0.1:8000/config
Content-Type: application/json
{
"cluster_list": [
{
"hostname": "cluster1.demo.netapp.com",
"username": "admin",
"password": "Netapp1!"
}
]
}
###

View File

@@ -0,0 +1,25 @@
import logging
from fastapi import APIRouter
from .schema import ConfigReturnSchema, ConfigSchema
from src.database import get_config_from_db
from src.main import shared_redis_conn
logger = logging.getLogger("uvicorn")
router = APIRouter(tags=["config_upload"])
@router.post(
    "/config", summary="Upload a configuration", response_model=ConfigReturnSchema
)
async def create_config(config: ConfigSchema) -> ConfigReturnSchema:
    """
    Endpoint to receive and store configuration data.

    Accepts a full ``ConfigSchema`` payload (hostname, username, password per
    cluster). The response is serialized through ``response_model=ConfigReturnSchema``,
    so password fields are stripped and never echoed back to the caller —
    the return annotation now matches that contract (was ``ConfigSchema``,
    which wrongly documented passwords in the response).

    ⚠️ at this time the configuration is not stored anywhere. It's like logging to /dev/null
    """
    logger.info("Received configuration data")
    # FastAPI filters the returned object through ConfigReturnSchema,
    # dropping the password field during serialization.
    return config

View File

@@ -0,0 +1,21 @@
# contains the schema definitions for the config_upload service
from pydantic import BaseModel
class ConfigEntrySchema(BaseModel):
    """Inbound credentials for a single cluster; includes the plaintext password."""

    # cluster management address or DNS name
    hostname: str
    username: str
    password: str
class ConfigOutSchema(BaseModel):
    """Outbound view of a cluster entry; deliberately omits the password."""

    hostname: str
    username: str
class ConfigReturnSchema(BaseModel):
    """Response payload: password-free list of cluster entries."""

    cluster_list: list[ConfigOutSchema]
class ConfigSchema(BaseModel):
    """Request payload: list of full cluster credential entries (with passwords)."""

    cluster_list: list[ConfigEntrySchema]

View File

@@ -0,0 +1,2 @@
# contains the business logic for the config_upload service
async def save_config() -> None: ...

View File

@@ -3,7 +3,7 @@ import logging
from redis import Redis, ConnectionError from redis import Redis, ConnectionError
from typing import List from typing import List
from pydantic import TypeAdapter from pydantic import TypeAdapter
from schema import ConfigSchema from src.schema import ConfigSchema
def setup_db_conn(redishost, redisport: str): def setup_db_conn(redishost, redisport: str):

View File

@@ -1,6 +1,7 @@
# contains the router for the aggregate endpoint # contains the router for the aggregate endpoint
from fastapi import APIRouter from fastapi import APIRouter
from .schema import ExampleSchema
from src.example.schema import ExampleSchema
router = APIRouter(tags=["aggregate"]) router = APIRouter(tags=["aggregate"])

View File

@@ -1,15 +1,18 @@
# contains the schema definitions for the aggregate service # contains the schema definitions for the aggregate service
from pydantic import BaseModel from pydantic import BaseModel
from pathlib import Path
class ExampleSchema(BaseModel): class ExampleSchema(BaseModel):
example_field: str example_field: str
another_field: int another_field: int
class ClusterCreds(BaseModel): class ClusterCreds(BaseModel):
"""A structure to hold basic auth cluster credentials for a cluster""" """A structure to hold basic auth cluster credentials for a cluster"""
username: str
password: str username: str
hostname: str = None password: str
hostname: str = None
cert_filepath: Path = None cert_filepath: Path = None
key_filepath: Path = None key_filepath: Path = None

View File

@@ -5,35 +5,36 @@ import yaml
from pathlib import Path from pathlib import Path
from dotenv import load_dotenv from dotenv import load_dotenv
from database import setup_db_conn from src.database import setup_db_conn
from schema import ConfigSchema from src.schema import ConfigSchema
from typing import List from typing import List
from pydantic import TypeAdapter from pydantic import TypeAdapter
def initialize_config(): def initialize_config():
load_dotenv() load_dotenv()
log = logging.getLogger('uvicorn') log = logging.getLogger("uvicorn")
ENV_INVENTORYPATH = os.getenv('cluster_inventory_path') ENV_INVENTORYPATH = os.getenv("cluster_inventory_path")
ENV_REDISHOST = os.getenv('redis_host') ENV_REDISHOST = os.getenv("redis_host")
ENV_REDISPORT = os.getenv('redis_port') ENV_REDISPORT = os.getenv("redis_port")
log.info(f"Found Cluster Inventory file at: {ENV_INVENTORYPATH}") log.info(f"Found Cluster Inventory file at: {ENV_INVENTORYPATH}")
if not ENV_INVENTORYPATH or not Path(ENV_INVENTORYPATH).is_file(): if not ENV_INVENTORYPATH or not Path(ENV_INVENTORYPATH).is_file():
print(f"FATAL: Inventory file {ENV_INVENTORYPATH} is missing or not a file.") print(f"FATAL: Inventory file {ENV_INVENTORYPATH} is missing or not a file.")
return False return False
try: try:
with open(ENV_INVENTORYPATH, 'r') as f: with open(ENV_INVENTORYPATH, "r") as f:
inv = yaml.safe_load(f) inv = yaml.safe_load(f)
inventory = json.dumps(inv) inventory = json.dumps(inv)
except Exception as e: except Exception as e:
print(f"FATAL: Cannot read inventory file {ENV_INVENTORYPATH}. Err: {e}") print(f"FATAL: Cannot read inventory file {ENV_INVENTORYPATH}. Err: {e}")
return False return False
print(f'[INFO] Importing configuration to DB...') print(f"[INFO] Importing configuration to DB...")
try: try:
GLOBAL_INVENTORY_VALID = TypeAdapter(List[ConfigSchema]).validate_python(inv) GLOBAL_INVENTORY_VALID = TypeAdapter(List[ConfigSchema]).validate_python(inv)
redis_conn = setup_db_conn(ENV_REDISHOST, ENV_REDISPORT) redis_conn = setup_db_conn(ENV_REDISHOST, ENV_REDISPORT)
redis_conn.hset('cluster_inventory', mapping={'inventory': inventory}) redis_conn.hset("cluster_inventory", mapping={"inventory": inventory})
redis_conn.close() redis_conn.close()
log.info("Configuration has been loaded.") log.info("Configuration has been loaded.")

View File

@@ -1,30 +1,33 @@
import os import os
import json
import logging import logging
import yaml import httpx
from pathlib import Path
from dotenv import load_dotenv
from redis import Redis
from contextlib import asynccontextmanager
from pydantic import BaseModel, ValidationError, SecretStr, AnyHttpUrl
from typing import Optional, Literal, List, Union
from fastapi import FastAPI from fastapi import FastAPI
shared_redis_conn = None
requests_client = None
from database import setup_db_conn, get_inventory_from_redis, get_config_from_db from src.aggregate import aggregate_router
from src.config_upload import config_router
from contextlib import asynccontextmanager
from .database import setup_db_conn, get_config_from_db
from src.initialize import initialize_config from src.initialize import initialize_config
from utils import setup_logging from .utils import setup_logging
logger = logging.getLogger("uvicorn")
logger.setLevel("DEBUG")
logger.info("Starting application")
@asynccontextmanager @asynccontextmanager
async def lifespan(app: FastAPI): async def lifespan(app: FastAPI):
''' make loading it async''' """make loading it async"""
log = logging.getLogger('uvicorn') global shared_redis_conn, requests_client
log = logging.getLogger("uvicorn")
cfg_init_result = initialize_config() cfg_init_result = initialize_config()
shared_redis_conn = setup_db_conn(os.getenv('redis_host'), os.getenv('redis_port')) shared_redis_conn = setup_db_conn(os.getenv("redis_host"), os.getenv("redis_port"))
if not shared_redis_conn: if not shared_redis_conn:
log.error("Cannot connect to Redis DB. Exiting...") log.error("Cannot connect to Redis DB. Exiting...")
exit(1) exit(1)
@@ -34,13 +37,16 @@ async def lifespan(app: FastAPI):
if not cfg_init_result: if not cfg_init_result:
log.error("Configuration initialization failed. Exiting...") log.error("Configuration initialization failed. Exiting...")
# exit(1) # exit(1)
requests_client = httpx.AsyncClient(verify=False)
yield yield {"redis_conn": shared_redis_conn, "requests_client": requests_client}
await requests_client.aclose()
log.info("Shutting down FastAPI app...") log.info("Shutting down FastAPI app...")
setup_logging() setup_logging()
log = logging.getLogger('uvicorn') log = logging.getLogger("uvicorn")
log.info("Starting FastAPI app...") log.info("Starting FastAPI app...")
app = FastAPI(lifespan=lifespan) app = FastAPI(lifespan=lifespan)
app.include_router(aggregate_router)
app.include_router(config_router)

View File

@@ -1,6 +1,7 @@
from dotenv import dotenv_values
import logging import logging
from dotenv import dotenv_values
from src.schema import ConfigSchema from src.schema import ConfigSchema
logger = logging.getLogger("uvicorn") logger = logging.getLogger("uvicorn")

View File

@@ -1,6 +1,8 @@
import logging import logging
from fastapi import Request
import httpx import httpx
def round_bytes(size_in_bytes: int) -> str: def round_bytes(size_in_bytes: int) -> str:
# Helper function to convert bytes to a human-readable format # Helper function to convert bytes to a human-readable format
for unit in ["B", "KiB", "MiB", "GiB", "TiB", "PiB"]: for unit in ["B", "KiB", "MiB", "GiB", "TiB", "PiB"]:
@@ -10,11 +12,11 @@ def round_bytes(size_in_bytes: int) -> str:
return f"{size_in_bytes:.2f}EB" return f"{size_in_bytes:.2f}EB"
async def get_data_from_ontap(client, logger, hostname: str, username: str, password: str, endpoint: str, query_string: str = ""): async def get_data_from_ontap(request: Request, logger, hostname: str, username: str, password: str, endpoint: str, query_string: str = ""):
url = f"https://{hostname}/api/{endpoint}" url = f"https://{hostname}/api/{endpoint}"
if query_string: if query_string:
url += f"?{query_string}" url += f"?{query_string}"
async with client as _client: async with request.state.requests_client as _client:
try: try:
logger.debug(f"Fetching data from ONTAP: {url}") logger.debug(f"Fetching data from ONTAP: {url}")
response = await _client.get(url, auth=(username, password)) response = await _client.get(url, auth=(username, password))
@@ -24,10 +26,8 @@ async def get_data_from_ontap(client, logger, hostname: str, username: str, pass
logger.error(f"HTTP error occurred: {e}") logger.error(f"HTTP error occurred: {e}")
return None return None
def setup_logging() -> None: def setup_logging() -> None:
"""Configure logging for the application""" """Configure logging for the application"""
logging.basicConfig( logging.basicConfig(level=logging.DEBUG, format="[%(asctime)s] [%(levelname)5s] %(message)s")
level=logging.DEBUG,
format="[%(asctime)s] [%(levelname)5s] %(message)s"
)
print(f"Logger is initialized.") print(f"Logger is initialized.")