Compare commits

...

15 Commits

Author SHA1 Message Date
Pascal Scheiben
9d12045b81 Arranging imports 2025-09-18 14:14:53 +02:00
Pascal Scheiben
72992d651d Fixing typos 2025-09-18 14:13:50 +02:00
Pascal Scheiben
ab52169987 Enhancing comments, adding stub for business logic 2025-09-18 14:13:50 +02:00
Pascal Scheiben
1a4e2ff688 Rewriting comments 2025-09-18 14:13:50 +02:00
Pascal Scheiben
579c62319c Adding config endpoint 2025-09-18 14:13:50 +02:00
2a165c91b6 Merge pull request 'backend' (#4) from backend into main
Reviewed-on: #4
2025-09-18 12:09:49 +00:00
Alexey
72f738a816 backedn dev 2025-09-18 14:09:11 +02:00
root
fe13e49172 dev schema 2025-09-18 14:09:11 +02:00
root
d90a18053f restructure 2025-09-18 14:09:11 +02:00
root
22419ecf84 read and write with readis 2025-09-18 14:09:11 +02:00
root
cf09ba6431 req file extended 2025-09-18 14:08:14 +02:00
root
774fa3484c basic creds schema 2025-09-18 14:04:06 +02:00
b0d70e2120 Merge pull request 'feat/handle-get-data-from-ONTAP' (#3) from feat/handle-get-data-from-ONTAP into main
Reviewed-on: #3
2025-09-18 12:00:54 +00:00
Magel, Denis
fc71950039 refactor: added async await to ONTAP call 2025-09-18 13:39:22 +02:00
Magel, Denis
e8efde9892 feat: added functionality inside GET/aggregates 2025-09-18 12:16:30 +02:00
20 changed files with 285 additions and 34 deletions

9
Dockerfile Normal file
View File

@@ -0,0 +1,9 @@
FROM python:latest
WORKDIR /usr/local/bin
COPY requirements.txt requirements.txt
RUN pip install -r requirements.txt
COPY src/start.py .
CMD ["src/start.py"]

8
config/inventory.yml Normal file
View File

@@ -0,0 +1,8 @@
- 1:
    hostname: '172.16.57.2'
    username: 'admin'
    password: 'Netapp12'
- 2:
    hostname: '172.16.56.2'
    username: 'admin'
    password: 'Netapp12'
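
Note: this inventory file is consumed by src/initialize.py further down; a minimal sketch of that parse step (assuming PyYAML and this file path):

import json
import yaml

# Parse the demo inventory the way initialize.py does: YAML in, JSON string out.
with open("config/inventory.yml", "r") as f:
    inv = yaml.safe_load(f)   # -> list of one-key dicts, keyed by cluster index

print(json.dumps(inv, indent=2))  # the serialized form that ends up in Redis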

0
initialize.py Normal file
View File

View File

@@ -2,3 +2,6 @@ fastapi[standard]>=0.116.2
 httpx>=0.28.1
 redis>=6.4.0
 python-dotenv>=1.1.1
+pydantic
+redis[hiredis]
+dotenv

View File

@@ -1,4 +1,5 @@
 from src.example.router import router as example_router
 from .aggregate_router import router as aggregate_router
 __all__ = ["example_router", "aggregate_router"]

View File

@@ -1,21 +1,16 @@
 # contains the router for the aggregates endpoint
-from fastapi import APIRouter, Query
-from enum import Enum
+from fastapi import APIRouter, Query, Request
 from typing import List
-from .aggregate_schema import AggregateSchema
+from .aggregate_schema import AggregateSchema, MetricEnum
 from .aggregate_service import get_aggregates
-
-class MetricEnum(str, Enum):
-    relative = "relative"
-    absolute = "absolute"
 
 router = APIRouter(tags=["aggregates"])
 
 @router.get("/aggregates", response_model=List[AggregateSchema])
 async def aggregates_endpoint(
+    request: Request,
     metric: MetricEnum = Query(MetricEnum.relative, description="Metric type"),
 ):
-    return await get_aggregates(metric)
+    return await get_aggregates(request, metric)
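
Note: the MetricEnum-typed Query parameter means FastAPI validates ?metric= against the enum and returns 422 for anything else. A self-contained sketch of that behaviour (standalone demo app, not the project's):

from enum import Enum
from fastapi import FastAPI, Query
from fastapi.testclient import TestClient

class MetricEnum(str, Enum):
    relative = "relative"
    absolute = "absolute"

app = FastAPI()

@app.get("/aggregates")
async def aggregates(metric: MetricEnum = Query(MetricEnum.relative, description="Metric type")):
    return {"metric": metric}

client = TestClient(app)
print(client.get("/aggregates").json())                    # {'metric': 'relative'}
print(client.get("/aggregates?metric=absolute").json())    # {'metric': 'absolute'}
print(client.get("/aggregates?metric=bogus").status_code)  # 422, rejected by the enum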

View File

@@ -1,8 +1,15 @@
 # contains the schema definitions for aggregates
 from pydantic import BaseModel
+from enum import Enum
 
 class AggregateSchema(BaseModel):
     aggregate: str
     node: str
-    available: str
+    available: int
+    available_str: str
+
+class MetricEnum(str, Enum):
+    relative = "relative"
+    absolute = "absolute"

View File

@@ -1,24 +1,38 @@
 # contains the business logic for aggregates
 from typing import List
-from .aggregate_schema import AggregateSchema
+from fastapi import Request
+from .aggregate_schema import AggregateSchema, MetricEnum
+from logging import getLogger
+from ..utils import round_bytes, get_data_from_ontap
+
+logger = getLogger("uvicorn")
+logger.setLevel("DEBUG")
 
-async def get_aggregates(metric: str = "relative") -> List[AggregateSchema]:
+async def get_aggregates(request: Request, metric: str = "relative") -> List[AggregateSchema]:
     # Dummy data for demonstration
     # You can use the metric parameter to filter or modify results as needed
     # For now, just return the same data and show metric usage
-    print(f"Metric used: {metric}")
+    logger.debug(f"Metric used: {metric}")
+    client = request.app.requests_client
+    __aggregates = await get_data_from_ontap(client, logger, "172.16.57.2", "admin", "Netapp12", "storage/aggregates", "fields=name,uuid,space,node,home_node")
+    logger.debug(__aggregates)
+    __aggregates = __aggregates.get("records")
+    if metric == MetricEnum.relative:
+        __aggregates = sorted(__aggregates, key=lambda r: r["space"]["block_storage"].get("used_percent"), reverse=True)
+    elif metric == MetricEnum.absolute:
+        __aggregates = sorted(__aggregates, key=lambda r: r["space"]["block_storage"].get("available"), reverse=False)
     aggregates: list = [
         AggregateSchema(
-            aggregate="Aggregate A", node="cluster01-01", available="100.0TB"
-        ),
-        AggregateSchema(
-            aggregate="Aggregate B", node="cluster01-01", available="200.5GB"
-        ),
-        AggregateSchema(
-            aggregate="Aggregate C", node="cluster01-02", available="300.75MB"
-        ),
+            aggregate=a["name"],
+            node=a["node"]["name"],
+            available=a["space"]["block_storage"]["available"],
+            available_str=round_bytes(a["space"]["block_storage"]["available"]),
+        )
+        for a in __aggregates
     ]
     return aggregates
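
Note: the two metric modes only change how the ONTAP records are sorted before they are mapped into AggregateSchema. A small illustration with fabricated records shaped like the fields= selection above:

records = [
    {"name": "aggr1", "node": {"name": "cluster01-01"},
     "space": {"block_storage": {"available": 2_000_000, "used_percent": 80}}},
    {"name": "aggr2", "node": {"name": "cluster01-02"},
     "space": {"block_storage": {"available": 500_000, "used_percent": 40}}},
]

# metric == "relative": fullest aggregate (highest used_percent) first
by_relative = sorted(records, key=lambda r: r["space"]["block_storage"].get("used_percent"), reverse=True)
# metric == "absolute": least free space first
by_absolute = sorted(records, key=lambda r: r["space"]["block_storage"].get("available"))

print([r["name"] for r in by_relative])  # ['aggr1', 'aggr2']
print([r["name"] for r in by_absolute])  # ['aggr2', 'aggr1']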

3
src/config/__init__.py Normal file
View File

@@ -0,0 +1,3 @@
from src.config.router import router as config_router
__all__ = ["config_router"]

14
src/config/config.http Normal file
View File

@@ -0,0 +1,14 @@
POST http://127.0.0.1:8000/config
Content-Type: application/json

{
    "cluster_list": [
        {
            "hostname": "cluster1.demo.netapp.com",
            "username": "admin",
            "password": "Netapp1!"
        }
    ]
}

###
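
Note: an httpx equivalent of the request above, assuming the API is running locally on port 8000 (payload values are the demo ones from this file):

import httpx

payload = {
    "cluster_list": [
        {"hostname": "cluster1.demo.netapp.com", "username": "admin", "password": "Netapp1!"}
    ]
}

resp = httpx.post("http://127.0.0.1:8000/config", json=payload)
print(resp.status_code)
print(resp.json())  # passwords are dropped by the response model, see src/config/router.py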

22
src/config/router.py Normal file
View File

@@ -0,0 +1,22 @@
import logging

from fastapi import APIRouter

from .schema import ConfigReturnSchema, ConfigSchema

logger = logging.getLogger("uvicorn")

router = APIRouter(tags=["config"])


@router.post(
    "/config", summary="Upload a configuration", response_model=ConfigReturnSchema
)
async def create_config(config: ConfigSchema) -> ConfigSchema:
    """
    Endpoint to receive and store configuration data.
    ⚠️ at this time the configuration is not stored anywhere. It's like logging to /dev/null
    """
    logger.info("Received configuration data")
    return config

21
src/config/schema.py Normal file
View File

@@ -0,0 +1,21 @@
# contains the schema definitions for the config service
from pydantic import BaseModel


class ConfigEntrySchema(BaseModel):
    hostname: str
    username: str
    password: str


class ConfigOutSchema(BaseModel):
    hostname: str
    username: str


class ConfigReturnSchema(BaseModel):
    cluster_list: list[ConfigOutSchema]


class ConfigSchema(BaseModel):
    cluster_list: list[ConfigEntrySchema]
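
Note: the ConfigSchema/ConfigReturnSchema split is what keeps passwords out of the /config response: ConfigOutSchema has no password field, so serializing through it drops the secret. A plain-Pydantic sketch of that filtering (values made up; assumes the project root is on PYTHONPATH):

from src.config.schema import ConfigReturnSchema

incoming = {"cluster_list": [{"hostname": "c1", "username": "admin", "password": "secret"}]}
outgoing = ConfigReturnSchema.model_validate(incoming)
print(outgoing.model_dump())
# {'cluster_list': [{'hostname': 'c1', 'username': 'admin'}]}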

2
src/config/service.py Normal file
View File

@@ -0,0 +1,2 @@
# contains the business logic for the config service
async def save_config() -> None: ...
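
Note: save_config() is still a stub. One possible shape, assuming it will eventually write to the same 'cluster_inventory' hash used by initialize.py and database.py (purely illustrative, not part of this change):

import json
from redis import Redis

async def save_config(redisclient: Redis, config: dict) -> None:
    # Hypothetical: persist the validated config next to the YAML-seeded inventory
    redisclient.hset("cluster_inventory", mapping={"inventory": json.dumps(config)})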

40
src/database.py Normal file
View File

@@ -0,0 +1,40 @@
import json
import logging
from redis import Redis, ConnectionError
from typing import List
from pydantic import TypeAdapter
from schema import ConfigSchema


def setup_db_conn(redishost, redisport: str):
    ''' Setup Redis connection and return it open'''
    log = logging.getLogger('uvicorn')
    try:
        redisclient = Redis(host=redishost, port=redisport, decode_responses=True)
        if redisclient.ping():
            log.info(f"Connected to Redis DB {redishost} on port {redisport}")
        else:
            log.error(f"Cannot connect to Redis DB {redishost} on port {redisport}")
            exit(1)
        return redisclient
    except ConnectionError as e:
        print(f"FATAL: Redis DB {redishost} is unreachable on port {redisport}. Err: {e}")
        return None
    except Exception as e:
        print(f"FATAL: {e}")
        return None


def get_inventory_from_redis(redisclient: Redis):
    ''' Read inventory from Redis '''
    cluster_inv = redisclient.hgetall('cluster_inventory')
    if 'inventory' in cluster_inv:
        return json.loads(cluster_inv['inventory'])
    return {}


def get_config_from_db(redisclient: Redis) -> ConfigSchema:
    ''' Load inventory to global vars'''
    GLOBAL_INVENTORY = get_inventory_from_redis(redisclient)
    GLOBAL_INVENTORY_VALID = TypeAdapter(List[ConfigSchema]).validate_python(GLOBAL_INVENTORY)
    return GLOBAL_INVENTORY_VALID
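
Note: typical call sequence for these helpers; "localhost"/"6379" are placeholders, not values from the repository:

from database import setup_db_conn, get_inventory_from_redis

redis_conn = setup_db_conn("localhost", "6379")       # returns None if Redis is unreachable
if redis_conn:
    inventory = get_inventory_from_redis(redis_conn)  # {} until initialize_config() has run
    print(inventory)
    redis_conn.close()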

View File

@@ -1,5 +1,6 @@
 # contains the router for the aggregate endpoint
 from fastapi import APIRouter
 from .schema import ExampleSchema
 
 router = APIRouter(tags=["aggregate"])

View File

@@ -5,3 +5,11 @@ from pydantic import BaseModel
 class ExampleSchema(BaseModel):
     example_field: str
     another_field: int
+
+class ClusterCreds(BaseModel):
+    """A structure to hold basic auth cluster credentials for a cluster"""
+    username: str
+    password: str
+    hostname: str = None
+    cert_filepath: Path = None
+    key_filepath: Path = None
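
Note: ClusterCreds gives Path-typed fields a None default; under Pydantic v2 these would normally be declared Optional (with pathlib.Path imported at the top of schema.py). A sketch of that variant, offered as a suggestion rather than part of this diff:

from pathlib import Path
from typing import Optional
from pydantic import BaseModel

class ClusterCreds(BaseModel):
    """Basic-auth or certificate credentials for one cluster."""
    username: str
    password: str
    hostname: Optional[str] = None
    cert_filepath: Optional[Path] = None
    key_filepath: Optional[Path] = None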

44
src/initialize.py Normal file
View File

@@ -0,0 +1,44 @@
import os
import json
import logging
import yaml
from pathlib import Path
from dotenv import load_dotenv
from database import setup_db_conn
from schema import ConfigSchema
from typing import List
from pydantic import TypeAdapter


def initialize_config():
    load_dotenv()
    log = logging.getLogger('uvicorn')
    ENV_INVENTORYPATH = os.getenv('cluster_inventory_path')
    ENV_REDISHOST = os.getenv('redis_host')
    ENV_REDISPORT = os.getenv('redis_port')
    log.info(f"Found Cluster Inventory file at: {ENV_INVENTORYPATH}")

    if not ENV_INVENTORYPATH or not Path(ENV_INVENTORYPATH).is_file():
        print(f"FATAL: Inventory file {ENV_INVENTORYPATH} is missing or not a file.")
        return False
    try:
        with open(ENV_INVENTORYPATH, 'r') as f:
            inv = yaml.safe_load(f)
            inventory = json.dumps(inv)
    except Exception as e:
        print(f"FATAL: Cannot read inventory file {ENV_INVENTORYPATH}. Err: {e}")
        return False

    print(f'[INFO] Importing configuration to DB...')
    try:
        GLOBAL_INVENTORY_VALID = TypeAdapter(List[ConfigSchema]).validate_python(inv)
        redis_conn = setup_db_conn(ENV_REDISHOST, ENV_REDISPORT)
        redis_conn.hset('cluster_inventory', mapping={'inventory': inventory})
        redis_conn.close()
        log.info("Configuration has been loaded.")
        return True
    except Exception as e:
        print(f"FATAL: Redis DB error: {e}")
        return False

View File

@@ -1,23 +1,48 @@
-from src.service import load_config
-from fastapi import FastAPI
+import os
 import logging
+from fastapi import FastAPI
 from src.aggregate import aggregate_router
+from src.config import config_router
+from contextlib import asynccontextmanager
+from database import setup_db_conn, get_config_from_db
+from src.initialize import initialize_config
+from utils import setup_logging
 
 logger = logging.getLogger("uvicorn")
 logger.info("Starting application")
-config = load_config()
 
 app = FastAPI()
 app.include_router(aggregate_router)
+app.include_router(config_router)
 
-@app.get("/")
-async def main():
-    return {"Hello": "World"}
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    """make loading it async"""
+    log = logging.getLogger("uvicorn")
+    cfg_init_result = initialize_config()
+    shared_redis_conn = setup_db_conn(os.getenv("redis_host"), os.getenv("redis_port"))
+    if not shared_redis_conn:
+        log.error("Cannot connect to Redis DB. Exiting...")
+        exit(1)
+    inv_check = get_config_from_db(shared_redis_conn)
+    log.info(f"[DEBUG] Data validity healthcheck (DEVELOPER MODE): {inv_check}")
+    if not cfg_init_result:
+        log.error("Configuration initialization failed. Exiting...")
+        # exit(1)
+    yield
+    log.info("Shutting down FastAPI app...")
 
-@app.get("/config")
-async def get_config():
-    """Endpoint to get the current configuration."""
-    return config.model_dump()
+setup_logging()
+log = logging.getLogger("uvicorn")
+log.info("Starting FastAPI app...")
+app = FastAPI(lifespan=lifespan)
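
Note: aggregate_service.py reads request.app.requests_client, which the main.py shown here does not assign. One way to provide it is from the lifespan handler; the snippet below is a suggestion, not part of this diff (verify=False assumes self-signed ONTAP certificates):

import httpx
from contextlib import asynccontextmanager
from fastapi import FastAPI

@asynccontextmanager
async def lifespan(app: FastAPI):
    # Hypothetical: share one AsyncClient so request.app.requests_client resolves
    app.requests_client = httpx.AsyncClient(verify=False)
    yield
    await app.requests_client.aclose()

app = FastAPI(lifespan=lifespan)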

View File

@@ -1,6 +1,7 @@
-from dotenv import dotenv_values
 import logging
+
+from dotenv import dotenv_values
 from src.schema import ConfigSchema
 
 logger = logging.getLogger("uvicorn")

33
src/utils.py Normal file
View File

@@ -0,0 +1,33 @@
import logging
import httpx


def round_bytes(size_in_bytes: int) -> str:
    # Helper function to convert bytes to a human-readable format
    for unit in ["B", "KiB", "MiB", "GiB", "TiB", "PiB"]:
        if size_in_bytes < 1024:
            return f"{size_in_bytes:.2f}{unit}"
        size_in_bytes /= 1024
    return f"{size_in_bytes:.2f}EB"


async def get_data_from_ontap(client, logger, hostname: str, username: str, password: str, endpoint: str, query_string: str = ""):
    url = f"https://{hostname}/api/{endpoint}"
    if query_string:
        url += f"?{query_string}"
    async with client as _client:
        try:
            logger.debug(f"Fetching data from ONTAP: {url}")
            response = await _client.get(url, auth=(username, password))
            response.raise_for_status()
            return response.json()
        except httpx.HTTPError as e:
            logger.error(f"HTTP error occurred: {e}")
            return None


def setup_logging() -> None:
    """Configure logging for the application"""
    logging.basicConfig(
        level=logging.DEBUG,
        format="[%(asctime)s] [%(levelname)5s] %(message)s"
    )
    print(f"Logger is initialized.")