Compare commits

...

17 Commits

Author SHA1 Message Date
Alexey
72f738a816 backend dev 2025-09-18 14:09:11 +02:00
root
fe13e49172 dev schema 2025-09-18 14:09:11 +02:00
root
d90a18053f restructure 2025-09-18 14:09:11 +02:00
root
22419ecf84 read and write with redis 2025-09-18 14:09:11 +02:00
root
cf09ba6431 req file extended 2025-09-18 14:08:14 +02:00
root
774fa3484c basic creds schema 2025-09-18 14:04:06 +02:00
b0d70e2120 Merge pull request 'feat/handle-get-data-from-ONTAP' (#3) from feat/handle-get-data-from-ONTAP into main
Reviewed-on: #3
2025-09-18 12:00:54 +00:00
Magel, Denis
fc71950039 refactor: added async await to ONTAP call 2025-09-18 13:39:22 +02:00
Magel, Denis
e8efde9892 feat: added functionality inside GET/aggregates 2025-09-18 12:16:30 +02:00
1592333ef8 Merge pull request 'get-aggregates' (#2) from get-aggregates into main
Reviewed-on: #2
2025-09-18 09:06:09 +00:00
Pascal Scheiben
73a42aae3b Moving example files back to its place 2025-09-18 10:39:36 +02:00
Pascal Scheiben
615d290773 Renamed folder from example -> aggregate 2025-09-18 10:23:40 +02:00
Magel, Denis
af4b60a0e3 style: 2025-09-18 10:17:24 +02:00
Magel, Denis
63bcd9b931 feat: add GET /aggregates 2025-09-18 10:10:30 +02:00
d564710004 Merge pull request 'feature/base_file' (#1) from feature/base_file into main
Reviewed-on: #1
2025-09-18 07:36:51 +00:00
Pascal Scheiben
76c5353afa Adding base logic for config handling. Adding example config to root 2025-09-18 09:33:55 +02:00
Pascal Scheiben
19e9cd6625 Adding python dotenv to requirements/pyproject 2025-09-18 09:07:09 +02:00
22 changed files with 307 additions and 16 deletions

8
.env Normal file
View File

@@ -0,0 +1,8 @@
# Environment variables for NetApp ONTAP clusters
CLUSTER1_HOSTNAME=172.16.57.2
CLUSTER1_USERNAME=admin
CLUSTER1_PASSWORD=Netapp12
CLUSTER2_HOSTNAME=172.16.56.2
CLUSTER2_USERNAME=admin
CLUSTER2_PASSWORD=Netapp12

2
.gitignore vendored
View File

@@ -129,7 +129,7 @@ celerybeat.pid
*.sage.py *.sage.py
# Environments # Environments
.env #.env
.venv .venv
env/ env/
venv/ venv/

9
Dockerfile Normal file
View File

@@ -0,0 +1,9 @@
# Minimal image that runs the FastAPI start script.
# NOTE(review): pin the base image (e.g. python:3.13-slim) instead of :latest
# for reproducible builds.
FROM python:latest
WORKDIR /usr/local/bin
COPY requirements.txt requirements.txt
RUN pip install -r requirements.txt
# start.py lands directly in the WORKDIR, so it must be started from there
# with the interpreter — the original CMD ["src/start.py"] pointed at a path
# that does not exist in the image.
COPY src/start.py .
CMD ["python", "start.py"]

8
config/inventory.yml Normal file
View File

@@ -0,0 +1,8 @@
- 1:
hostname: '172.16.57.2'
username: 'admin'
password: 'Netapp12'
- 2:
hostname: '172.16.56.2'
username: 'admin'
password: 'Netapp12'

0
initialize.py Normal file
View File

View File

@@ -12,5 +12,6 @@ requires-python = ">=3.13"
dependencies = [ dependencies = [
"fastapi[standard]>=0.116.2", "fastapi[standard]>=0.116.2",
"httpx>=0.28.1", "httpx>=0.28.1",
"python-dotenv>=1.1.1",
"redis>=6.4.0", "redis>=6.4.0",
] ]

View File

@@ -1,3 +1,7 @@
fastapi[standard]>=0.116.2 fastapi[standard]>=0.116.2
httpx>=0.28.1 httpx>=0.28.1
redis>=6.4.0 redis>=6.4.0
python-dotenv>=1.1.1
pydantic
redis[hiredis]
dotenv

View File

@@ -0,0 +1,4 @@
from src.example.router import router as example_router
from .aggregate_router import router as aggregate_router
__all__ = ["example_router", "aggregate_router"]

View File

@@ -0,0 +1,16 @@
# contains the router for the aggregates endpoint
from fastapi import APIRouter, Query, Request
from typing import List
from .aggregate_schema import AggregateSchema, MetricEnum
from .aggregate_service import get_aggregates
# Router exposing the aggregates endpoint; mounted by the application elsewhere.
router = APIRouter(tags=["aggregates"])


@router.get("/aggregates", response_model=List[AggregateSchema])
async def aggregates_endpoint(
    request: Request,
    metric: MetricEnum = Query(MetricEnum.relative, description="Metric type"),
):
    """Return ONTAP aggregates sorted by the requested metric.

    Args:
        request: forwarded to the service layer, which reads the shared
            HTTP client from ``request.app.requests_client``.
        metric: ``relative`` or ``absolute`` sort order (see MetricEnum).

    Returns:
        A list of AggregateSchema entries.
    """
    return await get_aggregates(request, metric)

View File

@@ -0,0 +1,15 @@
# contains the schema definitions for aggregates
from pydantic import BaseModel
from enum import Enum
class AggregateSchema(BaseModel):
    """API representation of one ONTAP storage aggregate."""
    # Aggregate name as reported by ONTAP.
    aggregate: str
    # Name of the node owning the aggregate.
    node: str
    # Available (free) space in bytes.
    available: int
    # Human-readable rendering of `available` (e.g. "1.50KiB").
    available_str: str
class MetricEnum(str, Enum):
    """Sort metric accepted by the aggregates endpoint."""
    relative = "relative"  # sort by used-percentage, descending
    absolute = "absolute"  # sort by absolute available bytes, ascending

View File

@@ -0,0 +1,38 @@
# contains the business logic for aggregates
from typing import List
from fastapi import Request
from .aggregate_schema import AggregateSchema, MetricEnum
from logging import getLogger
from ..utils import round_bytes, get_data_from_ontap
logger = getLogger("uvicorn")
logger.setLevel("DEBUG")
async def get_aggregates(request: Request, metric: str = "relative") -> List[AggregateSchema]:
    """Fetch aggregates from ONTAP and return them sorted by the given metric.

    Args:
        request: FastAPI request; provides the shared HTTP client via
            ``request.app.requests_client``.
        metric: MetricEnum.relative sorts by used-percentage (descending),
            MetricEnum.absolute sorts by available bytes (ascending).

    Returns:
        A list of AggregateSchema entries; empty when ONTAP returns no data.
    """
    logger.debug(f"Metric used: {metric}")
    client = request.app.requests_client
    # TODO(review): cluster address and credentials are hard-coded here;
    # they should come from the inventory/config instead.
    payload = await get_data_from_ontap(
        client, logger, "172.16.57.2", "admin", "Netapp12",
        "storage/aggregates", "fields=name,uuid,space,node,home_node",
    )
    logger.debug(payload)
    # get_data_from_ontap returns None on HTTP errors, and "records" may be
    # absent; either way, fall back to an empty result instead of crashing.
    records = (payload or {}).get("records") or []
    if metric == MetricEnum.relative:
        # Missing used_percent sorts as 0 rather than raising TypeError.
        records = sorted(records, key=lambda r: r["space"]["block_storage"].get("used_percent", 0), reverse=True)
    elif metric == MetricEnum.absolute:
        records = sorted(records, key=lambda r: r["space"]["block_storage"].get("available", 0))
    return [
        AggregateSchema(
            aggregate=a["name"],
            node=a["node"]["name"],
            available=a["space"]["block_storage"]["available"],
            available_str=round_bytes(a["space"]["block_storage"]["available"]),
        )
        for a in records
    ]

40
src/database.py Normal file
View File

@@ -0,0 +1,40 @@
import json
import logging
from redis import Redis, ConnectionError
from typing import List
from pydantic import TypeAdapter
from schema import ConfigSchema
def setup_db_conn(redishost, redisport: str):
    """Open a Redis connection and verify it with PING.

    Args:
        redishost: Redis hostname or IP address.
        redisport: Redis port (redis-py accepts str or int).

    Returns:
        A connected Redis client with decoded responses, or None when the
        connection cannot be established or the PING fails.
    """
    log = logging.getLogger('uvicorn')
    try:
        redisclient = Redis(host=redishost, port=redisport, decode_responses=True)
        if redisclient.ping():
            log.info(f"Connected to Redis DB {redishost} on port {redisport}")
            return redisclient
        # A failed PING previously killed the whole process with exit(1);
        # return None instead so callers decide how to fail (both existing
        # callers already handle a None return).
        log.error(f"Cannot connect to Redis DB {redishost} on port {redisport}")
        return None
    except ConnectionError as e:
        print(f"FATAL: Redis DB {redishost} is unreachable on port {redisport}. Err: {e}")
        return None
    except Exception as e:
        print(f"FATAL: {e}")
        return None
def get_inventory_from_redis(redisclient: Redis):
    """Return the stored cluster inventory from Redis, or {} when absent."""
    stored = redisclient.hgetall('cluster_inventory')
    try:
        return json.loads(stored['inventory'])
    except KeyError:
        # No inventory has been written yet.
        return {}
def get_config_from_db(redisclient: Redis) -> ConfigSchema:
    """Read the inventory from Redis and validate it as a list of ConfigSchema."""
    raw_inventory = get_inventory_from_redis(redisclient)
    validated = TypeAdapter(List[ConfigSchema]).validate_python(raw_inventory)
    return validated

View File

@@ -1,3 +0,0 @@
from .router import router as example_router
__all__ = ["example_router"]

View File

@@ -1,8 +1,9 @@
# contains the router for the example endpoint # contains the router for the aggregate endpoint
from fastapi import APIRouter from fastapi import APIRouter
from .schema import ExampleSchema from .schema import ExampleSchema
router = APIRouter(tags=["example"]) router = APIRouter(tags=["aggregate"])
@router.get("/example") @router.get("/example")
async def example_endpoint() -> ExampleSchema: async def example_endpoint() -> ExampleSchema:

View File

@@ -1,6 +1,15 @@
# contains the schema definitions for the example service # contains the schema definitions for the aggregate service
from pydantic import BaseModel from pydantic import BaseModel
class ExampleSchema(BaseModel): class ExampleSchema(BaseModel):
example_field: str example_field: str
another_field: int another_field: int
class ClusterCreds(BaseModel):
    """Basic-auth / certificate credentials for a single ONTAP cluster."""
    username: str
    password: str
    # Explicit Optional instead of the invalid implicit `str = None`
    # (pydantic v2 no longer treats a None default as Optional).
    # NOTE(review): `Path` and `Optional` are not imported in the visible
    # hunk — ensure `from pathlib import Path` and `from typing import
    # Optional` exist at the top of this module.
    hostname: Optional[str] = None
    cert_filepath: Optional[Path] = None
    key_filepath: Optional[Path] = None

View File

@@ -1,3 +1,3 @@
# contains the business logic for the example service # contains the business logic for the aggregate service
async def example_service() -> str: async def example_service() -> str:
return "This is an example service" return "This is an aggregate service"

44
src/initialize.py Normal file
View File

@@ -0,0 +1,44 @@
import os
import json
import logging
import yaml
from pathlib import Path
from dotenv import load_dotenv
from database import setup_db_conn
from schema import ConfigSchema
from typing import List
from pydantic import TypeAdapter
def initialize_config():
    """Read the YAML cluster inventory, validate it, and store it in Redis.

    Returns:
        True on success; False on any failure (missing file, unreadable
        YAML, schema-validation error, or Redis unavailable).
    """
    load_dotenv()
    log = logging.getLogger('uvicorn')
    ENV_INVENTORYPATH = os.getenv('cluster_inventory_path')
    ENV_REDISHOST = os.getenv('redis_host')
    ENV_REDISPORT = os.getenv('redis_port')
    if not ENV_INVENTORYPATH or not Path(ENV_INVENTORYPATH).is_file():
        print(f"FATAL: Inventory file {ENV_INVENTORYPATH} is missing or not a file.")
        return False
    # Only report the file once it has actually been found (the original
    # logged "Found ..." before the existence check).
    log.info(f"Found Cluster Inventory file at: {ENV_INVENTORYPATH}")
    try:
        with open(ENV_INVENTORYPATH, 'r') as f:
            inv = yaml.safe_load(f)
        inventory = json.dumps(inv)
    except Exception as e:
        print(f"FATAL: Cannot read inventory file {ENV_INVENTORYPATH}. Err: {e}")
        return False
    print('[INFO] Importing configuration to DB...')
    try:
        # Validation is performed purely for its side effect: it raises on
        # malformed inventory data; the validated result is not needed here.
        TypeAdapter(List[ConfigSchema]).validate_python(inv)
        redis_conn = setup_db_conn(ENV_REDISHOST, ENV_REDISPORT)
        if redis_conn is None:
            # setup_db_conn returns None when Redis is unreachable; avoid the
            # misleading AttributeError the original would hit on hset().
            print(f"FATAL: Redis DB {ENV_REDISHOST} is not available.")
            return False
        redis_conn.hset('cluster_inventory', mapping={'inventory': inventory})
        redis_conn.close()
        log.info("Configuration has been loaded.")
        return True
    except Exception as e:
        print(f"FATAL: Redis DB error: {e}")
        return False

View File

@@ -1,5 +1,46 @@
def main() -> None: import os
print("Hello, World!") import json
import logging
import yaml
if __name__ == "__main__": from pathlib import Path
main() from dotenv import load_dotenv
from redis import Redis
from contextlib import asynccontextmanager
from pydantic import BaseModel, ValidationError, SecretStr, AnyHttpUrl
from typing import Optional, Literal, List, Union
from fastapi import FastAPI
from database import setup_db_conn, get_inventory_from_redis, get_config_from_db
from src.initialize import initialize_config
from utils import setup_logging
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: load the inventory into Redis on startup.

    Runs before the app starts serving; code after `yield` runs at shutdown.
    """
    log = logging.getLogger('uvicorn')
    # Import the YAML inventory into Redis; returns False on any failure.
    cfg_init_result = initialize_config()
    shared_redis_conn = setup_db_conn(os.getenv('redis_host'), os.getenv('redis_port'))
    if not shared_redis_conn:
        log.error("Cannot connect to Redis DB. Exiting...")
        exit(1)
    # Round-trip the inventory through Redis as a startup health check.
    inv_check = get_config_from_db(shared_redis_conn)
    log.info(f"[DEBUG] Data validity healthcheck (DEVELOPER MODE): {inv_check}")
    if not cfg_init_result:
        # NOTE(review): failure is only logged, not fatal — the exit(1) is
        # deliberately commented out; confirm whether startup should abort.
        log.error("Configuration initialization failed. Exiting...")
        # exit(1)
    yield
    log.info("Shutting down FastAPI app...")
# Configure root logging before the application object is created.
setup_logging()
log = logging.getLogger('uvicorn')
log.info("Starting FastAPI app...")
# Startup/shutdown work happens in the lifespan context manager above.
app = FastAPI(lifespan=lifespan)

7
src/schema.py Normal file
View File

@@ -0,0 +1,7 @@
from pydantic import BaseModel
class ConfigSchema(BaseModel):
    """Connection credentials for a single ONTAP cluster."""
    # Cluster management hostname or IP address.
    hostname: str
    username: str
    password: str

16
src/service.py Normal file
View File

@@ -0,0 +1,16 @@
from dotenv import dotenv_values
import logging
from src.schema import ConfigSchema
logger = logging.getLogger("uvicorn")
def load_config(cluster: int = 1) -> ConfigSchema:
    """Load one cluster's credentials from the .env file.

    Generalized from the original hard-coded CLUSTER1_* lookup; the default
    keeps the previous behavior for existing callers.

    Args:
        cluster: 1-based cluster index matching the CLUSTER<N>_* keys in .env.

    Returns:
        A validated ConfigSchema for the requested cluster.

    Raises:
        KeyError: if any of the expected CLUSTER<N>_* keys is missing.
    """
    logger.info("Loading config from .env file")
    config = dotenv_values(".env")
    prefix = f"CLUSTER{cluster}"
    return ConfigSchema(
        hostname=config[f"{prefix}_HOSTNAME"],
        username=config[f"{prefix}_USERNAME"],
        password=config[f"{prefix}_PASSWORD"],
    )

33
src/utils.py Normal file
View File

@@ -0,0 +1,33 @@
import logging
import httpx
def round_bytes(size_in_bytes: int) -> str:
    """Convert a byte count to a human-readable binary-unit string.

    Uses 1024-based units (B, KiB, MiB, ...). Values of 1 EiB and above are
    reported in EiB — the original returned the inconsistent label "EB"
    (a decimal-unit name) alongside binary units.

    Args:
        size_in_bytes: non-negative byte count.

    Returns:
        e.g. "512.00B", "1.50KiB", "2.00EiB".
    """
    size = float(size_in_bytes)
    for unit in ["B", "KiB", "MiB", "GiB", "TiB", "PiB"]:
        if size < 1024:
            return f"{size:.2f}{unit}"
        size /= 1024
    return f"{size:.2f}EiB"
async def get_data_from_ontap(client, logger, hostname: str, username: str, password: str, endpoint: str, query_string: str = ""):
    """GET an ONTAP REST endpoint and return the parsed JSON body.

    Args:
        client: shared async HTTP client (e.g. request.app.requests_client).
        logger: logger for debug/error output.
        hostname: cluster management address.
        username/password: basic-auth credentials.
        endpoint: REST path under /api/, e.g. "storage/aggregates".
        query_string: optional raw query string (without the leading '?').

    Returns:
        Parsed JSON dict, or None on any httpx HTTP error.
    """
    url = f"https://{hostname}/api/{endpoint}"
    if query_string:
        url += f"?{query_string}"
    try:
        logger.debug(f"Fetching data from ONTAP: {url}")
        # Do NOT wrap the shared client in `async with` — the original did,
        # which closes the application-wide client after the first request
        # and breaks every subsequent call.
        response = await client.get(url, auth=(username, password))
        response.raise_for_status()
        return response.json()
    except httpx.HTTPError as e:
        logger.error(f"HTTP error occurred: {e}")
        return None
def setup_logging() -> None:
    """Configure root logging: DEBUG level with timestamped, aligned output."""
    logging.basicConfig(
        level=logging.DEBUG,
        format="[%(asctime)s] [%(levelname)5s] %(message)s"
    )
    # Plain string: the original used an f-string with no placeholders (F541).
    print("Logger is initialized.")