Compare commits

..

6 Commits

Author SHA1 Message Date
Alexey
4acb5ac3bb backend dev 2025-09-18 14:00:05 +02:00
root
c7e3dbf6b4 dev schema 2025-09-18 12:11:52 +02:00
root
0b30d5ea83 restructure 2025-09-18 12:04:10 +02:00
root
8ac82369f5 read and write with redis 2025-09-18 11:45:51 +02:00
root
1eac2e75ae req file extended 2025-09-18 09:39:36 +02:00
root
847579419b basic creds schema 2025-09-18 09:04:08 +02:00
24 changed files with 63 additions and 255 deletions

3
.env
View File

@@ -1,3 +0,0 @@
cluster_inventory_path = config/inventory.yml
redis_host = '172.16.0.208'
redis_port = '6379'

2
.gitignore vendored
View File

@@ -129,7 +129,7 @@ celerybeat.pid
*.sage.py
# Environments
#.env
.env
.venv
env/
venv/

View File

@@ -1,6 +1,8 @@
- hostname: "172.16.57.2"
username: "admin"
password: "Netapp12"
- hostname: "172.16.56.2"
username: "admin"
password: "Netapp12"
- 1:
hostname: '172.16.57.2'
username: 'admin'
password: 'Netapp12'
- 2:
hostname: '172.16.56.2'
username: 'admin'
password: 'Netapp12'

View File

@@ -12,6 +12,5 @@ requires-python = ">=3.13"
dependencies = [
"fastapi[standard]>=0.116.2",
"httpx>=0.28.1",
"python-dotenv>=1.1.1",
"redis>=6.4.0",
]

View File

@@ -1,7 +1,6 @@
fastapi[standard]>=0.116.2
httpx>=0.28.1
redis>=6.4.0
python-dotenv>=1.1.1
pydantic
redis[hiredis]
dotenv

View File

@@ -1,5 +0,0 @@
from src.example.router import router as example_router
from src.aggregate.aggregate_router import router as aggregate_router
__all__ = ["example_router", "aggregate_router"]

View File

@@ -1,16 +0,0 @@
# contains the router for the aggregates endpoint
from fastapi import APIRouter, Query, Request
from typing import List
from .aggregate_schema import AggregateSchema, MetricEnum
from .aggregate_service import get_aggregates
router = APIRouter(tags=["aggregates"])
@router.get("/aggregates", response_model=List[AggregateSchema])
async def aggregates_endpoint(
request: Request,
metric: MetricEnum = Query(MetricEnum.relative, description="Metric type"),
):
return await get_aggregates(request, metric)

View File

@@ -1,15 +0,0 @@
# contains the schema definitions for aggregates
from pydantic import BaseModel
from enum import Enum
class AggregateSchema(BaseModel):
aggregate: str
node: str
available: int
available_str: str
class MetricEnum(str, Enum):
relative = "relative"
absolute = "absolute"

View File

@@ -1,37 +0,0 @@
# contains the business logic for aggregates
from typing import List
from fastapi import Request
from src.aggregate.aggregate_schema import AggregateSchema, MetricEnum
from logging import getLogger
from src.utils import round_bytes, get_data_from_ontap
logger = getLogger("uvicorn")
logger.setLevel("DEBUG")
async def get_aggregates(request: Request, metric: str = "relative") -> List[AggregateSchema]:
# Dummy data for demonstration
# You can use the metric parameter to filter or modify results as needed
# For now, just return the same data and show metric usage
logger.debug(f"Metric used: {metric}")
__aggregates = await get_data_from_ontap(request, logger, "172.16.57.2", "admin", "Netapp12", "storage/aggregates", "fields=name,uuid,space,node,home_node")
logger.debug(__aggregates)
__aggregates = __aggregates.get("records")
if metric == MetricEnum.relative:
__aggregates = sorted(__aggregates, key=lambda r: r["space"]["block_storage"].get("used_percent"), reverse=True)
elif metric == MetricEnum.absolute:
__aggregates = sorted(__aggregates, key=lambda r: r["space"]["block_storage"].get("available"), reverse=False)
aggregates: list = [
AggregateSchema(
aggregate=a["name"],
node=a["node"]["name"],
available=a["space"]["block_storage"]["available"],
available_str=round_bytes(a["space"]["block_storage"]["available"]),
)
for a in __aggregates
]
return aggregates

View File

@@ -1,3 +0,0 @@
from src.config_upload.router import router as config_router
__all__ = ["config_router"]

View File

@@ -1,14 +0,0 @@
POST http://127.0.0.1:8000/config
Content-Type: application/json
{
"cluster_list": [
{
"hostname": "cluster1.demo.netapp.com",
"username": "admin",
"password": "Netapp1!"
}
]
}
###

View File

@@ -1,25 +0,0 @@
import logging
from fastapi import APIRouter
from .schema import ConfigReturnSchema, ConfigSchema
from src.database import get_config_from_db
from src.main import shared_redis_conn
logger = logging.getLogger("uvicorn")
router = APIRouter(tags=["config_upload"])
@router.post(
"/config", summary="Upload a configuration", response_model=ConfigReturnSchema
)
async def create_config(config: ConfigSchema) -> ConfigSchema:
"""
Endpoint to receive and store configuration data.
⚠️ at this time the configuration is not stored anywhere. It's like logging to /dev/null
"""
logger.info("Received configuration data")
return config

View File

@@ -1,21 +0,0 @@
# contains the schema definitions for the config_upload service
from pydantic import BaseModel
class ConfigEntrySchema(BaseModel):
hostname: str
username: str
password: str
class ConfigOutSchema(BaseModel):
hostname: str
username: str
class ConfigReturnSchema(BaseModel):
cluster_list: list[ConfigOutSchema]
class ConfigSchema(BaseModel):
cluster_list: list[ConfigEntrySchema]

View File

@@ -1,2 +0,0 @@
# contains the business logic for the config_upload service
async def save_config() -> None: ...

View File

@@ -3,7 +3,7 @@ import logging
from redis import Redis, ConnectionError
from typing import List
from pydantic import TypeAdapter
from src.schema import ConfigSchema
from schema import ConfigSchema
def setup_db_conn(redishost, redisport: str):

View File

@@ -0,0 +1,3 @@
from .router import router as example_router
__all__ = ["example_router"]

View File

@@ -1,10 +1,8 @@
# contains the router for the aggregate endpoint
# contains the router for the example endpoint
from fastapi import APIRouter
from .schema import ExampleSchema
from src.example.schema import ExampleSchema
router = APIRouter(tags=["aggregate"])
router = APIRouter(tags=["example"])
@router.get("/example")
async def example_endpoint() -> ExampleSchema:

View File

@@ -1,18 +1,14 @@
# contains the schema definitions for the aggregate service
# contains the schema definitions for the example service
from pydantic import BaseModel
from pathlib import Path
class ExampleSchema(BaseModel):
example_field: str
another_field: int
class ClusterCreds(BaseModel):
"""A structure to hold basic auth cluster credentials for a cluster"""
username: str
password: str
hostname: str = None
username: str
password: str
hostname: str = None
cert_filepath: Path = None
key_filepath: Path = None
key_filepath: Path = None

View File

@@ -1,3 +1,3 @@
# contains the business logic for the aggregate service
# contains the business logic for the example service
async def example_service() -> str:
return "This is an aggregate service"
return "This is an example service"

View File

@@ -5,36 +5,35 @@ import yaml
from pathlib import Path
from dotenv import load_dotenv
from src.database import setup_db_conn
from src.schema import ConfigSchema
from database import setup_db_conn
from schema import ConfigSchema
from typing import List
from pydantic import TypeAdapter
def initialize_config():
load_dotenv()
log = logging.getLogger("uvicorn")
ENV_INVENTORYPATH = os.getenv("cluster_inventory_path")
ENV_REDISHOST = os.getenv("redis_host")
ENV_REDISPORT = os.getenv("redis_port")
log = logging.getLogger('uvicorn')
ENV_INVENTORYPATH = os.getenv('cluster_inventory_path')
ENV_REDISHOST = os.getenv('redis_host')
ENV_REDISPORT = os.getenv('redis_port')
log.info(f"Found Cluster Inventory file at: {ENV_INVENTORYPATH}")
if not ENV_INVENTORYPATH or not Path(ENV_INVENTORYPATH).is_file():
print(f"FATAL: Inventory file {ENV_INVENTORYPATH} is missing or not a file.")
return False
try:
with open(ENV_INVENTORYPATH, "r") as f:
with open(ENV_INVENTORYPATH, 'r') as f:
inv = yaml.safe_load(f)
inventory = json.dumps(inv)
except Exception as e:
print(f"FATAL: Cannot read inventory file {ENV_INVENTORYPATH}. Err: {e}")
return False
print(f"[INFO] Importing configuration to DB...")
print(f'[INFO] Importing configuration to DB...')
try:
GLOBAL_INVENTORY_VALID = TypeAdapter(List[ConfigSchema]).validate_python(inv)
redis_conn = setup_db_conn(ENV_REDISHOST, ENV_REDISPORT)
redis_conn.hset("cluster_inventory", mapping={"inventory": inventory})
redis_conn.hset('cluster_inventory', mapping={'inventory': inventory})
redis_conn.close()
log.info("Configuration has been loaded.")

View File

@@ -1,33 +1,30 @@
import os
import json
import logging
import httpx
from fastapi import FastAPI
shared_redis_conn = None
requests_client = None
from src.aggregate import aggregate_router
from src.config_upload import config_router
import yaml
from pathlib import Path
from dotenv import load_dotenv
from redis import Redis
from contextlib import asynccontextmanager
from .database import setup_db_conn, get_config_from_db
from src.initialize import initialize_config
from .utils import setup_logging
from pydantic import BaseModel, ValidationError, SecretStr, AnyHttpUrl
from typing import Optional, Literal, List, Union
from fastapi import FastAPI
logger = logging.getLogger("uvicorn")
logger.setLevel("DEBUG")
logger.info("Starting application")
from database import setup_db_conn, get_inventory_from_redis, get_config_from_db
from src.initialize import initialize_config
from utils import setup_logging
@asynccontextmanager
async def lifespan(app: FastAPI):
"""make loading it async"""
global shared_redis_conn, requests_client
log = logging.getLogger("uvicorn")
''' make loading it async'''
log = logging.getLogger('uvicorn')
cfg_init_result = initialize_config()
shared_redis_conn = setup_db_conn(os.getenv("redis_host"), os.getenv("redis_port"))
shared_redis_conn = setup_db_conn(os.getenv('redis_host'), os.getenv('redis_port'))
if not shared_redis_conn:
log.error("Cannot connect to Redis DB. Exiting...")
exit(1)
@@ -37,16 +34,13 @@ async def lifespan(app: FastAPI):
if not cfg_init_result:
log.error("Configuration initialization failed. Exiting...")
# exit(1)
requests_client = httpx.AsyncClient(verify=False)
yield {"redis_conn": shared_redis_conn, "requests_client": requests_client}
await requests_client.aclose()
yield
log.info("Shutting down FastAPI app...")
setup_logging()
log = logging.getLogger("uvicorn")
log = logging.getLogger('uvicorn')
log.info("Starting FastAPI app...")
app = FastAPI(lifespan=lifespan)
app.include_router(aggregate_router)
app.include_router(config_router)

View File

@@ -1,17 +0,0 @@
import logging
from dotenv import dotenv_values
from src.schema import ConfigSchema
logger = logging.getLogger("uvicorn")
def load_config() -> ConfigSchema:
logger.info("Loading config from .env file")
config = dotenv_values(".env")
return ConfigSchema(
hostname=config["CLUSTER1_HOSTNAME"],
username=config["CLUSTER1_USERNAME"],
password=config["CLUSTER1_PASSWORD"],
)

View File

@@ -1,33 +1,9 @@
import logging
from fastapi import Request
import httpx
def round_bytes(size_in_bytes: int) -> str:
# Helper function to convert bytes to a human-readable format
for unit in ["B", "KiB", "MiB", "GiB", "TiB", "PiB"]:
if size_in_bytes < 1024:
return f"{size_in_bytes:.2f}{unit}"
size_in_bytes /= 1024
return f"{size_in_bytes:.2f}EB"
async def get_data_from_ontap(request: Request, logger, hostname: str, username: str, password: str, endpoint: str, query_string: str = ""):
url = f"https://{hostname}/api/{endpoint}"
if query_string:
url += f"?{query_string}"
async with request.state.requests_client as _client:
try:
logger.debug(f"Fetching data from ONTAP: {url}")
response = await _client.get(url, auth=(username, password))
response.raise_for_status()
return response.json()
except httpx.HTTPError as e:
logger.error(f"HTTP error occurred: {e}")
return None
def setup_logging() -> None:
"""Configure logging for the application"""
logging.basicConfig(level=logging.DEBUG, format="[%(asctime)s] [%(levelname)5s] %(message)s")
logging.basicConfig(
level=logging.DEBUG,
format="[%(asctime)s] [%(levelname)5s] %(message)s"
)
print(f"Logger is initialized.")