Compare commits

..

17 Commits

Author SHA1 Message Date
Alexey
72f738a816 backedn dev 2025-09-18 14:09:11 +02:00
root
fe13e49172 dev schema 2025-09-18 14:09:11 +02:00
root
d90a18053f restructure 2025-09-18 14:09:11 +02:00
root
22419ecf84 read and write with readis 2025-09-18 14:09:11 +02:00
root
cf09ba6431 req file extended 2025-09-18 14:08:14 +02:00
root
774fa3484c basic creds schema 2025-09-18 14:04:06 +02:00
b0d70e2120 Merge pull request 'feat/handle-get-data-from-ONTAP' (#3) from feat/handle-get-data-from-ONTAP into main
Reviewed-on: #3
2025-09-18 12:00:54 +00:00
Magel, Denis
fc71950039 refactor: added async await to ONTAP call 2025-09-18 13:39:22 +02:00
Magel, Denis
e8efde9892 feat: added functionality inside GET/aggregates 2025-09-18 12:16:30 +02:00
1592333ef8 Merge pull request 'get-aggregates' (#2) from get-aggregates into main
Reviewed-on: #2
2025-09-18 09:06:09 +00:00
Pascal Scheiben
73a42aae3b Moving example files back to its place 2025-09-18 10:39:36 +02:00
Pascal Scheiben
615d290773 Renamed folder from example -> aggregate 2025-09-18 10:23:40 +02:00
Magel, Denis
af4b60a0e3 style: 2025-09-18 10:17:24 +02:00
Magel, Denis
63bcd9b931 feat: add GET /aggregates 2025-09-18 10:10:30 +02:00
d564710004 Merge pull request 'feature/base_file' (#1) from feature/base_file into main
Reviewed-on: #1
2025-09-18 07:36:51 +00:00
Pascal Scheiben
76c5353afa Adding base logic for config handling. Adding example config to root 2025-09-18 09:33:55 +02:00
Pascal Scheiben
19e9cd6625 Adding python dotenv to requirements/pyproject 2025-09-18 09:07:09 +02:00
15 changed files with 135 additions and 13 deletions

8
.env Normal file
View File

@@ -0,0 +1,8 @@
# Environment variables for NetApp ONTAP clusters
CLUSTER1_HOSTNAME=172.16.57.2
CLUSTER1_USERNAME=admin
CLUSTER1_PASSWORD=Netapp12
CLUSTER2_HOSTNAME=172.16.56.2
CLUSTER2_USERNAME=admin
CLUSTER2_PASSWORD=Netapp12

2
.gitignore vendored
View File

@@ -129,7 +129,7 @@ celerybeat.pid
*.sage.py
# Environments
.env
#.env
.venv
env/
venv/

View File

@@ -12,5 +12,6 @@ requires-python = ">=3.13"
dependencies = [
"fastapi[standard]>=0.116.2",
"httpx>=0.28.1",
"python-dotenv>=1.1.1",
"redis>=6.4.0",
]

View File

@@ -1,6 +1,7 @@
fastapi[standard]>=0.116.2
httpx>=0.28.1
redis>=6.4.0
python-dotenv>=1.1.1
pydantic
redis[hiredis]
dotenv
dotenv

View File

@@ -0,0 +1,4 @@
from src.example.router import router as example_router
from .aggregate_router import router as aggregate_router
__all__ = ["example_router", "aggregate_router"]

View File

@@ -0,0 +1,16 @@
# contains the router for the aggregates endpoint
from fastapi import APIRouter, Query, Request
from typing import List
from .aggregate_schema import AggregateSchema, MetricEnum
from .aggregate_service import get_aggregates
router = APIRouter(tags=["aggregates"])
@router.get("/aggregates", response_model=List[AggregateSchema])
async def aggregates_endpoint(
request: Request,
metric: MetricEnum = Query(MetricEnum.relative, description="Metric type"),
):
return await get_aggregates(request, metric)

View File

@@ -0,0 +1,15 @@
# contains the schema definitions for aggregates
from pydantic import BaseModel
from enum import Enum
class AggregateSchema(BaseModel):
aggregate: str
node: str
available: int
available_str: str
class MetricEnum(str, Enum):
relative = "relative"
absolute = "absolute"

View File

@@ -0,0 +1,38 @@
# contains the business logic for aggregates
from typing import List
from fastapi import Request
from .aggregate_schema import AggregateSchema, MetricEnum
from logging import getLogger
from ..utils import round_bytes, get_data_from_ontap
logger = getLogger("uvicorn")
logger.setLevel("DEBUG")
async def get_aggregates(request: Request, metric: str = "relative") -> List[AggregateSchema]:
# Dummy data for demonstration
# You can use the metric parameter to filter or modify results as needed
# For now, just return the same data and show metric usage
logger.debug(f"Metric used: {metric}")
client = request.app.requests_client
__aggregates = await get_data_from_ontap(client, logger, "172.16.57.2", "admin", "Netapp12", "storage/aggregates", "fields=name,uuid,space,node,home_node")
logger.debug(__aggregates)
__aggregates = __aggregates.get("records")
if metric == MetricEnum.relative:
__aggregates = sorted(__aggregates, key=lambda r: r["space"]["block_storage"].get("used_percent"), reverse=True)
elif metric == MetricEnum.absolute:
__aggregates = sorted(__aggregates, key=lambda r: r["space"]["block_storage"].get("available"), reverse=False)
aggregates: list = [
AggregateSchema(
aggregate=a["name"],
node=a["node"]["name"],
available=a["space"]["block_storage"]["available"],
available_str=round_bytes(a["space"]["block_storage"]["available"]),
)
for a in __aggregates
]
return aggregates

View File

@@ -1,3 +0,0 @@
from .router import router as example_router
__all__ = ["example_router"]

View File

@@ -1,2 +1,2 @@
# contains a constant definition
FOO: int = 42
FOO: int = 42

View File

@@ -1,9 +1,10 @@
# contains the router for the example endpoint
# contains the router for the aggregate endpoint
from fastapi import APIRouter
from .schema import ExampleSchema
router = APIRouter(tags=["example"])
router = APIRouter(tags=["aggregate"])
@router.get("/example")
async def example_endpoint() -> ExampleSchema:
return ExampleSchema(example_field="foo", another_field=42)
return ExampleSchema(example_field="foo", another_field=42)

View File

@@ -1,6 +1,7 @@
# contains the schema definitions for the example service
# contains the schema definitions for the aggregate service
from pydantic import BaseModel
class ExampleSchema(BaseModel):
example_field: str
another_field: int

View File

@@ -1,3 +1,3 @@
# contains the business logic for the example service
# contains the business logic for the aggregate service
async def example_service() -> str:
return "This is an example service"
return "This is an aggregate service"

16
src/service.py Normal file
View File

@@ -0,0 +1,16 @@
from dotenv import dotenv_values
import logging
from src.schema import ConfigSchema
logger = logging.getLogger("uvicorn")
def load_config() -> ConfigSchema:
logger.info("Loading config from .env file")
config = dotenv_values(".env")
return ConfigSchema(
hostname=config["CLUSTER1_HOSTNAME"],
username=config["CLUSTER1_USERNAME"],
password=config["CLUSTER1_PASSWORD"],
)

View File

@@ -1,4 +1,28 @@
import logging
import httpx
def round_bytes(size_in_bytes: int) -> str:
    """Convert a byte count to a human-readable IEC string (e.g. ``1.50KiB``).

    Args:
        size_in_bytes: size in bytes.

    Returns:
        The value scaled to the largest binary unit below 1024, formatted
        with two decimal places and its IEC suffix.
    """
    # Work on a float copy so the int parameter's annotation stays honest.
    size = float(size_in_bytes)
    for unit in ["B", "KiB", "MiB", "GiB", "TiB", "PiB"]:
        if size < 1024:
            return f"{size:.2f}{unit}"
        size /= 1024
    # BUG FIX: the overflow fallback previously used the SI label "EB" while
    # every other unit is binary (IEC, 1024-based); label it "EiB".
    return f"{size:.2f}EiB"
async def get_data_from_ontap(client, logger, hostname: str, username: str, password: str, endpoint: str, query_string: str = ""):
    """GET ``https://{hostname}/api/{endpoint}`` and return the decoded JSON.

    Args:
        client: an async HTTP client (the app-level httpx AsyncClient).
        logger: logger used for debug/error output.
        hostname/username/password: ONTAP cluster address and credentials
            (sent as HTTP basic auth).
        endpoint: API path below ``/api/``.
        query_string: optional raw query string appended after ``?``.

    Returns:
        The parsed JSON body, or None when an HTTP error occurs (the error
        is logged, not raised).
    """
    url = f"https://{hostname}/api/{endpoint}"
    if query_string:
        url += f"?{query_string}"
    # BUG FIX: this previously wrapped the call in `async with client`,
    # which closes the shared app-lifetime client on exit — every request
    # after the first would fail on a closed client. Use the client
    # directly; its lifecycle belongs to the application.
    try:
        logger.debug(f"Fetching data from ONTAP: {url}")
        response = await client.get(url, auth=(username, password))
        response.raise_for_status()
        return response.json()
    except httpx.HTTPError as e:
        logger.error(f"HTTP error occurred: {e}")
        return None
def setup_logging() -> None:
"""Configure logging for the application"""
@@ -6,4 +30,4 @@ def setup_logging() -> None:
level=logging.DEBUG,
format="[%(asctime)s] [%(levelname)5s] %(message)s"
)
print(f"Logger is initialized.")
print(f"Logger is initialized.")