Compare commits

36 Commits

Author SHA1 Message Date
Magel, Denis
362c470b3c fix: API does not suspend after 1st request 2025-09-18 18:36:51 +02:00
Alexey
b8885d7d73 tags dev 2025-09-18 18:01:06 +02:00
Magel, Denis
d2db261152 docs: add drawio 2025-09-18 17:05:10 +02:00
Magel, Denis
e73a18e981 docs: update readme 2025-09-18 16:39:54 +02:00
Magel, Denis
7d4b42df11 fix: now really use multiple clusters in the loop 2025-09-18 15:57:35 +02:00
Magel, Denis
e8aa7d7df5 feat: get cluster information from redis
feat: loop over all clusters
2025-09-18 15:36:39 +02:00
Magel, Denis
b60383071a make shared connections available via request.shared.XXX 2025-09-18 15:28:50 +02:00
58f7c5c393 Merge pull request 'Refactoring config to config_upload, making important vars global' (#6) from feature/config_features into main
Reviewed-on: #6
2025-09-18 13:01:16 +00:00
Pascal Scheiben
5dfba7416b Refactoring config to config_upload, making important vars global 2025-09-18 15:00:03 +02:00
Magel, Denis
fc3f39c6ae fix: duplicate API startup 2025-09-18 14:48:46 +02:00
1ee40b6647 Merge pull request 'feature/config_upload' (#5) from feature/config_upload into main
Reviewed-on: #5
2025-09-18 12:34:38 +00:00
Magel, Denis
60008fa947 fix: adjusted paths to run from root dir
updated inventory.yaml
2025-09-18 14:33:30 +02:00
Magel, Denis
767f43551e rebased 2025-09-18 14:23:19 +02:00
Pascal Scheiben
9d12045b81 Arranging imports 2025-09-18 14:14:53 +02:00
Pascal Scheiben
72992d651d Fixing typos 2025-09-18 14:13:50 +02:00
Pascal Scheiben
ab52169987 Enhancing comments, adding stub for business logic 2025-09-18 14:13:50 +02:00
Pascal Scheiben
1a4e2ff688 Rewriting comments 2025-09-18 14:13:50 +02:00
Pascal Scheiben
579c62319c Adding config endpoint 2025-09-18 14:13:50 +02:00
2a165c91b6 Merge pull request 'backend' (#4) from backend into main
Reviewed-on: #4
2025-09-18 12:09:49 +00:00
Alexey
72f738a816 backend dev 2025-09-18 14:09:11 +02:00
root
fe13e49172 dev schema 2025-09-18 14:09:11 +02:00
root
d90a18053f restructure 2025-09-18 14:09:11 +02:00
root
22419ecf84 read and write with redis 2025-09-18 14:09:11 +02:00
root
cf09ba6431 req file extended 2025-09-18 14:08:14 +02:00
root
774fa3484c basic creds schema 2025-09-18 14:04:06 +02:00
b0d70e2120 Merge pull request 'feat/handle-get-data-from-ONTAP' (#3) from feat/handle-get-data-from-ONTAP into main
Reviewed-on: #3
2025-09-18 12:00:54 +00:00
Magel, Denis
fc71950039 refactor: added async await to ONTAP call 2025-09-18 13:39:22 +02:00
Magel, Denis
e8efde9892 feat: added functionality inside GET/aggregates 2025-09-18 12:16:30 +02:00
1592333ef8 Merge pull request 'get-aggregates' (#2) from get-aggregates into main
Reviewed-on: #2
2025-09-18 09:06:09 +00:00
Pascal Scheiben
73a42aae3b Moving example files back to its place 2025-09-18 10:39:36 +02:00
Pascal Scheiben
615d290773 Renamed folder from example -> aggregate 2025-09-18 10:23:40 +02:00
Magel, Denis
af4b60a0e3 style: 2025-09-18 10:17:24 +02:00
Magel, Denis
63bcd9b931 feat: add GET /aggregates 2025-09-18 10:10:30 +02:00
d564710004 Merge pull request 'feature/base_file' (#1) from feature/base_file into main
Reviewed-on: #1
2025-09-18 07:36:51 +00:00
Pascal Scheiben
76c5353afa Adding base logic for config handling. Adding example config to root 2025-09-18 09:33:55 +02:00
Pascal Scheiben
19e9cd6625 Adding python dotenv to requirements/pyproject 2025-09-18 09:07:09 +02:00
27 changed files with 319 additions and 66 deletions

.env Normal file

@@ -0,0 +1,3 @@
cluster_inventory_path = config/inventory.yml
redis_host = '172.16.0.208'
redis_port = '6379'

.gitignore vendored

@@ -129,7 +129,7 @@ celerybeat.pid
 *.sage.py
 
 # Environments
-.env
+#.env
 .venv
 env/
 venv/

README.md

@@ -1,3 +1,19 @@
 # generic_api_endpoint
 Hackathon API endpoint
+## management summary // use case
+This API acts as middleware for service portals and frontends (like SNOW), which can retrieve data via REST API. It manages metadata.
+## ideas for future
+- store the data in redis on initialization or on first request
+- also query redis first, not ONTAP directly
+- documentation -> make it understandable, so that users will use it!
+- add capability to apply filters/conditions on the return
+- Alexeys
+  -
+- performance based filtering
+- add capability for finding best clusters, volumes
+- get credentials from credential-mgmt-system
+  -

concept.drawio.png Normal file (BIN, 71 KiB)

Binary file not shown.

config/inventory.yml

@@ -1,8 +1,6 @@
-- 1:
-  hostname: '172.16.57.2'
-  username: 'admin'
-  password: 'Netapp12'
-- 2:
-  hostname: '172.16.56.2'
-  username: 'admin'
-  password: 'Netapp12'
+- hostname: "172.16.57.2"
+  username: "admin"
+  password: "Netapp12"
+- hostname: "172.16.56.2"
+  username: "admin"
+  password: "Netapp12"

pyproject.toml

@@ -12,5 +12,6 @@ requires-python = ">=3.13"
 dependencies = [
     "fastapi[standard]>=0.116.2",
     "httpx>=0.28.1",
+    "python-dotenv>=1.1.1",
     "redis>=6.4.0",
 ]

requirements.txt

@@ -1,6 +1,7 @@
 fastapi[standard]>=0.116.2
 httpx>=0.28.1
 redis>=6.4.0
+python-dotenv>=1.1.1
 pydantic
 redis[hiredis]
 dotenv

src/.env Normal file

@@ -0,0 +1,3 @@
cluster_inventory_path = ./config/inventory.yml
redis_host = '172.16.0.208'
redis_port = '6379'


@@ -0,0 +1,5 @@
from src.example.router import router as example_router
from src.aggregate.aggregate_router import router as aggregate_router

__all__ = ["example_router", "aggregate_router"]

src/aggregate/aggregate_router.py Normal file

@@ -0,0 +1,23 @@
# contains the router for the aggregates endpoint
from fastapi import APIRouter, Query, Request
from typing import List, Dict

from .aggregate_schema import AggregateSchema, MetricEnum
from .aggregate_service import get_aggregates

router = APIRouter(tags=["aggregates"])


@router.get("/aggregates", response_model=List[AggregateSchema])
async def aggregates_endpoint(
    request: Request,
    metric: MetricEnum = Query(MetricEnum.relative, description="Metric type"),
):
    # Extract tag parameters from query string
    tags: Dict[str, str] = {}
    for param_name, param_value in request.query_params.items():
        if param_name.startswith("tag."):
            tag_key = param_name[4:]
            tags[tag_key] = param_value

    return await get_aggregates(request, metric, tags)
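As a usage sketch (mine, not part of the commit): any query parameter prefixed with "tag." is collected into the tags dict, so a client call could look like the following, assuming the API runs locally on port 8000 as in the .http example further down.

import httpx

# ?metric=absolute&tag.flash=production -> tags == {"flash": "production"}
resp = httpx.get(
    "http://127.0.0.1:8000/aggregates",
    params={"metric": "absolute", "tag.flash": "production"},
)
resp.raise_for_status()
print(resp.json())  # a list of AggregateSchema dicts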

src/aggregate/aggregate_schema.py Normal file

@@ -0,0 +1,23 @@
# contains the schema definitions for aggregates
from pydantic import BaseModel
from enum import Enum


class AggregateSchema(BaseModel):
    aggregate: str
    node: str
    available: int
    available_str: str


class MetricEnum(str, Enum):
    relative = "relative"
    absolute = "absolute"


TAG2REST = {
    'worm_compliance': { 'snaplock_type': 'compliance' },
    'worm_enterprise': { 'snaplock_type': 'enterprise' },
    'flash': { 'block_storage.storage_type': 'ssd' },
    'hdd': { 'block_storage.storage_type': 'hdd' },
    'mcc': { 'block_storage.mirror.enabled': 'true' }
}
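A hypothetical helper (not in the diff) to show how TAG2REST could be applied: each incoming tag key selects an ONTAP REST field/value pair, which the commented-out code in aggregate_service.py hints at joining into a query string.

def tags_to_query(tags: dict[str, str]) -> str:
    # Hypothetical: translate portal tags into ONTAP query parameters
    # using the TAG2REST mapping above.
    parts = []
    for tag_key in tags:
        for rest_field, rest_value in TAG2REST.get(tag_key, {}).items():
            parts.append(f"{rest_field}={rest_value}")
    return "&".join(parts)

# tags_to_query({"flash": "production"}) == "block_storage.storage_type=ssd"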

src/aggregate/aggregate_service.py Normal file

@@ -0,0 +1,58 @@
# contains the business logic for aggregates
from typing import List, Dict
from pprint import pprint

from fastapi import Request

from src.aggregate.aggregate_schema import AggregateSchema, MetricEnum
from logging import getLogger
from src.utils import round_bytes, get_data_from_ontap

logger = getLogger("uvicorn")
logger.setLevel("DEBUG")

# TAG2REST = {
#     'worm_compliance': { 'snaplock_type': 'compliance' },
#     'worm_enterprise': { 'snaplock_type': 'enterprise' },
#     'flash': { 'block_storage.storage_type': 'ssd' },
#     'hdd': { 'block_storage.storage_type': 'hdd' },
#     'mcc': { 'block_storage.mirror.enabled': 'true' }
# }

# {
#     "flash": "production",
#     "performance": "gold",
#     "worm": "compliance"
# }


async def get_aggregates(request: Request, metric: str = "relative", tags: Dict[str, str] = None) -> List[AggregateSchema]:
    # Dummy data for demonstration
    # You can use the metric parameter to filter or modify results as needed
    # For now, just return the same data and show metric usage
    logger.debug(f"Metric used: {metric}")
    logger.debug(f"Tags used: {tags}")

    # convert tags to ONTAP filter
    # filter_str = ""
    # if tags:
    #     str_filter_parts = [f"tag.{key} eq '{value}'" for key, value in tags.items()]
    #     param_str = "&".join([f"{TAG2REST[key]}" for key, value in tags.items()])

    __aggregates = await get_data_from_ontap(request, logger, "storage/aggregates", "fields=*")
    pprint(__aggregates)

    if metric == MetricEnum.relative:
        __aggregates = sorted(__aggregates, key=lambda r: r["space"]["block_storage"].get("used_percent"), reverse=True)
    elif metric == MetricEnum.absolute:
        __aggregates = sorted(__aggregates, key=lambda r: r["space"]["block_storage"].get("available"), reverse=False)

    aggregates: list = [
        AggregateSchema(
            aggregate=a["name"],
            node=a["node"]["name"],
            available=a["space"]["block_storage"]["available"],
            available_str=round_bytes(a["space"]["block_storage"]["available"]),
        )
        for a in __aggregates
    ]
    return aggregates
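Both sort orders put the fullest aggregate first, which a small made-up example (mine, not from the diff) illustrates:

records = [
    {"name": "aggr1", "space": {"block_storage": {"available": 500, "used_percent": 80}}},
    {"name": "aggr2", "space": {"block_storage": {"available": 2000, "used_percent": 40}}},
]
# metric=relative sorts by used_percent, descending -> aggr1 first
# metric=absolute sorts by available, ascending -> aggr1 first
assert sorted(records, key=lambda r: r["space"]["block_storage"]["used_percent"], reverse=True)[0]["name"] == "aggr1"
assert sorted(records, key=lambda r: r["space"]["block_storage"]["available"])[0]["name"] == "aggr1"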

src/config_upload/__init__.py Normal file

@@ -0,0 +1,3 @@
from src.config_upload.router import router as config_router

__all__ = ["config_router"]


@@ -0,0 +1,14 @@
POST http://127.0.0.1:8000/config
Content-Type: application/json

{
    "cluster_list": [
        {
            "hostname": "cluster1.demo.netapp.com",
            "username": "admin",
            "password": "Netapp1!"
        }
    ]
}

###

src/config_upload/router.py Normal file

@@ -0,0 +1,23 @@
import logging

from fastapi import APIRouter

from .schema import ConfigReturnSchema, ConfigSchema

logger = logging.getLogger("uvicorn")

router = APIRouter(tags=["config_upload"])


@router.post(
    "/config", summary="Upload a configuration", response_model=ConfigReturnSchema
)
async def create_config(config: ConfigSchema) -> ConfigSchema:
    """
    Endpoint to receive and store configuration data.

    ⚠️ at this time the configuration is not stored anywhere. It's like logging to /dev/null
    """
    logger.info("Received configuration data")
    return config

src/config_upload/schema.py Normal file

@@ -0,0 +1,21 @@
# contains the schema definitions for the config_upload service
from pydantic import BaseModel


class ConfigEntrySchema(BaseModel):
    hostname: str
    username: str
    password: str


class ConfigOutSchema(BaseModel):
    hostname: str
    username: str


class ConfigReturnSchema(BaseModel):
    cluster_list: list[ConfigOutSchema]


class ConfigSchema(BaseModel):
    cluster_list: list[ConfigEntrySchema]

src/config_upload/service.py Normal file

@@ -0,0 +1,2 @@
# contains the business logic for the config_upload service
async def save_config() -> None: ...
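One possible shape for save_config, sketched here as an assumption (the stub above is empty); it mirrors how initialize_config in src/initialize.py persists the YAML inventory to Redis. Names and the hash key reuse are mine, not the commit's.

import json
from redis import Redis
from .schema import ConfigSchema

async def save_config(redis_conn: Redis, config: ConfigSchema) -> None:
    # Hypothetical: store the uploaded cluster list under the same
    # Redis hash that initialize_config writes the inventory to.
    payload = json.dumps(config.model_dump())
    redis_conn.hset("cluster_inventory", mapping={"inventory": payload})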

src/database.py

@@ -3,7 +3,7 @@ import logging
 from redis import Redis, ConnectionError
 from typing import List
 from pydantic import TypeAdapter
-from schema import ConfigSchema
+from src.schema import ConfigSchema
 
 
 def setup_db_conn(redishost, redisport: str):

src/example/__init__.py (deleted)

@@ -1,3 +0,0 @@
-from .router import router as example_router
-
-__all__ = ["example_router"]


@@ -1,2 +1,2 @@
 # contains a constant definition
 FOO: int = 42

src/example/router.py

@@ -1,9 +1,11 @@
-# contains the router for the example endpoint
+# contains the router for the aggregate endpoint
 from fastapi import APIRouter
-from .schema import ExampleSchema
-router = APIRouter(tags=["example"])
+
+from src.example.schema import ExampleSchema
+
+router = APIRouter(tags=["aggregate"])
+
 
 @router.get("/example")
 async def example_endpoint() -> ExampleSchema:
     return ExampleSchema(example_field="foo", another_field=42)

src/example/schema.py

@@ -1,14 +1,18 @@
-# contains the schema definitions for the example service
+# contains the schema definitions for the aggregate service
 from pydantic import BaseModel
 from pathlib import Path
+
 
 class ExampleSchema(BaseModel):
     example_field: str
     another_field: int
+
 
 class ClusterCreds(BaseModel):
     """A structure to hold basic auth cluster credentials for a cluster"""
+
     username: str
     password: str
     hostname: str = None
     cert_filepath: Path = None
     key_filepath: Path = None

src/example/service.py

@@ -1,3 +1,3 @@
-# contains the business logic for the example service
+# contains the business logic for the aggregate service
 async def example_service() -> str:
-    return "This is an example service"
+    return "This is an aggregate service"

src/initialize.py

@@ -5,40 +5,41 @@ import yaml
 from pathlib import Path
 from dotenv import load_dotenv
-from database import setup_db_conn
-from schema import ConfigSchema
+from src.database import setup_db_conn
+from src.schema import ConfigSchema
 from typing import List
 from pydantic import TypeAdapter
 
 
 def initialize_config():
     load_dotenv()
-    log = logging.getLogger('uvicorn')
-    ENV_INVENTORYPATH = os.getenv('cluster_inventory_path')
-    ENV_REDISHOST = os.getenv('redis_host')
-    ENV_REDISPORT = os.getenv('redis_port')
+    log = logging.getLogger("uvicorn")
+    ENV_INVENTORYPATH = os.getenv("cluster_inventory_path")
+    ENV_REDISHOST = os.getenv("redis_host")
+    ENV_REDISPORT = os.getenv("redis_port")
     log.info(f"Found Cluster Inventory file at: {ENV_INVENTORYPATH}")
     if not ENV_INVENTORYPATH or not Path(ENV_INVENTORYPATH).is_file():
         print(f"FATAL: Inventory file {ENV_INVENTORYPATH} is missing or not a file.")
         return False
     try:
-        with open(ENV_INVENTORYPATH, 'r') as f:
+        with open(ENV_INVENTORYPATH, "r") as f:
             inv = yaml.safe_load(f)
         inventory = json.dumps(inv)
     except Exception as e:
         print(f"FATAL: Cannot read inventory file {ENV_INVENTORYPATH}. Err: {e}")
         return False
-    print(f'[INFO] Importing configuration to DB...')
+    log.info(f"Importing configuration to DB...")
     try:
         GLOBAL_INVENTORY_VALID = TypeAdapter(List[ConfigSchema]).validate_python(inv)
         redis_conn = setup_db_conn(ENV_REDISHOST, ENV_REDISPORT)
-        redis_conn.hset('cluster_inventory', mapping={'inventory': inventory})
+        redis_conn.hset("cluster_inventory", mapping={"inventory": inventory})
         redis_conn.close()
         log.info("Configuration has been loaded.")
         return True
     except Exception as e:
         print(f"FATAL: Redis DB error: {e}")
         return False

src/main.py

@@ -1,30 +1,30 @@
 import os
-import json
 import logging
-import yaml
-from pathlib import Path
-from dotenv import load_dotenv
-from redis import Redis
-from contextlib import asynccontextmanager
-from pydantic import BaseModel, ValidationError, SecretStr, AnyHttpUrl
-from typing import Optional, Literal, List, Union
+import httpx
 from fastapi import FastAPI
+from src.aggregate import aggregate_router
+from src.config_upload import config_router
+from contextlib import asynccontextmanager
+from .database import setup_db_conn, get_config_from_db
+from src.initialize import initialize_config
+from .utils import setup_logging
 
-from database import setup_db_conn, get_inventory_from_redis, get_config_from_db
-from src.initialize import initialize_config
-from utils import setup_logging
+logger = logging.getLogger("uvicorn")
+logger.setLevel("DEBUG")
+logger.info("Starting application")
 
 
 @asynccontextmanager
 async def lifespan(app: FastAPI):
-    ''' make loading it async'''
-    log = logging.getLogger('uvicorn')
+    """make loading it async"""
+    global shared_redis_conn, requests_client
+    log = logging.getLogger("uvicorn")
     cfg_init_result = initialize_config()
-    shared_redis_conn = setup_db_conn(os.getenv('redis_host'), os.getenv('redis_port'))
+    shared_redis_conn = setup_db_conn(os.getenv("redis_host"), os.getenv("redis_port"))
     if not shared_redis_conn:
         log.error("Cannot connect to Redis DB. Exiting...")
         exit(1)
@@ -34,13 +34,16 @@ async def lifespan(app: FastAPI):
     if not cfg_init_result:
         log.error("Configuration initialization failed. Exiting...")
         # exit(1)
-    yield
+    requests_client = httpx.AsyncClient(verify=False)
+    yield {"redis_conn": shared_redis_conn, "requests_client": requests_client}
+    await requests_client.aclose()
     log.info("Shutting down FastAPI app...")
 
 
 setup_logging()
-log = logging.getLogger('uvicorn')
+log = logging.getLogger("uvicorn")
 log.info("Starting FastAPI app...")
 app = FastAPI(lifespan=lifespan)
+app.include_router(aggregate_router)
+app.include_router(config_router)
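A note on the lifespan change (my reading, not part of the diff): FastAPI exposes the dict yielded from a lifespan context manager as per-request state, which is what get_data_from_ontap in src/utils.py relies on. A minimal hypothetical route shows the access pattern:

from fastapi import Request

@app.get("/health")  # hypothetical endpoint, for illustration only
async def health(request: Request):
    # request.state carries the objects yielded by lifespan above
    return {"redis": request.state.redis_conn.ping()}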

src/service.py Normal file

@@ -0,0 +1,17 @@
import logging

from dotenv import dotenv_values

from src.schema import ConfigSchema

logger = logging.getLogger("uvicorn")


def load_config() -> ConfigSchema:
    logger.info("Loading config from .env file")
    config = dotenv_values(".env")

    return ConfigSchema(
        hostname=config["CLUSTER1_HOSTNAME"],
        username=config["CLUSTER1_USERNAME"],
        password=config["CLUSTER1_PASSWORD"],
    )

src/utils.py

@@ -1,9 +1,44 @@
 import logging
+from fastapi import Request
+import httpx
+
+from src.database import get_config_from_db
+
+
+def round_bytes(size_in_bytes: int) -> str:
+    # Helper function to convert bytes to a human-readable format
+    for unit in ["B", "KiB", "MiB", "GiB", "TiB", "PiB"]:
+        if size_in_bytes < 1024:
+            return f"{size_in_bytes:.2f}{unit}"
+        size_in_bytes /= 1024
+    return f"{size_in_bytes:.2f}EB"
+
+
+async def get_data_from_ontap(request: Request, logger, endpoint: str, query_string: str = ""):
+    # get clusters from redis
+    redis_conn = request.state.redis_conn
+    config = get_config_from_db(redis_conn)
+    logger.debug("Got the config from REDIS: %s", config)
+    results = []
+    client = request.state.requests_client
+    for cluster in config:
+        print(f"\n\n looping, {cluster}")
+        url = f"https://{cluster.hostname}/api/{endpoint}"
+        if query_string:
+            url += f"?{query_string}"
+        try:
+            logger.debug(f"Fetching data from ONTAP: {url}")
+            response = await client.get(url, auth=(cluster.username, cluster.password))
+            response.raise_for_status()
+            results.extend(response.json()["records"])
+        except httpx.HTTPError as e:
+            logger.error(f"HTTP error occurred: {e}")
+            return None
+    return results
+
+
 def setup_logging() -> None:
     """Configure logging for the application"""
-    logging.basicConfig(
-        level=logging.DEBUG,
-        format="[%(asctime)s] [%(levelname)5s] %(message)s"
-    )
-    print(f"Logger is initialized.")
+    logging.basicConfig(level=logging.DEBUG, format="[%(asctime)s] [%(levelname)5s] %(message)s")
+    print("Logger is initialized.")
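For a quick sanity check of round_bytes above (my example, not from the diff):

from src.utils import round_bytes

assert round_bytes(1536) == "1.50KiB"
assert round_bytes(5 * 1024**3) == "5.00GiB"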