Submodel Service Extensibility #69
Replies: 2 comments 2 replies
-
|
Extending those ideas: SDK repository
Using ChatGPT, I have made a quick draft of how we could implement it:

from abc import ABC, abstractmethod
class BaseAdapter(ABC):
    """Abstract contract every storage adapter must implement."""

    def __init__(self, **kwargs):
        """Capture arbitrary configuration keys for use by subclasses.

        All keyword arguments are stored as-is; each concrete adapter
        reads only the keys it needs.
        """
        self.config = dict(kwargs)

    @abstractmethod
    def upload_file(self, file_path, destination):
        """Upload a file to storage."""
        ...

    @abstractmethod
    def download_file(self, file_name, destination):
        """Download a file from storage."""
        ...

    @classmethod
    @abstractmethod
    def get_required_config(cls):
        """Returns a list of required configuration keys."""
        ...
import shutil
import os
from base_storage import BaseStorage
class LocalStorage(BaseStorage):
    """Adapter that stores files in a directory on the local file system."""

    def __init__(self, config):
        """Initialize from a config mapping.

        Reads ``storage_path`` (the key used in the example config.yaml),
        falling back to the legacy ``local_storaga_path`` key for backward
        compatibility, then to ``./storage``.
        """
        super().__init__(config)
        # Bug fix: the original looked up only "local_storaga_path" (typo),
        # so the "storage_path" value from config.yaml was silently ignored
        # and the default "./storage" was always used.
        self.storage_path = config.get(
            "storage_path",
            config.get("local_storaga_path", "./storage"),
        )

    def upload_file(self, file_path, destination):
        """Copy *file_path* into the storage directory as *destination*."""
        # Create the target directory lazily on first upload.
        os.makedirs(self.storage_path, exist_ok=True)
        dest_path = os.path.join(self.storage_path, destination)
        shutil.copy(file_path, dest_path)
        print(f"Uploaded {file_path} to local storage at {dest_path}")

    def download_file(self, file_name, destination):
        """Copy *file_name* out of the storage directory to *destination*."""
        src_path = os.path.join(self.storage_path, file_name)
        shutil.copy(src_path, destination)
        print(f"Downloaded {file_name} from local storage to {destination}")

    @classmethod
    def get_required_config(cls):
        """No keys are strictly required; storage_path has a default.

        Added for consistency with the BaseAdapter interface declared in
        this draft (S3Adapter already implements it).
        """
        return []
import boto3
from base_adapter import BaseAdapter
class S3Adapter(BaseAdapter):
    """Adapter that transfers files to and from an Amazon S3 bucket."""

    def __init__(self, **kwargs):
        """Build the boto3 S3 client from the dynamic configuration."""
        super().__init__(**kwargs)
        # Credentials and region come straight from the adapter config.
        credentials = {
            "aws_access_key_id": self.config.get("aws_access_key"),
            "aws_secret_access_key": self.config.get("aws_secret_key"),
            "region_name": self.config.get("aws_region"),
        }
        self.s3 = boto3.client("s3", **credentials)

    def _bucket(self):
        # Target bucket name as configured for this adapter.
        return self.config.get("s3_bucket")

    def upload_file(self, file_path, destination):
        """Send a local file to the configured bucket under *destination*."""
        bucket = self._bucket()
        self.s3.upload_file(file_path, bucket, destination)
        print(f"Uploaded {file_path} to S3 bucket {bucket}/{destination}")

    def download_file(self, file_name, destination):
        """Fetch an object from the configured bucket to a local path."""
        bucket = self._bucket()
        self.s3.download_file(bucket, file_name, destination)
        print(f"Downloaded {file_name} from S3 to {destination}")

    @classmethod
    def get_required_config(cls):
        """Configuration keys that must be supplied for this adapter."""
        return ["aws_access_key", "aws_secret_key", "aws_region", "s3_bucket"]
import sys
import importlib
import os
from base_adapter import BaseAdapter
class SubmodelController:
    """Loads a storage adapter by name and delegates file transfers to it."""

    def __init__(self, adapter_name, external_path=None, **kwargs):
        # All remaining keyword arguments are forwarded to the adapter's
        # constructor as its dynamic configuration.
        self.adapter = self._load_adapter(adapter_name, external_path, **kwargs)

    def _load_adapter(self, adapter_name, external_path, **kwargs):
        """Dynamically import an adapter from either internal or external sources."""
        if external_path:
            # NOTE(review): appends unconditionally — repeated instantiation
            # keeps growing sys.path; consider guarding against duplicates.
            sys.path.append(os.path.abspath(external_path))  # Add external path
            module_name = adapter_name  # External adapter uses direct module name
        else:
            module_name = f"extensions.{adapter_name}_adapter"  # Internal adapter path
        try:
            module = importlib.import_module(module_name)
            # Scan the module for the first concrete BaseAdapter subclass.
            # NOTE(review): dir() also lists classes *imported into* the
            # module, so an unrelated re-exported adapter could be matched
            # before the one defined in the module — verify this is intended.
            for attr in dir(module):
                obj = getattr(module, attr)
                if isinstance(obj, type) and issubclass(obj, BaseAdapter) and obj is not BaseAdapter:
                    return obj(**kwargs)
        except ModuleNotFoundError:
            raise ImportError(f"Adapter '{adapter_name}' not found in {'external' if external_path else 'internal'} modules")
        # Module imported but contained no BaseAdapter subclass.
        raise ImportError(f"No valid class found in '{module_name}'")

    def upload_file(self, file_path, destination):
        """Upload a file using the selected adapter."""
        # NOTE(review): _load_adapter either returns an adapter or raises,
        # so self.adapter is never falsy here — this guard appears dead.
        if self.adapter:
            self.adapter.upload_file(file_path, destination)
        else:
            print("No valid adapter loaded.")

    def download_file(self, file_name, destination):
        """Download a file using the selected adapter."""
        # NOTE(review): same dead guard as upload_file.
        if self.adapter:
            self.adapter.download_file(file_name, destination)
        else:
            print("No valid adapter loaded.")Example program (similar of how would we call the controller from the ichub backend): default_adapter: local_storage
# Example config.yaml: settings for each available adapter, keyed by name.
adapters:
  s3:
    aws_access_key: "your-access-key"
    aws_secret_key: "your-secret-key"
    aws_region: "us-east-1"
    s3_bucket: "my-s3-bucket"
  azure:
    azure_connection_string: "your-azure-connection-string"
    azure_container: "my-container"
  local_storage:
    # NOTE(review): confirm this key name matches the one LocalStorage
    # actually reads — the draft's adapter looks up "local_storaga_path".
    storage_path: "./local_storage"
import os
from controller import SubmodelController
import yaml
def load_config(path="config.yaml"):
    """Load the application configuration from a YAML file.

    Args:
        path: Path of the YAML file to read. Defaults to ``config.yaml``
            so existing zero-argument callers keep working.

    Returns:
        The parsed configuration (typically a dict with ``default_adapter``
        and ``adapters`` keys, per the example config in this draft).
    """
    # safe_load avoids arbitrary Python object construction from the file.
    with open(path, "r", encoding="utf-8") as f:
        return yaml.safe_load(f)
if __name__ == "__main__":
    # Read storage type: pick the adapter named by `default_adapter`
    # and look up that adapter's own settings block.
    config = load_config()
    adapter_name = config["default_adapter"]
    adapter_config = config["adapters"].get(adapter_name, {})
    # Initialize the storage controller with the chosen adapter.
    # Bug fix: pass only the selected adapter's settings — the original
    # passed **config (leaking top-level keys like "adapters" into the
    # adapter) while the computed adapter_config went unused. The stray
    # `self._load_adapter_class(...)` / `validate_config(...)` lines were
    # removed: they referenced undefined names at module scope.
    storage = SubmodelController(adapter_name, **adapter_config)
    # Example usage
    storage.upload_file("test.txt", "uploaded_test.txt")
    storage.download_file("uploaded_test.txt", "downloaded_test.txt")

There are likely many ways to improve this, but I wanted to share my initial idea as a foundation we can build upon and refine further. This solution has the following handicaps:
While this approach would make it easy to add new adapters, the adapter configuration is coupled to both the SDK and the backend, which means every new adapter requires updating both. |
Beta Was this translation helpful? Give feedback.
-
|
Methods to create and get files from the file system will be created in the dataspace sdk and imported by the ic-hub to use in the FS adapter |
Beta Was this translation helpful? Give feedback.
Uh oh!
There was an error while loading. Please reload this page.
Uh oh!
There was an error while loading. Please reload this page.
-
Some ideas to implement this.
SDK repository
tractusx-sdk/src/tractusx_sdk/extensions/submodel/adapters, with an official interface that anyone who wants to implement their own adapter must follow, ensuring standardized methods for calling.

ICHUB repository
Beta Was this translation helpful? Give feedback.
All reactions