Skip to content
Merged
Show file tree
Hide file tree
Changes from 19 commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
9bcb1db
Grafana updates including: Upgraded to v12, switched internal DB to P…
DavidEdell Nov 29, 2025
e540ba4
Added to UI a prototype Help page and a Not Found page handler.
DavidEdell Nov 29, 2025
d8c5af4
Resolve default database warning in grafana (hopefully).
DavidEdell Dec 1, 2025
ca174dd
Added upgrading file
DavidEdell Dec 1, 2025
d968c1a
Add amp-manager to UI Status page
DavidEdell Dec 2, 2025
c24e9a3
added infinity-datasource plugin and example panels
Dec 9, 2025
ddc8798
updated layout of panels
Dec 9, 2025
04d219c
Fix core '/services' REST API to return JSON instead of text
DavidEdell Dec 9, 2025
fc9caae
Core REST API bugfixes to detect timeouts and resolve potential error…
DavidEdell Dec 9, 2025
356a034
initial dev creating an internal transcoder
Dec 17, 2025
8a49c42
working version, added new profile default is internal transcoding
Dec 18, 2025
b506a84
Merge remote-tracking branch 'origin/295-new-configuration-of-transco…
DavidEdell Dec 18, 2025
14eda5f
working transcoder fixed issue with threading and DB
Dec 18, 2025
ccde72c
Merge remote-tracking branch 'origin/295-new-configuration-of-transco…
DavidEdell Dec 18, 2025
337e912
Automatically create GHCR images when a tag is created. This suppleme…
DavidEdell Dec 19, 2025
4170e90
Fix ghcr name if triggered by tag
DavidEdell Dec 19, 2025
a055b23
Fixing grafana authnz/demo websocket proxy configuration.
DavidEdell Feb 9, 2026
2154cfc
removed services related to transcoder since they aren't up by default
Feb 10, 2026
928c859
added --ignore flag for podman volume create to avoid name error
Feb 12, 2026
73096f8
moved grafana db password to environment var
Feb 19, 2026
31c881c
Updated build-test workflow to use SHA instead of user-friendly branc…
DavidEdell Feb 23, 2026
a263587
updated test to include new changes
Feb 24, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .env
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,8 @@ DB_HEALTHCHECK_PASSWORD=healthcheck
GRAFANA_CONTAINER_PORT=3000
GRAFANA_HOST_PORT=grafana:${GRAFANA_CONTAINER_PORT}
GRAFANA_PROXIES_PATH=localhost/grafana
GRAFANA_DB_NAME=grafana_internal_db

REDIS_PORT=6379
JS_AMP_PORT=3001
ANMS_UI_HTTP_PORT=9030
Expand Down
4 changes: 4 additions & 0 deletions .github/workflows/publish_images.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ on:
push:
branches:
- main
tags:
- 'v*' # Triggers the workflow for any tag starting with 'v', e.g., v1.0.0
release:
types: [published]

Expand Down Expand Up @@ -36,6 +38,8 @@ jobs:
run: |
if [[ "${{ github.event_name }}" == "release" ]]; then
echo "DOCKER_IMAGE_TAG=${{ github.event.release.tag_name }}" >> $GITHUB_ENV
elif [[ "${{ github.ref_type }}" == "tag" ]]; then
echo "DOCKER_IMAGE_TAG=${{ github.ref_name }}" >> $GITHUB_ENV
else
echo "DOCKER_IMAGE_TAG=latest" >> $GITHUB_ENV
fi
Expand Down
17 changes: 17 additions & 0 deletions UPGRADING.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
This file will contain upgrading instructions for all future tagged releases.

# Upgrading v2.0.0 to main

## Grafana DB Update
A new database named `grafana_internal_db` needs to be created in postgres.

This can be done from the commandline or via the dev UI.

In the latter case, ensure your instance is started with the `dev` and `full` profiles. Go to Adminer (link provided in the UI Help page) and either manually create the DB, or use 'Execute SQL' to upload the file `grafana/create_grafana_db.sql`.

# Upgrading v1.x to v2.0.0

It is recommended to start fresh (delete any existing ANMS-related containers and volumes) when transitioning from ANMS v1 to v2.

If you have data or customizations in a v1 installation that you need to migrate, please contact us or open an issue to discuss.

4 changes: 3 additions & 1 deletion anms-core/anms/config_template.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -108,4 +108,6 @@ EMAIL_ENABLED: EMAIL_ENABLED_TEMPLATE
EMAIL_FROM_NAME: EMAIL_FROM_NAME_TEMPLATE
EMAIL_FROM_EMAIL: EMAIL_FROM_EMAIL_TEMPLATE
EMAIL_TEMPLATES_DIR: EMAIL_TEMPLATES_DIR_TEMPLATE
EMAIL_RESET_TOKEN_EXPIRE_LIFETIME: EMAIL_RESET_TOKEN_EXPIRE_LIFETIME_TEMPLATE
EMAIL_RESET_TOKEN_EXPIRE_LIFETIME: EMAIL_RESET_TOKEN_EXPIRE_LIFETIME_TEMPLATE

Transcoder: TRANSCODER_TEMPLATE
14 changes: 4 additions & 10 deletions anms-core/anms/init_adms.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
from anms.routes.adms.adm import handle_adm
from anms.shared.config_utils import ConfigBuilder
from anms.shared.opensearch_logger import OpenSearchLogger
from anms.shared.transmogrifier import TRANSMORGIFIER


logger = OpenSearchLogger(__name__).logger
Expand Down Expand Up @@ -67,17 +68,10 @@ async def import_adms():
logger.error('ADM %s handling failed: %s', adm_file.norm_name, err)
logger.debug('%s', traceback.format_exc())


# Notify the aricodec of startup
config = ConfigBuilder.get_config()
host = config.get('MQTT_HOST')
port = config.get('MQTT_PORT')

logger.info('Connecting to MQTT broker %s to notify aricodec', host)
client = mqtt.client.Client()
client.connect(host, port)
msg = client.publish('aricodec/reload', b'')
msg.wait_for_publish()
client.disconnect()
TRANSMORGIFIER.reload()


logger.info('Startup finished')

Expand Down
13 changes: 5 additions & 8 deletions anms-core/anms/routes/adms/adm.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,20 +29,17 @@
from pydantic import BaseModel
import io
import traceback
from typing import TextIO

# Internal modules
from sqlalchemy import delete, select, and_
from sqlalchemy.engine import Result
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import delete, and_

from anms.models.relational.adms import (adm_data, data_model_view)
from anms.models.relational.adms.data_model_view import DataModel as ADM

from anms.routes.adms.adm_compare import (AdmCompare)
from anms.shared.opensearch_logger import OpenSearchLogger
from anms.shared.mqtt_client import MQTT_CLIENT
from anms.models.relational import get_async_session, get_session

from anms.shared.transmogrifier import TRANSMORGIFIER
from anms.models.relational import get_async_session
from anms.components.schemas.adm import DataModelSchema
import ace
from camp.generators import (create_sql)
Expand Down Expand Up @@ -296,7 +293,7 @@ async def update_adm(file: UploadFile, request: Request):
if error_message:
raise Exception(error_message)
# Notify the transcoder
MQTT_CLIENT.publish('aricodec/reload', adm_file.norm_name)
TRANSMORGIFIER.reload(adm_file.norm_name)
logger.info(f"{info_message} adm file: {file.filename} successfully")
except Exception as err:
logger.error(f"{sql_dialect} execution error: {err.args}")
Expand Down
12 changes: 11 additions & 1 deletion anms-core/anms/routes/network_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,17 @@ def do_nm_put_hex_eid(eid: str, ari: str):
logger.info('post to nm manager %s with eid %s and data %s' % (url, eid, ari))

try:
request = requests.post(url=url, data=ari, headers={'Content-Type': 'text/plain'})
request = requests.post(url=url,
data=ari,
headers={'Content-Type': 'text/plain'},
timeout=(2.0, 8.0) # 2s for manager to connect, 8s for it to respond
)
except requests.exceptions.ConnectTimeout:
return status.HTTP_504_GATEWAY_TIMEOUT
except requests.exceptions.ReadTimeout:
return status.HTTP_504_GATEWAY_TIMEOUT
except requests.exceptions.Timeout:
return status.HTTP_504_GATEWAY_TIMEOUT
except Exception:
return status.HTTP_500_INTERNAL_SERVER_ERROR
return request.status_code
Expand Down
3 changes: 2 additions & 1 deletion anms-core/anms/routes/system_status.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,4 +138,5 @@ async def sys_status_get_version():
async def sys_status_get_services_status():
statuses = get_containers_status()
logger.debug(f"Checking all services' status: {str(statuses)}")
return json.dumps(statuses)
return statuses

132 changes: 46 additions & 86 deletions anms-core/anms/routes/transcoder.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
# the prime contract 80NM0018D0004 between the Caltech and NASA under
# subcontract 1658085.
#
import json
import time
import asyncio

Expand All @@ -28,15 +27,15 @@
from fastapi_pagination import Page, Params
from fastapi_pagination.ext.async_sqlalchemy import paginate

from sqlalchemy import select, or_, String, desc
from sqlalchemy import select, or_, desc
from anms.models.relational import get_session

from anms.components.schemas import TranscoderLog as TL
from anms.models.relational import get_async_session
from anms.models.relational.transcoder_log import TranscoderLog
from anms.shared.mqtt_client import MQTT_CLIENT
from anms.shared.opensearch_logger import OpenSearchLogger

from anms.shared.opensearch_logger import OpenSearchLogger
from anms.shared.transmogrifier import TRANSMORGIFIER
from anms.routes.network_manager import do_nm_put_hex_eid

router = APIRouter(tags=["Transcoder"])
Expand Down Expand Up @@ -65,64 +64,22 @@ async def paged_transcoder_log(query: str, params: Params = Depends()):

@router.get("/db/id/{id}", status_code=status.HTTP_200_OK, response_model=TL)
def transcoder_log_by_id(id: str):
return do_transcoder_log_by_id(id)
return _do_transcoder_log_by_id(id)

def do_transcoder_log_by_id(id: str):
def _do_transcoder_log_by_id(id: str):
with get_session() as session:
return TranscoderLog.query.filter_by(transcoder_log_id=id).first()

# PUT /ui/incoming/{cbor}/hex
@router.put("/ui/incoming/{input_cbor}/hex", status_code=status.HTTP_200_OK)
async def transcoder_put_input_cbor(input_cbor: str):
msg = json.dumps({'uri': input_cbor})
transcoder_log_id = None
with get_session() as session:
curr_uri = TranscoderLog.query.filter(or_(TranscoderLog.input_string==input_cbor, TranscoderLog.cbor==input_cbor)).first()
if curr_uri is None:
c1 = TranscoderLog(input_string=input_cbor, parsed_as='pending')
session.add(c1)
session.flush()
session.refresh(c1)
transcoder_log_id = c1.transcoder_log_id
session.commit()
status = "Submitted ARI to transcoder"
else:
# the input_ari has already been submitted
status = "ARI previously submitted, check log"
transcoder_log_id = curr_uri.transcoder_log_id

logger.info('PUBLISH to transcode/CoreFacing/Outgoing, msg = %s' % msg)
MQTT_CLIENT.publish("transcode/CoreFacing/Outgoing", msg)

return {"id": transcoder_log_id, "status": status}
return _transcoder_put_cbor(input_cbor)


@router.get("/ui/incoming/await/{cbor}/hex", status_code=status.HTTP_200_OK)
async def transcoder_put_cbor_await(cbor: str):
curr_uri = ""
msg = json.dumps({'uri': cbor})
transcoder_log_id = None
with get_session() as session:
curr_uri = TranscoderLog.query.filter(or_(TranscoderLog.input_string==cbor, TranscoderLog.cbor==cbor)).first()
if curr_uri is None:
c1 = TranscoderLog(input_string=cbor, parsed_as='pending')
session.add(c1)
session.flush()
session.refresh(c1)
transcoder_log_id = c1.transcoder_log_id
session.commit()
logger.info('PUBLISH to transcode/CoreFacing/Outgoing, msg = %s' % msg)
MQTT_CLIENT.publish("transcode/CoreFacing/Outgoing", msg)
else:
transcoder_log_id = curr_uri.transcoder_log_id
if curr_uri.parsed_as != "pending":
if curr_uri.parsed_as == "ERROR":
curr_uri = "ARI://BADARI"
else:
curr_uri = curr_uri.uri
return {"data": curr_uri}


curr_entry = _transcoder_put_cbor(cbor)
transcoder_log_id = curr_entry["id"]
while True:
with get_session() as session:
curr_uri = TranscoderLog.query.filter(TranscoderLog.transcoder_log_id==transcoder_log_id).first()
Expand All @@ -137,36 +94,37 @@ async def transcoder_put_cbor_await(cbor: str):

return {"data": curr_uri}

# PUT /ui/incoming/str Body is str ARI to send to transcoder
@router.get("/ui/incoming/await/str", status_code=status.HTTP_200_OK)
async def transcoder_put_await_str(input_ari: str):
input_ari = input_ari.strip()
msg = json.dumps({"uri": input_ari})
def _transcoder_put_cbor(input_cbor):
transcoder_log_id = None
curr_uri = None
send_to_transcode = False
with get_session() as session:
curr_uri = TranscoderLog.query.filter(or_(TranscoderLog.input_string==input_ari,TranscoderLog.ari==input_ari, TranscoderLog.cbor==input_ari)).first()
curr_uri = TranscoderLog.query.filter(or_(TranscoderLog.input_string==input_cbor, TranscoderLog.cbor==input_cbor)).first()

if curr_uri is None:
c1 = TranscoderLog(input_string=input_ari, parsed_as='pending')
c1 = TranscoderLog(input_string=input_cbor, parsed_as='pending')
session.add(c1)
session.flush()
session.refresh(c1)
transcoder_log_id = c1.transcoder_log_id
session.commit()
logger.info('PUBLISH to transcode/CoreFacing/Outgoing, msg = %s' % msg)
MQTT_CLIENT.publish("transcode/CoreFacing/Outgoing", msg)
send_to_transcode = True
else:
# the input_ari has already been submitted
status = "ARI previously submitted, check log"
transcoder_log_id = curr_uri.transcoder_log_id
if curr_uri.parsed_as != "pending":
if curr_uri.parsed_as == "ERROR":
curr_uri = "ARI://BADARI"
else:
curr_uri = curr_uri.uri
return {"data": curr_uri}


if(send_to_transcode):
status = "Submitted ARI to transcoder"
TRANSMORGIFIER.transcode(input_cbor)


return {"id": transcoder_log_id, "status": status}


# PUT /ui/incoming/str Body is str ARI to send to transcoder
@router.get("/ui/incoming/await/str", status_code=status.HTTP_200_OK)
async def transcoder_put_await_str(input_ari: str):
curr_entry = _transcoder_put_str(input_ari)
transcoder_log_id = curr_entry["id"]
while(True):
with get_session() as session:
curr_uri = TranscoderLog.query.filter_by(transcoder_log_id=transcoder_log_id).first()
Expand All @@ -178,19 +136,18 @@ async def transcoder_put_await_str(input_ari: str):
break
time.sleep(1)


return {"data": curr_uri}


# PUT /ui/incoming/str Body is str ARI to send to transcoder
@router.put("/ui/incoming/str", status_code=status.HTTP_200_OK)
def transcoder_incoming_str(input_ari: str):
return transcoder_put_str(input_ari)
return _transcoder_put_str(input_ari)

def transcoder_put_str(input_ari: str):
def _transcoder_put_str(input_ari: str):
input_ari = input_ari.strip()
msg = json.dumps({"uri": input_ari})
transcoder_log_id = None
send_to_transcode = False
with get_session() as session:
curr_uri = TranscoderLog.query.filter(or_(TranscoderLog.input_string==input_ari,TranscoderLog.ari==input_ari, TranscoderLog.cbor==input_ari)).first()
if curr_uri is None:
Expand All @@ -200,44 +157,47 @@ def transcoder_put_str(input_ari: str):
session.refresh(c1)
transcoder_log_id = c1.transcoder_log_id
session.commit()
status = "Submitted ARI to transcoder"
send_to_transcode = True
else:
# the input_ari has already been submitted
status = "ARI previously submitted, check log"
state = "ARI previously submitted, check log"
transcoder_log_id = curr_uri.transcoder_log_id

if(send_to_transcode):
state = "Submitted ARI to transcoder"
TRANSMORGIFIER.transcode(input_ari)

logger.info('PUBLISH to transcode/CoreFacing/Outgoing, msg = %s' % msg)
MQTT_CLIENT.publish("transcode/CoreFacing/Outgoing", msg)

return {"id": transcoder_log_id, "status": status}

return {"id": transcoder_log_id, "status": state}


# PUT /ui/incoming_send/str Body is str ARI to send to transcoder
@router.put("/ui/incoming_send/str", status_code=status.HTTP_200_OK)
async def transcoder_send_ari_str(eid: str, ari: str):
try:
# Perform translation (API wrapper)
idinfo = transcoder_put_str(ari)
idinfo = _transcoder_put_str(ari)

# Retrieve details and wait for completion
retries = 10
while True:
info = do_transcoder_log_by_id(idinfo["id"])
# Wait for request to process before checking state
await asyncio.sleep(1)

info = _do_transcoder_log_by_id(idinfo["id"])

if info.parsed_as != "pending":
break
if retries <= 0:
return { "idinfo" : idinfo, "info" : info, "status" : 504 }
await asyncio.sleep(1)
retries -= 1

if info.parsed_as == "ERROR":
return { "idinfo" : idinfo, "info" : info, "status" : 500 }

# Publish
status = do_nm_put_hex_eid( eid, info.cbor )
state = do_nm_put_hex_eid( eid, info.cbor )

return { "idinfo" : idinfo, "info" : info, "status" : status }
return { "idinfo" : idinfo, "info" : info, "status" : state }
except Exception as e:
logger.exception(e)
return status.HTTP_500_INTERNAL_SERVER_ERROR
Expand Down
4 changes: 4 additions & 0 deletions anms-core/anms/shared/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -195,6 +195,10 @@ class BaseConfig(AbstractConfig):
UI_PORT = 9030
UI_API_BASE = "/api/"


#Transcoding
Transcoder = "Internal"

def on_finalized(self):
pass

Expand Down
Loading
Loading