Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 21 additions & 0 deletions .github/workflows/build-and-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -54,3 +54,24 @@ jobs:
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}

test_schemas:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v6
- name: Install python
uses: actions/setup-python@v6
with:
python-version: '3.12'

- name: Install extractor uploader
env:
PYPI_ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_READONLY_TOKEN_USER_PUBLIC_REPOS }}
PYPI_ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_READONLY_TOKEN_PUBLIC_REPOS }}
run: |
python -m pip install --upgrade pip
pip install cognite-extractor-publisher --extra-index-url "https://${PYPI_ARTIFACTORY_USERNAME}:${PYPI_ARTIFACTORY_PASSWORD}@cognite.jfrog.io/cognite/api/pypi/snakepit/simple"

- name: Test schemas
run: ./ExtractorUtils.Test/test_schemas.sh
17 changes: 17 additions & 0 deletions ExtractorUtils.Test/test_schemas.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
#!/bin/bash

# Smoke-test the repository's JSON schemas: for each schema, run the
# `publish-extractor` CLI (cognite-extractor-publisher) to produce a bundled
# schema file, then generate docs from that bundle. CI fails if any step
# errors. Requires `publish-extractor` on PATH; paths below are relative to
# the repository root, so run this from there.

# -e: abort on the first failing command; -x: trace commands for CI logs.
set -e -x

# Schemas to validate, relative to the repository root.
target_schemas=(
"schema/base_config.schema.json"
"schema/unstable/connection_config.schema.json"
"schema/unstable/base_cognite_config.schema.json"
)

for schema in "${target_schemas[@]}"; do
echo "Processing $schema"
# Produce a single bundled schema file (presumably resolving $ref
# references — confirm against publish-extractor's documentation).
publish-extractor schema --schema "$schema" --output bundled.schema.json
echo "Generating docs for $schema"
publish-extractor docs --schema bundled.schema.json
# Remove the intermediate bundle so the next iteration starts clean.
rm bundled.schema.json
done
6 changes: 3 additions & 3 deletions ExtractorUtils/Unstable/Configuration/Connection.cs
Original file line number Diff line number Diff line change
Expand Up @@ -37,9 +37,9 @@ public class ConnectionConfig : VersionedConfig
/// <summary>
/// Configuration for the connection to CDF.
/// </summary>
public CdfConnectionConfig CdfConnection { get => _cdfConnection; set { _cdfConnection = value ?? _cdfConnection; } }
public CdfConnectionConfig Connection { get => _connection; set { _connection = value ?? _connection; } }

private CdfConnectionConfig _cdfConnection = new CdfConnectionConfig();
private CdfConnectionConfig _connection = new CdfConnectionConfig();

/// <summary>
/// Register any necessary yaml converters.
Expand Down Expand Up @@ -157,4 +157,4 @@ public class SdkLoggingConfig

}

}
}
8 changes: 4 additions & 4 deletions ExtractorUtils/Unstable/DestinationUtilsUnstable.cs
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ public static IHttpClientBuilder ConfigureCogniteHttpClientHandlers(this IHttpCl
{
try
{
var retryConfig = provider.GetService<ConnectionConfig>()?.CdfConnection.Retries ?? new Configuration.RetryConfig();
var retryConfig = provider.GetService<ConnectionConfig>()?.Connection.Retries ?? new Configuration.RetryConfig();
return CogniteExtensions.GetRetryPolicy(provider.GetService<ILogger<Client>>(),
retryConfig.MaxRetries, retryConfig.MaxBackoffValue.Value);
}
Expand All @@ -123,7 +123,7 @@ public static IHttpClientBuilder ConfigureCogniteHttpClientHandlers(this IHttpCl
{
try
{
var retryConfig = provider.GetService<ConnectionConfig>()?.CdfConnection.Retries ?? new Configuration.RetryConfig();
var retryConfig = provider.GetService<ConnectionConfig>()?.Connection.Retries ?? new Configuration.RetryConfig();
return CogniteExtensions.GetTimeoutPolicy(retryConfig.TimeoutValue.Value);
}
catch (ObjectDisposedException)
Expand All @@ -136,7 +136,7 @@ public static IHttpClientBuilder ConfigureCogniteHttpClientHandlers(this IHttpCl
{
try
{
var certConfig = provider.GetService<ConnectionConfig>()?.CdfConnection?.SslCertificates;
var certConfig = provider.GetService<ConnectionConfig>()?.Connection?.SslCertificates;
return GetClientHandler(certConfig);
}
catch (ObjectDisposedException)
Expand Down Expand Up @@ -295,4 +295,4 @@ public static Client.Builder Configure(
return builder;
}
}
}
}
31 changes: 22 additions & 9 deletions schema/cognite_config.schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -250,6 +250,13 @@
"description": "Maximum number of data modeling instances per get/create instance request",
"max": 1000,
"min": 1
},
"stream-records": {
"type": "integer",
"default": 1000,
"description": "Maximum number of stream records per get/create stream records request",
"max": 1000,
"min": 1
}
},
"unevaluatedProperties": false
Expand All @@ -260,51 +267,57 @@
"properties": {
"time-series": {
"type": "integer",
"default": 20,
"default": 5,
"description": "Maximum number of parallel requests per timeseries operation",
"min": 1
},
"assets": {
"type": "integer",
"default": 20,
"default": 5,
"description": "Maximum number of parallel requests per assets operation",
"min": 1
},
"data-points": {
"type": "integer",
"default": 10,
"default": 5,
"description": "Maximum number of parallel requests per datapoints operation",
"min": 1
},
"raw": {
"type": "integer",
"default": 10,
"default": 5,
"description": "Maximum number of parallel requests per raw operation",
"min": 1
},
"ranges": {
"type": "integer",
"default": 20,
"default": 5,
"description": "Maximum number of parallel requests per get first/last datapoint operation",
"min": 1
},
"events": {
"type": "integer",
"default": 20,
"default": 5,
"description": "Maximum number of parallel requests per events operation",
"min": 1
},
"sequences": {
"type": "integer",
"default": 10,
"default": 5,
"description": "Maximum number of parallel requests per sequences operation",
"min": 1
},
"instances": {
"type": "integer",
"default": 4,
"default": 2,
"description": "Maximum number of parallel requests per data modeling instances operation",
"min": 1
},
"stream-records": {
"type": "integer",
"default": 4,
"description": "Maximum number of parallel requests per stream records operation",
"min": 1
}
},
"unevaluatedProperties": false
Expand Down Expand Up @@ -382,4 +395,4 @@
"unevaluatedProperties": false
}
}
}
}
192 changes: 192 additions & 0 deletions schema/unstable/base_cognite_config.schema.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,192 @@
{
"$id": "base_cognite_config.schema.json",
"$schema": "https://json-schema.org/draft/2020-12/schema",
"type": "object",
"unevaluatedProperties": false,
"properties": {
"cdf-chunking": {
"type": "object",
"description": "Configure chunking of data on requests to CDF. Note that increasing these may cause requests to fail due to limits in the API itself",
"properties": {
"time-series": {
"type": "integer",
"default": 1000,
"description": "Maximum number of timeseries per get/create timeseries request",
"max": 1000,
"min": 1
},
"assets": {
"type": "integer",
"default": 1000,
"description": "Maximum number of assets per get/create assets request",
"max": 1000,
"min": 1
},
"data-point-time-series": {
"type": "integer",
"default": 10000,
"description": "Maximum number of timeseries per datapoint create request",
"max": 10000,
"min": 1
},
"data-point-delete": {
"type": "integer",
"default": 10000,
"description": "Maximum number of ranges per delete datapoints request",
"max": 10000,
"min": 1
},
"data-point-list": {
"type": "integer",
"default": 100,
"description": "Maximum number of timeseries per datapoint read request. Used when getting the first point in a timeseries.",
"max": 100,
"min": 1
},
"data-points": {
"type": "integer",
"default": 100000,
"description": "Maximum number of datapoints per datapoints create request",
"max": 100000,
"min": 1
},
"data-points-gzip-limit": {
"type": "integer",
"default": 5000,
"description": "Minimum number of datapoints in request to switch to using gzip. Set to -1 to disable, and 0 to always enable (not recommended). The minimum HTTP packet size is generally 1500 bytes, so this should never be set below 100 for numeric datapoints. Even for larger packages gzip is efficient enough that packages are compressed below 1500 bytes. At 5000 it is always a performance gain. It can be set lower if bandwidth is a major issue",
"max": 100000,
"min": 1
},
"raw-rows": {
"type": "integer",
"default": 10000,
"description": "Maximum number of rows per request to cdf raw",
"max": 10000,
"min": 1
},
"raw-rows-delete": {
"type": "integer",
"default": 1000,
"description": "Maximum number of row keys per delete request to raw",
"max": 1000,
"min": 1
},
"data-point-latest": {
"type": "integer",
"default": 100,
"description": "Maximum number of timeseries per datapoint read latest request",
"max": 100,
"min": 1
},
"events": {
"type": "integer",
"default": 1000,
"description": "Maximum number of events per get/create events request",
"max": 1000,
"min": 1
},
"sequences": {
"type": "integer",
"default": 1000,
"description": "Maximum number of sequences per get/create sequences request",
"max": 1000,
"min": 1
},
"sequence-row-sequences": {
"type": "integer",
"default": 1000,
"description": "Maximum number of sequences per create sequence rows request",
"max": 1000,
"min": 1
},
"sequence-rows": {
"type": "integer",
"default": 10000,
"description": "Maximum number of sequence rows per sequence when creating rows",
"max": 10000,
"min": 1
},
"instances": {
"type": "integer",
"default": 1000,
"description": "Maximum number of data modeling instances per get/create instance request",
"max": 1000,
"min": 1
},
"stream-records": {
"type": "integer",
"default": 1000,
"description": "Maximum number of stream records per get/create stream records request",
"max": 1000,
"min": 1
}
},
"unevaluatedProperties": false
},
"cdf-throttling": {
"type": "object",
"description": "Configure the maximum number of parallel requests for different CDF resources.",
"properties": {
"time-series": {
"type": "integer",
"default": 5,
"description": "Maximum number of parallel requests per timeseries operation",
"min": 1
},
"assets": {
"type": "integer",
"default": 5,
"description": "Maximum number of parallel requests per assets operation",
"min": 1
},
"data-points": {
"type": "integer",
"default": 5,
"description": "Maximum number of parallel requests per datapoints operation",
"min": 1
},
"raw": {
"type": "integer",
"default": 5,
"description": "Maximum number of parallel requests per raw operation",
"min": 1
},
"ranges": {
"type": "integer",
"default": 5,
"description": "Maximum number of parallel requests per get first/last datapoint operation",
"min": 1
},
"events": {
"type": "integer",
"default": 5,
"description": "Maximum number of parallel requests per events operation",
"min": 1
},
"sequences": {
"type": "integer",
"default": 5,
"description": "Maximum number of parallel requests per sequences operation",
"min": 1
},
"instances": {
"type": "integer",
"default": 2,
"description": "Maximum number of parallel requests per data modeling instances operation",
"min": 1
},
"stream-records": {
"type": "integer",
"default": 4,
"description": "Maximum number of parallel requests per stream records operation",
"min": 1
}
},
"unevaluatedProperties": false
},
"nan-replacement": {
"type": "number",
"description": "Replacement for NaN values when writing to CDF. If left out, NaN values are skipped."
}
}
}
Loading