@@ -45,9 +45,9 @@ void should_extract_tokens_usage_from_gpt_response(HttpClient client, VertxTestC
             .extracting(Metrics::getAdditionalMetrics)
             .asInstanceOf(InstanceOfAssertFactories.SET)
             .containsExactlyInAnyOrder(
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-sent", 500000L),
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-receive", 750000L),
-                new AdditionalMetric.KeywordMetric("keyword_ai-prompt-token-model", "gpt-4")
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-sent", 500000L),
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-received", 750000L),
+                new AdditionalMetric.KeywordMetric("keyword_llm-proxy_model", "gpt-4")
             )
         )
         .ignoreElements();
@@ -67,11 +67,11 @@ void should_extract_tokens_usage_and_pricing_from_gpt_response(HttpClient client
             .extracting(Metrics::getAdditionalMetrics)
             .asInstanceOf(InstanceOfAssertFactories.SET)
             .containsExactlyInAnyOrder(
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-sent", 500000L),
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-receive", 750000L),
-                new AdditionalMetric.KeywordMetric("keyword_ai-prompt-token-model", "gpt-4"),
-                new AdditionalMetric.DoubleMetric("double_ai-prompt-token-sent-cost", 0.2),
-                new AdditionalMetric.DoubleMetric("double_ai-prompt-token-receive-cost", 0.6)
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-sent", 500000L),
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-received", 750000L),
+                new AdditionalMetric.KeywordMetric("keyword_llm-proxy_model", "gpt-4"),
+                new AdditionalMetric.DoubleMetric("double_llm-proxy_sent-cost", 0.2),
+                new AdditionalMetric.DoubleMetric("double_llm-proxy_received-cost", 0.6)
             )
         )
         .ignoreElements();
@@ -97,9 +97,9 @@ void should_extract_tokens_usage(HttpClient client, VertxTestContext context) {
             .extracting(Metrics::getAdditionalMetrics)
             .asInstanceOf(InstanceOfAssertFactories.SET)
             .containsExactlyInAnyOrder(
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-sent", 500000L),
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-receive", 750000L),
-                new AdditionalMetric.KeywordMetric("keyword_ai-prompt-token-model", "gemini-2")
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-sent", 500000L),
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-received", 750000L),
+                new AdditionalMetric.KeywordMetric("keyword_llm-proxy_model", "gemini-2")
             )
         )
         .ignoreElements();
@@ -119,11 +119,11 @@ void should_extract_tokens_usage_and_pricing(HttpClient client, VertxTestContext
             .extracting(Metrics::getAdditionalMetrics)
             .asInstanceOf(InstanceOfAssertFactories.SET)
             .containsExactlyInAnyOrder(
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-sent", 500000L),
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-receive", 750000L),
-                new AdditionalMetric.KeywordMetric("keyword_ai-prompt-token-model", "gemini-2"),
-                new AdditionalMetric.DoubleMetric("double_ai-prompt-token-sent-cost", 0.2),
-                new AdditionalMetric.DoubleMetric("double_ai-prompt-token-receive-cost", 0.6)
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-sent", 500000L),
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-received", 750000L),
+                new AdditionalMetric.KeywordMetric("keyword_llm-proxy_model", "gemini-2"),
+                new AdditionalMetric.DoubleMetric("double_llm-proxy_sent-cost", 0.2),
+                new AdditionalMetric.DoubleMetric("double_llm-proxy_received-cost", 0.6)
             )
         )
         .ignoreElements();
@@ -149,9 +149,9 @@ void should_extract_tokens_usage(HttpClient client, VertxTestContext context) {
             .extracting(Metrics::getAdditionalMetrics)
             .asInstanceOf(InstanceOfAssertFactories.SET)
             .containsExactlyInAnyOrder(
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-sent", 500000L),
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-receive", 750000L),
-                new AdditionalMetric.KeywordMetric("keyword_ai-prompt-token-model", "claude-3")
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-sent", 500000L),
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-received", 750000L),
+                new AdditionalMetric.KeywordMetric("keyword_llm-proxy_model", "claude-3")
             )
         )
         .ignoreElements();
@@ -171,11 +171,11 @@ void should_extract_tokens_usage_and_pricing(HttpClient client, VertxTestContext
             .extracting(Metrics::getAdditionalMetrics)
             .asInstanceOf(InstanceOfAssertFactories.SET)
             .containsExactlyInAnyOrder(
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-sent", 500000L),
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-receive", 750000L),
-                new AdditionalMetric.KeywordMetric("keyword_ai-prompt-token-model", "claude-3"),
-                new AdditionalMetric.DoubleMetric("double_ai-prompt-token-sent-cost", 0.2),
-                new AdditionalMetric.DoubleMetric("double_ai-prompt-token-receive-cost", 0.6)
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-sent", 500000L),
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-received", 750000L),
+                new AdditionalMetric.KeywordMetric("keyword_llm-proxy_model", "claude-3"),
+                new AdditionalMetric.DoubleMetric("double_llm-proxy_sent-cost", 0.2),
+                new AdditionalMetric.DoubleMetric("double_llm-proxy_received-cost", 0.6)
             )
         )
         .ignoreElements();
@@ -201,9 +201,9 @@ void should_extract_tokens_usage(HttpClient client, VertxTestContext context) {
             .extracting(Metrics::getAdditionalMetrics)
             .asInstanceOf(InstanceOfAssertFactories.SET)
             .containsExactlyInAnyOrder(
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-sent", 500000L),
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-receive", 750000L),
-                new AdditionalMetric.KeywordMetric("keyword_ai-prompt-token-model", "mistral-1")
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-sent", 500000L),
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-received", 750000L),
+                new AdditionalMetric.KeywordMetric("keyword_llm-proxy_model", "mistral-1")
             )
         )
         .ignoreElements();
@@ -223,11 +223,11 @@ void should_extract_tokens_usage_and_pricing(HttpClient client, VertxTestContext
             .extracting(Metrics::getAdditionalMetrics)
             .asInstanceOf(InstanceOfAssertFactories.SET)
             .containsExactlyInAnyOrder(
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-sent", 500000L),
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-receive", 750000L),
-                new AdditionalMetric.KeywordMetric("keyword_ai-prompt-token-model", "mistral-1"),
-                new AdditionalMetric.DoubleMetric("double_ai-prompt-token-sent-cost", 0.2),
-                new AdditionalMetric.DoubleMetric("double_ai-prompt-token-receive-cost", 0.6)
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-sent", 500000L),
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-received", 750000L),
+                new AdditionalMetric.KeywordMetric("keyword_llm-proxy_model", "mistral-1"),
+                new AdditionalMetric.DoubleMetric("double_llm-proxy_sent-cost", 0.2),
+                new AdditionalMetric.DoubleMetric("double_llm-proxy_received-cost", 0.6)
             )
         )
         .ignoreElements();
@@ -253,9 +253,9 @@ void should_extract_tokens_usage(HttpClient client, VertxTestContext context) {
             .extracting(Metrics::getAdditionalMetrics)
             .asInstanceOf(InstanceOfAssertFactories.SET)
             .containsExactlyInAnyOrder(
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-sent", 500000L),
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-receive", 750000L),
-                new AdditionalMetric.KeywordMetric("keyword_ai-prompt-token-model", "custom-1")
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-sent", 500000L),
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-received", 750000L),
+                new AdditionalMetric.KeywordMetric("keyword_llm-proxy_model", "custom-1")
             )
         )
         .ignoreElements();
@@ -275,11 +275,11 @@ void should_extract_tokens_usage_and_pricing(HttpClient client, VertxTestContext
             .extracting(Metrics::getAdditionalMetrics)
             .asInstanceOf(InstanceOfAssertFactories.SET)
             .containsExactlyInAnyOrder(
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-sent", 500000L),
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-receive", 750000L),
-                new AdditionalMetric.KeywordMetric("keyword_ai-prompt-token-model", "custom-1"),
-                new AdditionalMetric.DoubleMetric("double_ai-prompt-token-sent-cost", 0.2),
-                new AdditionalMetric.DoubleMetric("double_ai-prompt-token-receive-cost", 0.6)
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-sent", 500000L),
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-received", 750000L),
+                new AdditionalMetric.KeywordMetric("keyword_llm-proxy_model", "custom-1"),
+                new AdditionalMetric.DoubleMetric("double_llm-proxy_sent-cost", 0.2),
+                new AdditionalMetric.DoubleMetric("double_llm-proxy_received-cost", 0.6)
             )
         )
         .ignoreElements();
@@ -305,10 +305,10 @@ void should_extract_tokens_usage_and_pricing_event_dont_find_model(HttpClient cl
             .extracting(Metrics::getAdditionalMetrics)
            .asInstanceOf(InstanceOfAssertFactories.SET)
            .containsExactlyInAnyOrder(
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-sent", 500000L),
-                new AdditionalMetric.LongMetric("long_ai-prompt-token-receive", 750000L),
-                new AdditionalMetric.DoubleMetric("double_ai-prompt-token-sent-cost", 0.2),
-                new AdditionalMetric.DoubleMetric("double_ai-prompt-token-receive-cost", 0.6)
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-sent", 500000L),
+                new AdditionalMetric.LongMetric("long_llm-proxy_tokens-received", 750000L),
+                new AdditionalMetric.DoubleMetric("double_llm-proxy_sent-cost", 0.2),
+                new AdditionalMetric.DoubleMetric("double_llm-proxy_received-cost", 0.6)
             )
         )
         .ignoreElements();
@@ -333,10 +333,10 @@ void should_extract_tokens_usage_and_pricing_event_dont_find_model(HttpClient cl
         assertThat(metrics.getAdditionalMetrics())
             .extracting(AdditionalMetric::name)
             .doesNotContain(
-                "long_ai-prompt-token-sent",
-                "long_ai-prompt-token-receive",
-                "double_ai-prompt-token-sent-cost",
-                "double_ai-prompt-token-receive-cost"
+                "long_llm-proxy_tokens-sent",
+                "long_llm-proxy_tokens-received",
+                "double_llm-proxy_sent-cost",
+                "double_llm-proxy_received-cost"
             )
         )
         .ignoreElements();
0 commit comments