@@ -33,8 +33,8 @@ exports[`telemetry > should not record telemetry inputs / outputs when disabled
         "stop",
       ],
       "gen_ai.system": "mock-provider",
-      "gen_ai.usage.completion_tokens": 10,
-      "gen_ai.usage.prompt_tokens": 3,
+      "gen_ai.usage.input_tokens": 3,
+      "gen_ai.usage.output_tokens": 10,
       "operation.name": "ai.streamObject.doStream",
     },
     "events": [
@@ -81,8 +81,8 @@ exports[`telemetry > should not record telemetry inputs / outputs when disabled
         "stop",
       ],
       "gen_ai.system": "mock-provider",
-      "gen_ai.usage.completion_tokens": 10,
-      "gen_ai.usage.prompt_tokens": 3,
+      "gen_ai.usage.input_tokens": 3,
+      "gen_ai.usage.output_tokens": 10,
       "operation.name": "ai.streamObject.doStream",
     },
     "events": [
@@ -112,8 +112,13 @@ exports[`telemetry > should record telemetry data when enabled with mode "json"
       "ai.schema": "{"type":"object","properties":{"content":{"type":"string"}},"required":["content"],"additionalProperties":false,"$schema":"http://json-schema.org/draft-07/schema#"}",
       "ai.schema.description": "test description",
       "ai.schema.name": "test-name",
+      "ai.settings.frequencyPenalty": 0.3,
       "ai.settings.mode": "json",
       "ai.settings.output": "object",
+      "ai.settings.presencePenalty": 0.4,
+      "ai.settings.temperature": 0.5,
+      "ai.settings.topK": 0.1,
+      "ai.settings.topP": 0.2,
       "ai.telemetry.functionId": "test-function-id",
       "ai.telemetry.metadata.test1": "value1",
       "ai.telemetry.metadata.test2": false,
@@ -136,20 +141,30 @@ exports[`telemetry > should record telemetry data when enabled with mode "json"
       "ai.request.headers.header1": "value1",
       "ai.request.headers.header2": "value2",
       "ai.result.object": "{"content":"Hello, world!"}",
+      "ai.settings.frequencyPenalty": 0.3,
       "ai.settings.mode": "json",
+      "ai.settings.presencePenalty": 0.4,
+      "ai.settings.temperature": 0.5,
+      "ai.settings.topK": 0.1,
+      "ai.settings.topP": 0.2,
       "ai.stream.msToFirstChunk": 0,
       "ai.telemetry.functionId": "test-function-id",
       "ai.telemetry.metadata.test1": "value1",
       "ai.telemetry.metadata.test2": false,
       "ai.usage.completionTokens": 10,
       "ai.usage.promptTokens": 3,
+      "gen_ai.request.frequency_penalty": 0.3,
       "gen_ai.request.model": "mock-model-id",
+      "gen_ai.request.presence_penalty": 0.4,
+      "gen_ai.request.temperature": 0.5,
+      "gen_ai.request.top_k": 0.1,
+      "gen_ai.request.top_p": 0.2,
       "gen_ai.response.finish_reasons": [
         "stop",
       ],
       "gen_ai.system": "mock-provider",
-      "gen_ai.usage.completion_tokens": 10,
-      "gen_ai.usage.prompt_tokens": 3,
+      "gen_ai.usage.input_tokens": 3,
+      "gen_ai.usage.output_tokens": 10,
       "operation.name": "ai.streamObject.doStream test-function-id",
       "resource.name": "test-function-id",
     },
@@ -180,8 +195,13 @@ exports[`telemetry > should record telemetry data when enabled with mode "tool"
       "ai.schema": "{"type":"object","properties":{"content":{"type":"string"}},"required":["content"],"additionalProperties":false,"$schema":"http://json-schema.org/draft-07/schema#"}",
       "ai.schema.description": "test description",
       "ai.schema.name": "test-name",
+      "ai.settings.frequencyPenalty": 0.3,
       "ai.settings.mode": "tool",
       "ai.settings.output": "object",
+      "ai.settings.presencePenalty": 0.4,
+      "ai.settings.temperature": 0.5,
+      "ai.settings.topK": 0.1,
+      "ai.settings.topP": 0.2,
       "ai.telemetry.functionId": "test-function-id",
       "ai.telemetry.metadata.test1": "value1",
       "ai.telemetry.metadata.test2": false,
@@ -204,20 +224,30 @@ exports[`telemetry > should record telemetry data when enabled with mode "tool"
       "ai.request.headers.header1": "value1",
       "ai.request.headers.header2": "value2",
       "ai.result.object": "{"content":"Hello, world!"}",
+      "ai.settings.frequencyPenalty": 0.3,
       "ai.settings.mode": "tool",
+      "ai.settings.presencePenalty": 0.4,
+      "ai.settings.temperature": 0.5,
+      "ai.settings.topK": 0.1,
+      "ai.settings.topP": 0.2,
       "ai.stream.msToFirstChunk": 0,
       "ai.telemetry.functionId": "test-function-id",
       "ai.telemetry.metadata.test1": "value1",
       "ai.telemetry.metadata.test2": false,
       "ai.usage.completionTokens": 10,
       "ai.usage.promptTokens": 3,
+      "gen_ai.request.frequency_penalty": 0.3,
       "gen_ai.request.model": "mock-model-id",
+      "gen_ai.request.presence_penalty": 0.4,
+      "gen_ai.request.temperature": 0.5,
+      "gen_ai.request.top_k": 0.1,
+      "gen_ai.request.top_p": 0.2,
       "gen_ai.response.finish_reasons": [
         "stop",
       ],
       "gen_ai.system": "mock-provider",
-      "gen_ai.usage.completion_tokens": 10,
-      "gen_ai.usage.prompt_tokens": 3,
+      "gen_ai.usage.input_tokens": 3,
+      "gen_ai.usage.output_tokens": 10,
       "operation.name": "ai.streamObject.doStream test-function-id",
       "resource.name": "test-function-id",
     },
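
For context (not part of the snapshot diff): a minimal sketch of the kind of `streamObject` call these snapshots exercise, assuming the AI SDK's `experimental_telemetry` option. The OpenAI provider, model id, and prompt below are illustrative stand-ins; the test suite itself uses a mock provider/model, and the exact call shape may differ slightly by SDK version.

```ts
import { streamObject } from 'ai';
import { openai } from '@ai-sdk/openai';   // illustrative provider; tests use a mock model
import { z } from 'zod';

// The generation settings below are what show up as ai.settings.* on the
// streamObject span and as gen_ai.request.* on the doStream span.
const result = await streamObject({
  model: openai('gpt-4o'),
  schema: z.object({ content: z.string() }),
  schemaName: 'test-name',
  schemaDescription: 'test description',
  mode: 'json',                            // or 'tool', matching the two snapshot variants
  prompt: 'prompt',
  temperature: 0.5,
  topP: 0.2,
  topK: 0.1,
  presencePenalty: 0.4,
  frequencyPenalty: 0.3,
  headers: { header1: 'value1', header2: 'value2' },
  experimental_telemetry: {
    isEnabled: true,
    functionId: 'test-function-id',
    metadata: { test1: 'value1', test2: false },
  },
});

for await (const part of result.partialObjectStream) {
  // Consuming the stream lets the spans finish, including the renamed
  // gen_ai.usage.input_tokens / gen_ai.usage.output_tokens attributes.
}
```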