From 6452cfc57d3bbe6b71102c243e3d45c0bffd548b Mon Sep 17 00:00:00 2001 From: Aman Agarwal Date: Wed, 7 Aug 2024 01:26:19 +0530 Subject: [PATCH 1/3] feat: added openai sdk for opentelemetry for chat completion --- package-lock.json | 247 ++++++++++++++--- .../.eslintignore | 1 + .../.eslintrc.js | 7 + .../CHANGELOG.md | 1 + .../LICENSE | 201 ++++++++++++++ .../README.md | 74 +++++ .../package.json | 53 ++++ .../src/config.ts | 81 ++++++ .../src/index.ts | 18 ++ .../src/instrumentation.ts | 88 ++++++ .../src/semantic-conventions.ts | 56 ++++ .../src/types.ts | 30 ++ .../src/utils.ts | 97 +++++++ .../src/wrapper.ts | 261 ++++++++++++++++++ .../tsconfig.json | 14 + 15 files changed, 1197 insertions(+), 32 deletions(-) create mode 100644 plugins/node/opentelemetry-instrumentation-openai/.eslintignore create mode 100644 plugins/node/opentelemetry-instrumentation-openai/.eslintrc.js create mode 100644 plugins/node/opentelemetry-instrumentation-openai/CHANGELOG.md create mode 100644 plugins/node/opentelemetry-instrumentation-openai/LICENSE create mode 100644 plugins/node/opentelemetry-instrumentation-openai/README.md create mode 100644 plugins/node/opentelemetry-instrumentation-openai/package.json create mode 100644 plugins/node/opentelemetry-instrumentation-openai/src/config.ts create mode 100644 plugins/node/opentelemetry-instrumentation-openai/src/index.ts create mode 100644 plugins/node/opentelemetry-instrumentation-openai/src/instrumentation.ts create mode 100644 plugins/node/opentelemetry-instrumentation-openai/src/semantic-conventions.ts create mode 100644 plugins/node/opentelemetry-instrumentation-openai/src/types.ts create mode 100644 plugins/node/opentelemetry-instrumentation-openai/src/utils.ts create mode 100644 plugins/node/opentelemetry-instrumentation-openai/src/wrapper.ts create mode 100644 plugins/node/opentelemetry-instrumentation-openai/tsconfig.json diff --git a/package-lock.json b/package-lock.json index ffc20fc4fa..524fb01b1f 100644 --- 
a/package-lock.json +++ b/package-lock.json @@ -10909,6 +10909,10 @@ "resolved": "plugins/node/opentelemetry-instrumentation-net", "link": true }, + "node_modules/@opentelemetry/instrumentation-openai": { + "resolved": "plugins/node/opentelemetry-instrumentation-openai", + "link": true + }, "node_modules/@opentelemetry/instrumentation-pg": { "resolved": "plugins/node/opentelemetry-instrumentation-pg", "link": true @@ -13051,6 +13055,15 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-18.6.5.tgz", "integrity": "sha512-Xjt5ZGUa5WusGZJ4WJPbOT8QOqp6nDynVFRKcUt32bOgvXEoc6o085WNkYTMO7ifAj2isEfQQ2cseE+wT6jsRw==" }, + "node_modules/@types/node-fetch": { + "version": "2.6.11", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.11.tgz", + "integrity": "sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==", + "dependencies": { + "@types/node": "*", + "form-data": "^4.0.0" + } + }, "node_modules/@types/normalize-package-data": { "version": "2.4.4", "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz", @@ -14179,7 +14192,6 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", - "dev": true, "dependencies": { "event-target-shim": "^5.0.0" }, @@ -14279,7 +14291,6 @@ "version": "4.5.0", "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.5.0.tgz", "integrity": "sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==", - "dev": true, "dependencies": { "humanize-ms": "^1.2.1" }, @@ -14814,8 +14825,7 @@ "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": 
"sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", - "dev": true + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "node_modules/at-least-node": { "version": "1.0.0", @@ -16150,7 +16160,6 @@ "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "dev": true, "funding": [ { "type": "github", @@ -17463,7 +17472,6 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dev": true, "dependencies": { "delayed-stream": "~1.0.0" }, @@ -18500,7 +18508,6 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "dev": true, "engines": { "node": ">=0.4.0" } @@ -19883,7 +19890,6 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", - "dev": true, "engines": { "node": ">=6" } @@ -20787,7 +20793,6 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", - "dev": true, "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", @@ -20797,6 +20802,23 @@ "node": ">= 6" } }, + "node_modules/form-data-encoder": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-1.7.2.tgz", + "integrity": 
"sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==" + }, + "node_modules/formdata-node": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-4.4.1.tgz", + "integrity": "sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==", + "dependencies": { + "node-domexception": "1.0.0", + "web-streams-polyfill": "4.0.0-beta.3" + }, + "engines": { + "node": ">= 12.20" + } + }, "node_modules/formidable": { "version": "1.2.6", "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.2.6.tgz", @@ -21988,7 +22010,6 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==", - "dev": true, "dependencies": { "ms": "^2.0.0" } @@ -23672,6 +23693,14 @@ "integrity": "sha512-/GDnfQYsltsjRswQhN9fhv3EMw2sCpUdrdxyWDOUK7eyD++r3gRhzgiQgc/x4MAv2i1iuQ4lxO5mvqM3vj4bwA==", "dev": true }, + "node_modules/js-tiktoken": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/js-tiktoken/-/js-tiktoken-1.0.12.tgz", + "integrity": "sha512-L7wURW1fH9Qaext0VzaUDpFGVQgjkdE3Dgsy9/+yXyGEpBKnylTd0mU0bfbNkKDlXRb6TEsZkwuflu1B8uQbJQ==", + "dependencies": { + "base64-js": "^1.5.1" + } + }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -26644,7 +26673,6 @@ "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "dev": true, "engines": { "node": ">= 0.6" } @@ -26653,7 +26681,6 @@ "version": "2.1.35", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - 
"dev": true, "dependencies": { "mime-db": "1.52.0" }, @@ -28016,6 +28043,24 @@ "integrity": "sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==", "dev": true }, + "node_modules/node-domexception": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", + "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ], + "engines": { + "node": ">=10.5.0" + } + }, "node_modules/node-environment-flags": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/node-environment-flags/-/node-environment-flags-1.0.6.tgz", @@ -29668,6 +29713,31 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/openai": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/openai/-/openai-4.54.0.tgz", + "integrity": "sha512-e/12BdtTtj+tXs7iHm+Dm7H7WjEWnw7O52B2wSfCQ6lD5F6cvjzo7cANXy5TJ1Q3/qc8YRPT5wBTTFtP5sBp1g==", + "dependencies": { + "@types/node": "^18.11.18", + "@types/node-fetch": "^2.6.4", + "abort-controller": "^3.0.0", + "agentkeepalive": "^4.2.1", + "form-data-encoder": "1.7.2", + "formdata-node": "^4.3.2", + "node-fetch": "^2.6.7" + }, + "bin": { + "openai": "bin/cli" + } + }, + "node_modules/openai/node_modules/@types/node": { + "version": "18.19.43", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.43.tgz", + "integrity": "sha512-Mw/YlgXnyJdEwLoFv2dpuJaDFriX+Pc+0qOBJ57jC1H6cDxIj2xc5yUrdtArDVG0m+KV6622a4p2tenEqB3C/g==", + "dependencies": { + "undici-types": "~5.26.4" + } + }, "node_modules/opentracing": { "version": "0.14.7", "resolved": "https://registry.npmjs.org/opentracing/-/opentracing-0.14.7.tgz", @@ -35396,8 +35466,7 @@ "node_modules/undici-types": { "version": "5.26.5", "resolved": 
"https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", - "dev": true + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" }, "node_modules/unicode-canonical-property-names-ecmascript": { "version": "2.0.0", @@ -35798,6 +35867,14 @@ "defaults": "^1.0.3" } }, + "node_modules/web-streams-polyfill": { + "version": "4.0.0-beta.3", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz", + "integrity": "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==", + "engines": { + "node": ">= 14" + } + }, "node_modules/webidl-conversions": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", @@ -39388,6 +39465,40 @@ "@opentelemetry/api": "^1.3.0" } }, + "plugins/node/opentelemetry-instrumentation-openai": { + "name": "@opentelemetry/instrumentation-openai", + "version": "0.1.0", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/instrumentation": "^0.52.0", + "js-tiktoken": "^1.0.12", + "openai": "^4.47.1" + }, + "devDependencies": { + "@opentelemetry/api": "^1.3.0", + "@types/node": "18.6.5", + "typescript": "^5.5.4" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "plugins/node/opentelemetry-instrumentation-openai/node_modules/typescript": { + "version": "5.5.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz", + "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, "plugins/node/opentelemetry-instrumentation-pg": { "name": "@opentelemetry/instrumentation-pg", 
"version": "0.43.0", @@ -52979,6 +53090,25 @@ "typescript": "4.4.4" } }, + "@opentelemetry/instrumentation-openai": { + "version": "file:plugins/node/opentelemetry-instrumentation-openai", + "requires": { + "@opentelemetry/api": "^1.3.0", + "@opentelemetry/instrumentation": "^0.52.0", + "@types/node": "18.6.5", + "js-tiktoken": "^1.0.12", + "openai": "^4.47.1", + "typescript": "^5.5.4" + }, + "dependencies": { + "typescript": { + "version": "5.5.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz", + "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==", + "dev": true + } + } + }, "@opentelemetry/instrumentation-pg": { "version": "file:plugins/node/opentelemetry-instrumentation-pg", "requires": { @@ -57244,6 +57374,15 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-18.6.5.tgz", "integrity": "sha512-Xjt5ZGUa5WusGZJ4WJPbOT8QOqp6nDynVFRKcUt32bOgvXEoc6o085WNkYTMO7ifAj2isEfQQ2cseE+wT6jsRw==" }, + "@types/node-fetch": { + "version": "2.6.11", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.11.tgz", + "integrity": "sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==", + "requires": { + "@types/node": "*", + "form-data": "^4.0.0" + } + }, "@types/normalize-package-data": { "version": "2.4.4", "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz", @@ -58165,7 +58304,6 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", - "dev": true, "requires": { "event-target-shim": "^5.0.0" } @@ -58241,7 +58379,6 @@ "version": "4.5.0", "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.5.0.tgz", "integrity": 
"sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==", - "dev": true, "requires": { "humanize-ms": "^1.2.1" } @@ -58660,8 +58797,7 @@ "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", - "dev": true + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "at-least-node": { "version": "1.0.0", @@ -59908,8 +60044,7 @@ "base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "dev": true + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" }, "base64id": { "version": "2.0.0", @@ -60891,7 +61026,6 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dev": true, "requires": { "delayed-stream": "~1.0.0" } @@ -61705,8 +61839,7 @@ "delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "dev": true + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" }, "delegates": { "version": "1.0.0", @@ -62768,8 +62901,7 @@ "event-target-shim": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", - "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", - "dev": true + 
"integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==" }, "eventemitter3": { "version": "4.0.7", @@ -63494,13 +63626,26 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", - "dev": true, "requires": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "mime-types": "^2.1.12" } }, + "form-data-encoder": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-1.7.2.tgz", + "integrity": "sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==" + }, + "formdata-node": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-4.4.1.tgz", + "integrity": "sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==", + "requires": { + "node-domexception": "1.0.0", + "web-streams-polyfill": "4.0.0-beta.3" + } + }, "formidable": { "version": "1.2.6", "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.2.6.tgz", @@ -64409,7 +64554,6 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==", - "dev": true, "requires": { "ms": "^2.0.0" } @@ -65645,6 +65789,14 @@ "integrity": "sha512-/GDnfQYsltsjRswQhN9fhv3EMw2sCpUdrdxyWDOUK7eyD++r3gRhzgiQgc/x4MAv2i1iuQ4lxO5mvqM3vj4bwA==", "dev": true }, + "js-tiktoken": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/js-tiktoken/-/js-tiktoken-1.0.12.tgz", + "integrity": "sha512-L7wURW1fH9Qaext0VzaUDpFGVQgjkdE3Dgsy9/+yXyGEpBKnylTd0mU0bfbNkKDlXRb6TEsZkwuflu1B8uQbJQ==", + "requires": { + "base64-js": "^1.5.1" + } + }, "js-tokens": { "version": "4.0.0", "resolved": 
"https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -68040,14 +68192,12 @@ "mime-db": { "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "dev": true + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" }, "mime-types": { "version": "2.1.35", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "dev": true, "requires": { "mime-db": "1.52.0" } @@ -69164,6 +69314,11 @@ "integrity": "sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==", "dev": true }, + "node-domexception": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", + "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==" + }, "node-environment-flags": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/node-environment-flags/-/node-environment-flags-1.0.6.tgz", @@ -70447,6 +70602,30 @@ "is-wsl": "^2.2.0" } }, + "openai": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/openai/-/openai-4.54.0.tgz", + "integrity": "sha512-e/12BdtTtj+tXs7iHm+Dm7H7WjEWnw7O52B2wSfCQ6lD5F6cvjzo7cANXy5TJ1Q3/qc8YRPT5wBTTFtP5sBp1g==", + "requires": { + "@types/node": "^18.11.18", + "@types/node-fetch": "^2.6.4", + "abort-controller": "^3.0.0", + "agentkeepalive": "^4.2.1", + "form-data-encoder": "1.7.2", + "formdata-node": "^4.3.2", + "node-fetch": "^2.6.7" + }, + "dependencies": { + "@types/node": { + "version": "18.19.43", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.43.tgz", + "integrity": 
"sha512-Mw/YlgXnyJdEwLoFv2dpuJaDFriX+Pc+0qOBJ57jC1H6cDxIj2xc5yUrdtArDVG0m+KV6622a4p2tenEqB3C/g==", + "requires": { + "undici-types": "~5.26.4" + } + } + } + }, "opentracing": { "version": "0.14.7", "resolved": "https://registry.npmjs.org/opentracing/-/opentracing-0.14.7.tgz", @@ -74793,8 +74972,7 @@ "undici-types": { "version": "5.26.5", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", - "dev": true + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" }, "unicode-canonical-property-names-ecmascript": { "version": "2.0.0", @@ -75116,6 +75294,11 @@ "defaults": "^1.0.3" } }, + "web-streams-polyfill": { + "version": "4.0.0-beta.3", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz", + "integrity": "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==" + }, "webidl-conversions": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", diff --git a/plugins/node/opentelemetry-instrumentation-openai/.eslintignore b/plugins/node/opentelemetry-instrumentation-openai/.eslintignore new file mode 100644 index 0000000000..378eac25d3 --- /dev/null +++ b/plugins/node/opentelemetry-instrumentation-openai/.eslintignore @@ -0,0 +1 @@ +build diff --git a/plugins/node/opentelemetry-instrumentation-openai/.eslintrc.js b/plugins/node/opentelemetry-instrumentation-openai/.eslintrc.js new file mode 100644 index 0000000000..f756f4488b --- /dev/null +++ b/plugins/node/opentelemetry-instrumentation-openai/.eslintrc.js @@ -0,0 +1,7 @@ +module.exports = { + "env": { + "mocha": true, + "node": true + }, + ...require('../../../eslint.config.js') +} diff --git a/plugins/node/opentelemetry-instrumentation-openai/CHANGELOG.md 
b/plugins/node/opentelemetry-instrumentation-openai/CHANGELOG.md new file mode 100644 index 0000000000..825c32f0d0 --- /dev/null +++ b/plugins/node/opentelemetry-instrumentation-openai/CHANGELOG.md @@ -0,0 +1 @@ +# Changelog diff --git a/plugins/node/opentelemetry-instrumentation-openai/LICENSE b/plugins/node/opentelemetry-instrumentation-openai/LICENSE new file mode 100644 index 0000000000..261eeb9e9f --- /dev/null +++ b/plugins/node/opentelemetry-instrumentation-openai/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/node/opentelemetry-instrumentation-openai/README.md b/plugins/node/opentelemetry-instrumentation-openai/README.md new file mode 100644 index 0000000000..8e704266a3 --- /dev/null +++ b/plugins/node/opentelemetry-instrumentation-openai/README.md @@ -0,0 +1,74 @@ +# OpenTelemetry OpenAI Instrumentation for Node.js + +[![NPM Published Version][npm-img]][npm-url] +[![Apache License][license-image]][license-image] + +This module provides automatic instrumentation for the [`openai`](https://github.com/openai) module, which may be loaded using the [`@opentelemetry/instrumentation-openai`](https://github.com/open-telemetry/opentelemetry-python-contrib/tree/main/instrumentation/opentelemetry-instrumentation-openai) package. 
+ + +## Installation + +```bash +npm install --save @opentelemetry/instrumentation-openai +``` + +### Supported Versions + +- [`openai`](https://www.npmjs.com/package/openai) versions `>=3.1.0 <5` + +## Usage + +```js +import { + OpenAIInstrumentation, + type InstrumentationHelperConfigInterface, +} from '@opentelemetry/instrumentation-openai'; +import openAI from 'openai'; + +const openAIInstrumentationOptions: InstrumentationHelperConfigInterface = { + environment: "production", + applicationName: "Example Application", + otlpEndpoint: "otlpEndpoint", + otlpHeaders: "otlpHeaders", + traceContent: true, + pricing_json: { "chat": { "gpt-4-1106-preview" : 0.04 } } +} + +const { NodeTracerProvider } = require('@opentelemetry/sdk-trace-node'); +const { registerInstrumentations } = require('@opentelemetry/instrumentation'); + +const provider = new NodeTracerProvider(); +provider.register(); + +const openAIInstrument = new OpenAIInstrumentation(openAIInstrumentationOptions); + +// Auto instrumentation : patch method will only when the "openai" module is required in any trace calls +registerInstrumentations({ + instrumentations: [openAIInstrument], +}); + +// Or + +// Manual patch +openAIInstrument.setTracerProvider(tracerProvider) +openAIInstrument.manualPatch(openAI) +registerInstrumentations({ + tracerProvider, +}); +``` + +## Useful links + +- For more information on OpenTelemetry, visit: +- For more about OpenTelemetry JavaScript: +- For help or feedback on this project, join us in [GitHub Discussions][discussions-url] + +## License + +Apache 2.0 - See [LICENSE][license-url] for more information. 
+ +[discussions-url]: https://github.com/open-telemetry/opentelemetry-js-contrib/discussions +[license-url]: https://github.com/open-telemetry/opentelemetry-js-contrib/blob/main/LICENSE +[license-image]: https://img.shields.io/badge/license-Apache_2.0-green.svg?style=flat +[npm-url]: https://www.npmjs.com/package/@opentelemetry/instrumentation-openai +[npm-img]: https://badge.fury.io/js/%40opentelemetry%2Finstrumentation-openai.svg diff --git a/plugins/node/opentelemetry-instrumentation-openai/package.json b/plugins/node/opentelemetry-instrumentation-openai/package.json new file mode 100644 index 0000000000..ae42009d42 --- /dev/null +++ b/plugins/node/opentelemetry-instrumentation-openai/package.json @@ -0,0 +1,53 @@ +{ + "name": "@opentelemetry/instrumentation-openai", + "version": "0.1.0", + "description": "OpenTelemetry instrumentation for `openai`", + "main": "build/src/index.js", + "types": "build/src/index.d.ts", + "repository": "open-telemetry/opentelemetry-js-contrib", + "scripts": { + "compile": "tsc -p .", + "lint": "eslint . --ext .ts", + "lint:fix": "eslint . 
--ext .ts --fix", + "lint:readme": "node ../../../scripts/lint-readme.js", + "precompile": "tsc --version && lerna run version:update --scope @opentelemetry/instrumentation-openai --include-dependencies", + "prewatch": "npm run precompile", + "prepublishOnly": "npm run compile", + "watch": "tsc -w", + "version:update": "node ../../../scripts/version-update.js" + }, + "keywords": [ + "instrumentation", + "nodejs", + "opentelemetry", + "openai", + "plugin", + "tracing" + ], + "author": "OpenTelemetry Authors", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + }, + "files": [ + "build/src/**/*.js", + "build/src/**/*.js.map", + "build/src/**/*.d.ts" + ], + "publishConfig": { + "access": "public" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + }, + "dependencies": { + "@opentelemetry/instrumentation": "^0.52.0", + "openai": "^4.47.1" + }, + "devDependencies": { + "@opentelemetry/api": "^1.3.0", + "@types/node": "18.6.5", + "typescript": "^5.5.4" + }, + "homepage": "https://github.com/open-telemetry/opentelemetry-js-contrib/tree/main/plugins/node/opentelemetry-instrumentation-openai#readme" +} diff --git a/plugins/node/opentelemetry-instrumentation-openai/src/config.ts b/plugins/node/opentelemetry-instrumentation-openai/src/config.ts new file mode 100644 index 0000000000..5a40f6d38d --- /dev/null +++ b/plugins/node/opentelemetry-instrumentation-openai/src/config.ts @@ -0,0 +1,81 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { InstrumentationHelperConfigInterface } from './types'; +import InstrumentationUtil from './utils'; + +export default class InstrumentationHelperConfig { + /** + * A Singleton Configuration class for instrumentation helper config. + * + * This class maintains a single instance of configuration settings including + * environment details, application name, and tracing information throughout the package. + * + * Attributes: + * environment (string): Deployment environment of the application. + * applicationName (string): Name of the application. + * pricing_json (Object): Pricing information. + * otlpEndpoint (string): Endpoint for OTLP. + * otlpHeaders (Object): Headers for OTLP. + * traceContent (boolean): Flag to enable or disable tracing of content. + */ + + static environment: string; + static applicationName: string; + static pricingInfo: Record | string; + static otlpEndpoint: string; + static otlpHeaders?: Record | string; + static traceContent: boolean; + static pricing_json: Record; + + static async updatePricingJson(pricingArg: Record | string) { + try { + const response = await InstrumentationUtil.fetchPricingInfo(pricingArg); + this.pricingInfo = response; + } catch (e) { + this.pricingInfo = {}; + } + return this.pricingInfo; + } + + static updateConfig({ + environment = 'production', + applicationName = 'default', + otlpEndpoint = '', + otlpHeaders, + traceContent = true, + pricing_json, + }: InstrumentationHelperConfigInterface) { + /** + * Updates the configuration based on provided parameters. + * + * Args: + * environment (string): Deployment environment. + * applicationName (string): Application name. + * otlpEndpoint (string): OTLP endpoint. + * otlpHeaders (Object): OTLP headers. + * traceContent (boolean): Enable or disable content tracing. 
+ * pricing_json (string): path or url to the pricing json file + */ + + this.environment = environment; + this.applicationName = applicationName; + this.otlpEndpoint = otlpEndpoint; + this.otlpHeaders = otlpHeaders; + this.traceContent = traceContent; + this.pricing_json = pricing_json as Record; + } +} diff --git a/plugins/node/opentelemetry-instrumentation-openai/src/index.ts b/plugins/node/opentelemetry-instrumentation-openai/src/index.ts new file mode 100644 index 0000000000..c26f998cff --- /dev/null +++ b/plugins/node/opentelemetry-instrumentation-openai/src/index.ts @@ -0,0 +1,18 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +export * from './instrumentation'; +export * from './types'; diff --git a/plugins/node/opentelemetry-instrumentation-openai/src/instrumentation.ts b/plugins/node/opentelemetry-instrumentation-openai/src/instrumentation.ts new file mode 100644 index 0000000000..27e6a6ece5 --- /dev/null +++ b/plugins/node/opentelemetry-instrumentation-openai/src/instrumentation.ts @@ -0,0 +1,88 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { + InstrumentationBase, + InstrumentationModuleDefinition, + InstrumentationNodeModuleDefinition, + isWrapped, +} from '@opentelemetry/instrumentation'; +import { PACKAGE_NAME, PACKAGE_VERSION } from './version'; +import OpenAI from 'openai'; +import OpenAIWrapper from './wrapper'; +import InstrumentationHelperConfig from './config'; +import { OpenAIInstrumentationConfig } from './types'; + +export class OpenAIInstrumentation extends InstrumentationBase { + constructor(config: OpenAIInstrumentationConfig = {}) { + super(PACKAGE_NAME, PACKAGE_VERSION, config); + InstrumentationHelperConfig.updateConfig(config); + } + + protected init(): + | void + | InstrumentationModuleDefinition + | InstrumentationModuleDefinition[] { + const initModule = new InstrumentationNodeModuleDefinition( + 'openai', + ['>=3.1.0 <5'], + (module: any) => { + const moduleExports: typeof OpenAI = + module[Symbol.toStringTag] === 'Module' + ? module.default // ESM + : module; // CommonJS + + this.patch(moduleExports); + return moduleExports; + }, + (module: any) => { + const moduleExports: typeof OpenAI = + module[Symbol.toStringTag] === 'Module' + ? 
module.default // ESM + : module; // CommonJS + if (moduleExports !== undefined) { + this.unpatch(moduleExports); + } + } + ); + + return [initModule]; + } + + public manualPatch(openai: typeof OpenAI): void { + this.patch(openai); + } + + protected patch(moduleExports: typeof OpenAI) { + try { + if (isWrapped(moduleExports.OpenAI.Chat.Completions.prototype.create)) { + this._unwrap(moduleExports.OpenAI.Chat.Completions.prototype, 'create'); + } + + this._wrap( + moduleExports.OpenAI.Chat.Completions.prototype, + 'create', + OpenAIWrapper._patchChatCompletionCreate(this.tracer) + ); + } catch (e) { + console.error('Error in _patch method:', e); + } + } + + protected unpatch(moduleExports: typeof OpenAI) { + this._unwrap(moduleExports.OpenAI.Chat.Completions.prototype, 'create'); + } +} diff --git a/plugins/node/opentelemetry-instrumentation-openai/src/semantic-conventions.ts b/plugins/node/opentelemetry-instrumentation-openai/src/semantic-conventions.ts new file mode 100644 index 0000000000..736bc55641 --- /dev/null +++ b/plugins/node/opentelemetry-instrumentation-openai/src/semantic-conventions.ts @@ -0,0 +1,56 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +export default class SemanticConvention { + // GenAI General + static GEN_AI_ENDPOINT = 'gen_ai.endpoint'; + static GEN_AI_SYSTEM = 'gen_ai.system'; + static GEN_AI_ENVIRONMENT = 'gen_ai.environment'; + static GEN_AI_APPLICATION_NAME = 'gen_ai.application_name'; + static GEN_AI_TYPE = 'gen_ai.type'; + + // GenAI Request + static GEN_AI_REQUEST_MODEL = 'gen_ai.request.model'; + static GEN_AI_REQUEST_TEMPERATURE = 'gen_ai.request.temperature'; + static GEN_AI_REQUEST_TOP_P = 'gen_ai.request.top_p'; + static GEN_AI_REQUEST_TOP_K = 'gen_ai.request.top_k'; + static GEN_AI_REQUEST_MAX_TOKENS = 'gen_ai.request.max_tokens'; + static GEN_AI_REQUEST_IS_STREAM = 'gen_ai.request.is_stream'; + static GEN_AI_REQUEST_USER = 'gen_ai.request.user'; + static GEN_AI_REQUEST_SEED = 'gen_ai.request.seed'; + static GEN_AI_REQUEST_FREQUENCY_PENALTY = 'gen_ai.request.frequency_penalty'; + static GEN_AI_REQUEST_PRESENCE_PENALTY = 'gen_ai.request.presence_penalty'; + + // GenAI Usage + static GEN_AI_USAGE_PROMPT_TOKENS = 'gen_ai.usage.input_tokens'; + static GEN_AI_USAGE_COMPLETION_TOKENS = 'gen_ai.usage.output_tokens'; + static GEN_AI_USAGE_TOTAL_TOKENS = 'gen_ai.usage.total_tokens'; + static GEN_AI_USAGE_COST = 'gen_ai.usage.cost'; + + // GenAI Response + static GEN_AI_RESPONSE_ID = 'gen_ai.response.id'; + static GEN_AI_RESPONSE_FINISH_REASON = 'gen_ai.response.finish_reason'; + + // GenAI Content + static GEN_AI_CONTENT_PROMPT = 'gen_ai.content.prompt'; + static GEN_AI_CONTENT_COMPLETION = 'gen_ai.completion'; + + // GenAI functionality + static GEN_AI_TYPE_CHAT = 'chat'; + + // GenAI system provider + static GEN_AI_SYSTEM_OPENAI = 'openai'; +} diff --git a/plugins/node/opentelemetry-instrumentation-openai/src/types.ts b/plugins/node/opentelemetry-instrumentation-openai/src/types.ts new file mode 100644 index 0000000000..9672835804 --- /dev/null +++ b/plugins/node/opentelemetry-instrumentation-openai/src/types.ts @@ -0,0 +1,30 @@ +/* + * Copyright The OpenTelemetry Authors + * + 
* Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { InstrumentationConfig } from '@opentelemetry/instrumentation'; + +export interface InstrumentationHelperConfigInterface { + environment?: string; + applicationName?: string; + otlpEndpoint?: string; + otlpHeaders?: Record | string; + traceContent?: boolean; + pricing_json?: Record | string; +} + +export interface OpenAIInstrumentationConfig + extends InstrumentationConfig, + InstrumentationHelperConfigInterface {} diff --git a/plugins/node/opentelemetry-instrumentation-openai/src/utils.ts b/plugins/node/opentelemetry-instrumentation-openai/src/utils.ts new file mode 100644 index 0000000000..00cab11d36 --- /dev/null +++ b/plugins/node/opentelemetry-instrumentation-openai/src/utils.ts @@ -0,0 +1,97 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { Span, SpanStatusCode } from '@opentelemetry/api'; + +type PricingObject = { + chat: Record; +}; + +export default class InstrumentationUtil { + static readonly PROMPT_TOKEN_FACTOR = 1000; + + static getChatModelCost( + model: string, + pricingInfo: Record, + promptTokens: number, + completionTokens: number + ): number { + const pricingObject: PricingObject = pricingInfo as PricingObject; + try { + return ( + (promptTokens / InstrumentationUtil.PROMPT_TOKEN_FACTOR) * + pricingObject.chat[model].promptPrice + + (completionTokens / InstrumentationUtil.PROMPT_TOKEN_FACTOR) * + pricingObject.chat[model].completionPrice + ); + } catch (error) { + console.error(`Error in getChatModelCost: ${error}`); + return 0; + } + } + + static async fetchPricingInfo( + pricingJson: Record | string + ): Promise> { + let pricingUrl = + 'https://raw.githubusercontent.com/openlit/openlit/main/assets/pricing.json'; + if (pricingJson) { + let isUrl = false; + try { + isUrl = !!new URL(pricingJson as string); + } catch { + isUrl = false; + } + + if (isUrl) { + pricingUrl = pricingJson as string; + } else { + try { + if (typeof pricingJson === 'string') { + const json = JSON.parse(pricingJson); + return json; + } else { + const json = JSON.parse(JSON.stringify(pricingJson)); + return json; + } + } catch { + return {}; + } + } + } + + try { + const response = await fetch(pricingUrl); + if (response.ok) { + return response.json(); + } else { + throw new Error( + `HTTP error occurred while fetching pricing info: ${response.status}` + ); + } + } catch (error) { + console.error( + `Unexpected error occurred while fetching pricing info: ${error}` + ); + return {}; + } + } + + static handleException(span: Span, error: Error): void { + span.recordException(error); + span.setStatus({ code: SpanStatusCode.ERROR, message: error.message }); + } +} diff --git a/plugins/node/opentelemetry-instrumentation-openai/src/wrapper.ts 
b/plugins/node/opentelemetry-instrumentation-openai/src/wrapper.ts new file mode 100644 index 0000000000..1cbdd850ea --- /dev/null +++ b/plugins/node/opentelemetry-instrumentation-openai/src/wrapper.ts @@ -0,0 +1,261 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { + AttributeValue, + Span, + SpanKind, + SpanStatusCode, + Tracer, + context, + trace, +} from '@opentelemetry/api'; +import InstrumentationHelperConfig from './config'; +import InstrumentationUtil from './utils'; +import SemanticConvention from './semantic-conventions'; + +export default class OpenAIWrapper { + static setBaseSpanAttributes( + span: Span, + { + genAIEndpoint, + model, + user, + cost, + environment, + applicationName, + }: { + genAIEndpoint: string; + model: string; + user: unknown; + cost: unknown; + environment: string; + applicationName: string; + } + ) { + span.setAttribute( + SemanticConvention.GEN_AI_SYSTEM, + SemanticConvention.GEN_AI_SYSTEM_OPENAI + ); + span.setAttribute(SemanticConvention.GEN_AI_ENDPOINT, genAIEndpoint); + span.setAttribute(SemanticConvention.GEN_AI_ENVIRONMENT, environment); + span.setAttribute( + SemanticConvention.GEN_AI_APPLICATION_NAME, + applicationName + ); + span.setAttribute(SemanticConvention.GEN_AI_REQUEST_MODEL, model); + span.setAttribute( + SemanticConvention.GEN_AI_REQUEST_USER, + user as AttributeValue + ); + if (cost !== undefined) + span.setAttribute( + 
SemanticConvention.GEN_AI_USAGE_COST, + cost as AttributeValue + ); + + span.setStatus({ code: SpanStatusCode.OK }); + } + + static _patchChatCompletionCreate(tracer: Tracer): any { + const genAIEndpoint = 'openai.resources.chat.completions'; + const applicationName = InstrumentationHelperConfig.applicationName; + const environment = InstrumentationHelperConfig.environment; + const traceContent = InstrumentationHelperConfig.traceContent; + return (originalMethod: (...args: any[]) => any) => { + return async function (this: any, ...args: any[]) { + const span = tracer.startSpan(genAIEndpoint, { kind: SpanKind.CLIENT }); + + return context.with(trace.setSpan(context.active(), span), async () => { + try { + const response = await originalMethod.apply(this, args); + const { + messages, + frequency_penalty = 0, + max_tokens = null, + n = 1, + presence_penalty = 0, + seed = null, + temperature = 1, + tools, + top_p, + user, + stream = false, + } = args[0]; + + // Request Params attributes : Start + span.setAttribute( + SemanticConvention.GEN_AI_REQUEST_TOP_P, + top_p || 1 + ); + span.setAttribute( + SemanticConvention.GEN_AI_REQUEST_MAX_TOKENS, + max_tokens + ); + span.setAttribute( + SemanticConvention.GEN_AI_REQUEST_TEMPERATURE, + temperature + ); + span.setAttribute( + SemanticConvention.GEN_AI_REQUEST_PRESENCE_PENALTY, + presence_penalty + ); + span.setAttribute( + SemanticConvention.GEN_AI_REQUEST_FREQUENCY_PENALTY, + frequency_penalty + ); + span.setAttribute(SemanticConvention.GEN_AI_REQUEST_SEED, seed); + span.setAttribute( + SemanticConvention.GEN_AI_REQUEST_IS_STREAM, + stream + ); + + if (!stream) { + if (traceContent) { + // Format 'messages' into a single string + const messagePrompt = messages || []; + const formattedMessages = []; + + for (const message of messagePrompt) { + const role = message.role; + const content = message.content; + + if (Array.isArray(content)) { + const contentStr = content + .map(item => { + if ('type' in item) { + return 
`${item.type}: ${ + item.text ? item.text : item.image_url + }`; + } else { + return `text: ${item.text}`; + } + }) + .join(', '); + formattedMessages.push(`${role}: ${contentStr}`); + } else { + formattedMessages.push(`${role}: ${content}`); + } + } + + const prompt = formattedMessages.join('\n'); + span.setAttribute( + SemanticConvention.GEN_AI_CONTENT_PROMPT, + prompt + ); + } + // Request Params attributes : End + + span.setAttribute( + SemanticConvention.GEN_AI_TYPE, + SemanticConvention.GEN_AI_TYPE_CHAT + ); + span.setAttribute( + SemanticConvention.GEN_AI_RESPONSE_ID, + response.id + ); + + const model = response.model || 'gpt-3.5-turbo'; + + const pricingInfo: Record = + await InstrumentationHelperConfig.updatePricingJson( + InstrumentationHelperConfig.pricing_json + ); + + // Calculate cost of the operation + const cost = InstrumentationUtil.getChatModelCost( + model, + pricingInfo, + response.usage.prompt_tokens, + response.usage.completion_tokens + ); + + OpenAIWrapper.setBaseSpanAttributes(span, { + genAIEndpoint, + model, + user, + cost, + applicationName, + environment, + }); + + if (!tools) { + span.setAttribute( + SemanticConvention.GEN_AI_USAGE_PROMPT_TOKENS, + response.usage.prompt_tokens + ); + span.setAttribute( + SemanticConvention.GEN_AI_USAGE_COMPLETION_TOKENS, + response.usage.completion_tokens + ); + span.setAttribute( + SemanticConvention.GEN_AI_USAGE_TOTAL_TOKENS, + response.usage.total_tokens + ); + span.setAttribute( + SemanticConvention.GEN_AI_RESPONSE_FINISH_REASON, + response.choices[0].finish_reason + ); + + if (traceContent) { + if (n === 1) { + span.setAttribute( + SemanticConvention.GEN_AI_CONTENT_COMPLETION, + response.choices[0].message.content + ); + } else { + let i = 0; + while (i < n) { + const attribute_name = `${SemanticConvention.GEN_AI_CONTENT_COMPLETION}.[i]`; + span.setAttribute( + attribute_name, + response.choices[i].message.content + ); + i += 1; + } + } + } + } else { + span.setAttribute( + 
SemanticConvention.GEN_AI_CONTENT_COMPLETION, + 'Function called with tools' + ); + span.setAttribute( + SemanticConvention.GEN_AI_USAGE_PROMPT_TOKENS, + response.usage.prompt_tokens + ); + span.setAttribute( + SemanticConvention.GEN_AI_USAGE_COMPLETION_TOKENS, + response.usage.completion_tokens + ); + span.setAttribute( + SemanticConvention.GEN_AI_USAGE_TOTAL_TOKENS, + response.usage.total_tokens + ); + } + } + + return response; + } catch (e: unknown) { + InstrumentationUtil.handleException(span, e as Error); + } finally { + span.end(); + } + }); + }; + }; + } +} diff --git a/plugins/node/opentelemetry-instrumentation-openai/tsconfig.json b/plugins/node/opentelemetry-instrumentation-openai/tsconfig.json new file mode 100644 index 0000000000..dd966dbe7e --- /dev/null +++ b/plugins/node/opentelemetry-instrumentation-openai/tsconfig.json @@ -0,0 +1,14 @@ +{ + "extends": "../../../tsconfig.base", + "compilerOptions": { + "target": "es2016", + "rootDir": ".", + "outDir": "build", + "esModuleInterop": true, + "skipLibCheck": true, + "strict": true + }, + "include": [ + "src/**/*.ts" + ] +} From bbe4da0d8e2a8601af6df4f82edb8b37f1e9ef9b Mon Sep 17 00:00:00 2001 From: Aman Agarwal Date: Wed, 7 Aug 2024 01:37:05 +0530 Subject: [PATCH 2/3] updated readme --- plugins/node/opentelemetry-instrumentation-openai/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/node/opentelemetry-instrumentation-openai/README.md b/plugins/node/opentelemetry-instrumentation-openai/README.md index 8e704266a3..9d9f026e70 100644 --- a/plugins/node/opentelemetry-instrumentation-openai/README.md +++ b/plugins/node/opentelemetry-instrumentation-openai/README.md @@ -3,7 +3,7 @@ [![NPM Published Version][npm-img]][npm-url] [![Apache License][license-image]][license-image] -This module provides automatic instrumentation for the [`openai`](https://github.com/openai) module, which may be loaded using the 
[`@opentelemetry/instrumentation-openai`](https://github.com/open-telemetry/opentelemetry-python-contrib/tree/main/instrumentation/opentelemetry-instrumentation-openai) package. +This module provides automatic instrumentation for the [`openai`](https://github.com/openai) module, which may be loaded using the [`@opentelemetry/instrumentation-openai`](https://github.com/open-telemetry/opentelemetry-js-contrib/tree/main/plugins/node/opentelemetry-instrumentation-openai) package. ## Installation From f97f6b3eec87ff24e6debbbed34539772948332c Mon Sep 17 00:00:00 2001 From: Aman Agarwal Date: Sat, 24 Aug 2024 00:59:23 +0530 Subject: [PATCH 3/3] fix: added a support for streaming --- .../README.md | 25 +- .../src/config.ts | 17 +- .../src/instrumentation.ts | 2 +- .../src/types.ts | 6 +- .../src/utils.ts | 84 ++-- .../src/wrapper.ts | 461 +++++++++++------- 6 files changed, 341 insertions(+), 254 deletions(-) diff --git a/plugins/node/opentelemetry-instrumentation-openai/README.md b/plugins/node/opentelemetry-instrumentation-openai/README.md index 9d9f026e70..3824eb085b 100644 --- a/plugins/node/opentelemetry-instrumentation-openai/README.md +++ b/plugins/node/opentelemetry-instrumentation-openai/README.md @@ -5,7 +5,6 @@ This module provides automatic instrumentation for the [`openai`](https://github.com/openai) module, which may be loaded using the [`@opentelemetry/instrumentation-openai`](https://github.com/open-telemetry/opentelemetry-js-contrib/tree/main/plugins/node/opentelemetry-instrumentation-openai) package. 
- ## Installation ```bash @@ -26,13 +25,17 @@ import { import openAI from 'openai'; const openAIInstrumentationOptions: InstrumentationHelperConfigInterface = { - environment: "production", - applicationName: "Example Application", - otlpEndpoint: "otlpEndpoint", - otlpHeaders: "otlpHeaders", + environment: 'production', + applicationName: 'Example Application', + otlpEndpoint: 'otlpEndpoint', + otlpHeaders: 'otlpHeaders', traceContent: true, - pricing_json: { "chat": { "gpt-4-1106-preview" : 0.04 } } -} + pricing_json: { + chat: { + 'gpt-4-1106-preview': { promptPrice: 0.04, completionPrice: 0.03 }, + }, + }, +}; const { NodeTracerProvider } = require('@opentelemetry/sdk-trace-node'); const { registerInstrumentations } = require('@opentelemetry/instrumentation'); @@ -40,7 +43,9 @@ const { registerInstrumentations } = require('@opentelemetry/instrumentation'); const provider = new NodeTracerProvider(); provider.register(); -const openAIInstrument = new OpenAIInstrumentation(openAIInstrumentationOptions); +const openAIInstrument = new OpenAIInstrumentation( + openAIInstrumentationOptions +); // Auto instrumentation : patch method will only when the "openai" module is required in any trace calls registerInstrumentations({ @@ -50,8 +55,8 @@ registerInstrumentations({ // Or // Manual patch -openAIInstrument.setTracerProvider(tracerProvider) -openAIInstrument.manualPatch(openAI) +openAIInstrument.setTracerProvider(tracerProvider); +openAIInstrument.manualPatch(openAI); registerInstrumentations({ tracerProvider, }); diff --git a/plugins/node/opentelemetry-instrumentation-openai/src/config.ts b/plugins/node/opentelemetry-instrumentation-openai/src/config.ts index 5a40f6d38d..97d1902ca4 100644 --- a/plugins/node/opentelemetry-instrumentation-openai/src/config.ts +++ b/plugins/node/opentelemetry-instrumentation-openai/src/config.ts @@ -14,8 +14,7 @@ * limitations under the License. 
*/ -import { InstrumentationHelperConfigInterface } from './types'; -import InstrumentationUtil from './utils'; +import { InstrumentationHelperConfigInterface, PricingObject } from './types'; export default class InstrumentationHelperConfig { /** @@ -39,17 +38,7 @@ export default class InstrumentationHelperConfig { static otlpEndpoint: string; static otlpHeaders?: Record | string; static traceContent: boolean; - static pricing_json: Record; - - static async updatePricingJson(pricingArg: Record | string) { - try { - const response = await InstrumentationUtil.fetchPricingInfo(pricingArg); - this.pricingInfo = response; - } catch (e) { - this.pricingInfo = {}; - } - return this.pricingInfo; - } + static pricing_json?: PricingObject; static updateConfig({ environment = 'production', @@ -76,6 +65,6 @@ export default class InstrumentationHelperConfig { this.otlpEndpoint = otlpEndpoint; this.otlpHeaders = otlpHeaders; this.traceContent = traceContent; - this.pricing_json = pricing_json as Record; + this.pricing_json = pricing_json; } } diff --git a/plugins/node/opentelemetry-instrumentation-openai/src/instrumentation.ts b/plugins/node/opentelemetry-instrumentation-openai/src/instrumentation.ts index 27e6a6ece5..b4776c1d41 100644 --- a/plugins/node/opentelemetry-instrumentation-openai/src/instrumentation.ts +++ b/plugins/node/opentelemetry-instrumentation-openai/src/instrumentation.ts @@ -78,7 +78,7 @@ export class OpenAIInstrumentation extends InstrumentationBase; +}; + export interface InstrumentationHelperConfigInterface { environment?: string; applicationName?: string; otlpEndpoint?: string; otlpHeaders?: Record | string; traceContent?: boolean; - pricing_json?: Record | string; + pricing_json?: PricingObject; } export interface OpenAIInstrumentationConfig diff --git a/plugins/node/opentelemetry-instrumentation-openai/src/utils.ts b/plugins/node/opentelemetry-instrumentation-openai/src/utils.ts index 00cab11d36..df6bf041df 100644 --- 
a/plugins/node/opentelemetry-instrumentation-openai/src/utils.ts +++ b/plugins/node/opentelemetry-instrumentation-openai/src/utils.ts @@ -15,27 +15,35 @@ */ import { Span, SpanStatusCode } from '@opentelemetry/api'; - -type PricingObject = { - chat: Record; -}; +import { encodingForModel, TiktokenModel } from 'js-tiktoken'; +import { PricingObject } from './types'; export default class InstrumentationUtil { static readonly PROMPT_TOKEN_FACTOR = 1000; + static openaiTokens(text: string, model: string): number { + try { + const encoding = encodingForModel(model as TiktokenModel); + return encoding.encode(text).length; + } catch (error) { + console.error(`Error in openaiTokens: ${error}`); + throw error; + } + } + static getChatModelCost( model: string, - pricingInfo: Record, promptTokens: number, - completionTokens: number + completionTokens: number, + pricingInfo?: PricingObject ): number { - const pricingObject: PricingObject = pricingInfo as PricingObject; try { + if (!pricingInfo) return 0; return ( (promptTokens / InstrumentationUtil.PROMPT_TOKEN_FACTOR) * - pricingObject.chat[model].promptPrice + + (pricingInfo.chat[model].promptPrice || 0) + (completionTokens / InstrumentationUtil.PROMPT_TOKEN_FACTOR) * - pricingObject.chat[model].completionPrice + pricingInfo.chat[model].completionPrice ); } catch (error) { console.error(`Error in getChatModelCost: ${error}`); @@ -43,55 +51,19 @@ export default class InstrumentationUtil { } } - static async fetchPricingInfo( - pricingJson: Record | string - ): Promise> { - let pricingUrl = - 'https://raw.githubusercontent.com/openlit/openlit/main/assets/pricing.json'; - if (pricingJson) { - let isUrl = false; - try { - isUrl = !!new URL(pricingJson as string); - } catch { - isUrl = false; - } - - if (isUrl) { - pricingUrl = pricingJson as string; - } else { - try { - if (typeof pricingJson === 'string') { - const json = JSON.parse(pricingJson); - return json; - } else { - const json = JSON.parse(JSON.stringify(pricingJson)); 
- return json; - } - } catch { - return {}; - } - } - } - - try { - const response = await fetch(pricingUrl); - if (response.ok) { - return response.json(); - } else { - throw new Error( - `HTTP error occurred while fetching pricing info: ${response.status}` - ); - } - } catch (error) { - console.error( - `Unexpected error occurred while fetching pricing info: ${error}` - ); - return {}; - } - } - static handleException(span: Span, error: Error): void { span.recordException(error); span.setStatus({ code: SpanStatusCode.ERROR, message: error.message }); } + + static async createStreamProxy (stream: any, generatorFuncResponse: any): Promise { + return new Proxy(stream, { + get (target, prop, receiver) { + if (prop === Symbol.asyncIterator) { + return () => generatorFuncResponse + } + return Reflect.get(target, prop, receiver) + } + }) + } } diff --git a/plugins/node/opentelemetry-instrumentation-openai/src/wrapper.ts b/plugins/node/opentelemetry-instrumentation-openai/src/wrapper.ts index 1cbdd850ea..cab43dd736 100644 --- a/plugins/node/opentelemetry-instrumentation-openai/src/wrapper.ts +++ b/plugins/node/opentelemetry-instrumentation-openai/src/wrapper.ts @@ -34,14 +34,12 @@ export default class OpenAIWrapper { genAIEndpoint, model, user, - cost, environment, applicationName, }: { genAIEndpoint: string; model: string; user: unknown; - cost: unknown; environment: string; applicationName: string; } @@ -61,201 +59,320 @@ export default class OpenAIWrapper { SemanticConvention.GEN_AI_REQUEST_USER, user as AttributeValue ); - if (cost !== undefined) - span.setAttribute( - SemanticConvention.GEN_AI_USAGE_COST, - cost as AttributeValue - ); span.setStatus({ code: SpanStatusCode.OK }); } static _patchChatCompletionCreate(tracer: Tracer): any { const genAIEndpoint = 'openai.resources.chat.completions'; - const applicationName = InstrumentationHelperConfig.applicationName; - const environment = InstrumentationHelperConfig.environment; - const traceContent = 
InstrumentationHelperConfig.traceContent; return (originalMethod: (...args: any[]) => any) => { return async function (this: any, ...args: any[]) { const span = tracer.startSpan(genAIEndpoint, { kind: SpanKind.CLIENT }); + return context + .with(trace.setSpan(context.active(), span), async () => { + return originalMethod.apply(this, args); + }) + .then(response => { + const { stream = false } = args[0]; - return context.with(trace.setSpan(context.active(), span), async () => { - try { - const response = await originalMethod.apply(this, args); - const { - messages, - frequency_penalty = 0, - max_tokens = null, - n = 1, - presence_penalty = 0, - seed = null, - temperature = 1, - tools, - top_p, - user, - stream = false, - } = args[0]; + if (!!stream) { + return InstrumentationUtil.createStreamProxy( + response, + OpenAIWrapper._chatCompletionGenerator({ + args, + genAIEndpoint, + response, + span, + }) + ); + } - // Request Params attributes : Start - span.setAttribute( - SemanticConvention.GEN_AI_REQUEST_TOP_P, - top_p || 1 - ); - span.setAttribute( - SemanticConvention.GEN_AI_REQUEST_MAX_TOKENS, - max_tokens - ); - span.setAttribute( - SemanticConvention.GEN_AI_REQUEST_TEMPERATURE, - temperature - ); - span.setAttribute( - SemanticConvention.GEN_AI_REQUEST_PRESENCE_PENALTY, - presence_penalty - ); - span.setAttribute( - SemanticConvention.GEN_AI_REQUEST_FREQUENCY_PENALTY, - frequency_penalty - ); - span.setAttribute(SemanticConvention.GEN_AI_REQUEST_SEED, seed); - span.setAttribute( - SemanticConvention.GEN_AI_REQUEST_IS_STREAM, - stream - ); + return OpenAIWrapper._chatCompletion({ + args, + genAIEndpoint, + response, + span, + }); + }) + .catch((e: any) => { + InstrumentationUtil.handleException(span, e); + span.end(); + }); + }; + }; + } - if (!stream) { - if (traceContent) { - // Format 'messages' into a single string - const messagePrompt = messages || []; - const formattedMessages = []; + static async _chatCompletion({ + args, + genAIEndpoint, + response, + 
span, + }: { + args: any[]; + genAIEndpoint: string; + response: any; + span: Span; + }): Promise { + try { + await OpenAIWrapper._chatCompletionCommonSetter({ + args, + genAIEndpoint, + result: response, + span, + }); + return response; + } catch (e: any) { + InstrumentationUtil.handleException(span, e); + } finally { + span.end(); + } + } - for (const message of messagePrompt) { - const role = message.role; - const content = message.content; + static async *_chatCompletionGenerator({ + args, + genAIEndpoint, + response, + span, + }: { + args: any[]; + genAIEndpoint: string; + response: any; + span: Span; + }): AsyncGenerator { + try { + const { messages } = args[0]; + let { tools } = args[0]; + const result = { + id: '0', + created: -1, + model: '', + choices: [ + { + index: 0, + logprobs: null, + finish_reason: 'stop', + message: { role: 'assistant', content: '' }, + }, + ], + usage: { + prompt_tokens: 0, + completion_tokens: 0, + total_tokens: 0, + }, + }; + for await (const chunk of response) { + result.id = chunk.id; + result.created = chunk.created; + result.model = chunk.model; - if (Array.isArray(content)) { - const contentStr = content - .map(item => { - if ('type' in item) { - return `${item.type}: ${ - item.text ? 
item.text : item.image_url - }`; - } else { - return `text: ${item.text}`; - } - }) - .join(', '); - formattedMessages.push(`${role}: ${contentStr}`); - } else { - formattedMessages.push(`${role}: ${content}`); - } - } + if (chunk.choices[0]?.finish_reason) { + result.choices[0].finish_reason = chunk.choices[0].finish_reason; + } + if (chunk.choices[0]?.logprobs) { + result.choices[0].logprobs = chunk.choices[0].logprobs; + } + if (chunk.choices[0]?.delta.content) { + result.choices[0].message.content += chunk.choices[0].delta.content; + } - const prompt = formattedMessages.join('\n'); - span.setAttribute( - SemanticConvention.GEN_AI_CONTENT_PROMPT, - prompt - ); - } - // Request Params attributes : End + if (chunk.choices[0]?.delta.tool_calls) { + tools = true; + } - span.setAttribute( - SemanticConvention.GEN_AI_TYPE, - SemanticConvention.GEN_AI_TYPE_CHAT - ); - span.setAttribute( - SemanticConvention.GEN_AI_RESPONSE_ID, - response.id - ); + yield chunk; + } - const model = response.model || 'gpt-3.5-turbo'; + let promptTokens = 0; + for (const message of messages || []) { + promptTokens += + InstrumentationUtil.openaiTokens( + message.content as string, + result.model + ) ?? 0; + } - const pricingInfo: Record = - await InstrumentationHelperConfig.updatePricingJson( - InstrumentationHelperConfig.pricing_json - ); + const completionTokens = InstrumentationUtil.openaiTokens( + result.choices[0].message.content ?? 
'', + result.model + ); + if (completionTokens) { + result.usage = { + prompt_tokens: promptTokens, + completion_tokens: completionTokens, + total_tokens: promptTokens + completionTokens, + }; + } - // Calculate cost of the operation - const cost = InstrumentationUtil.getChatModelCost( - model, - pricingInfo, - response.usage.prompt_tokens, - response.usage.completion_tokens - ); + args[0].tools = tools; + + await OpenAIWrapper._chatCompletionCommonSetter({ + args, + genAIEndpoint, + result, + span, + }); - OpenAIWrapper.setBaseSpanAttributes(span, { - genAIEndpoint, - model, - user, - cost, - applicationName, - environment, - }); + return result; + } catch (e: any) { + InstrumentationUtil.handleException(span, e); + } finally { + span.end(); + } + } - if (!tools) { - span.setAttribute( - SemanticConvention.GEN_AI_USAGE_PROMPT_TOKENS, - response.usage.prompt_tokens - ); - span.setAttribute( - SemanticConvention.GEN_AI_USAGE_COMPLETION_TOKENS, - response.usage.completion_tokens - ); - span.setAttribute( - SemanticConvention.GEN_AI_USAGE_TOTAL_TOKENS, - response.usage.total_tokens - ); - span.setAttribute( - SemanticConvention.GEN_AI_RESPONSE_FINISH_REASON, - response.choices[0].finish_reason - ); + static async _chatCompletionCommonSetter({ + args, + genAIEndpoint, + result, + span, + }: { + args: any[]; + genAIEndpoint: string; + result: any; + span: Span; + }) { + const applicationName = InstrumentationHelperConfig.applicationName; + const environment = InstrumentationHelperConfig.environment; + const traceContent = InstrumentationHelperConfig.traceContent; + const { + messages, + frequency_penalty = 0, + max_tokens = null, + n = 1, + presence_penalty = 0, + seed = null, + temperature = 1, + top_p, + user, + stream = false, + tools, + } = args[0]; - if (traceContent) { - if (n === 1) { - span.setAttribute( - SemanticConvention.GEN_AI_CONTENT_COMPLETION, - response.choices[0].message.content - ); - } else { - let i = 0; - while (i < n) { - const attribute_name = 
`${SemanticConvention.GEN_AI_CONTENT_COMPLETION}.[i]`; - span.setAttribute( - attribute_name, - response.choices[i].message.content - ); - i += 1; - } - } - } + // Request Params attributes : Start + span.setAttribute(SemanticConvention.GEN_AI_REQUEST_TOP_P, top_p || 1); + span.setAttribute(SemanticConvention.GEN_AI_REQUEST_MAX_TOKENS, max_tokens); + span.setAttribute( + SemanticConvention.GEN_AI_REQUEST_TEMPERATURE, + temperature + ); + span.setAttribute( + SemanticConvention.GEN_AI_REQUEST_PRESENCE_PENALTY, + presence_penalty + ); + span.setAttribute( + SemanticConvention.GEN_AI_REQUEST_FREQUENCY_PENALTY, + frequency_penalty + ); + span.setAttribute(SemanticConvention.GEN_AI_REQUEST_SEED, seed); + span.setAttribute(SemanticConvention.GEN_AI_REQUEST_IS_STREAM, stream); + + if (traceContent) { + // Format 'messages' into a single string + const messagePrompt = messages || []; + const formattedMessages = []; + + for (const message of messagePrompt) { + const role = message.role; + const content = message.content; + + if (Array.isArray(content)) { + const contentStr = content + .map(item => { + if ('type' in item) { + return `${item.type}: ${ + item.text ? 
item.text : item.image_url + }`; } else { - span.setAttribute( - SemanticConvention.GEN_AI_CONTENT_COMPLETION, - 'Function called with tools' - ); - span.setAttribute( - SemanticConvention.GEN_AI_USAGE_PROMPT_TOKENS, - response.usage.prompt_tokens - ); - span.setAttribute( - SemanticConvention.GEN_AI_USAGE_COMPLETION_TOKENS, - response.usage.completion_tokens - ); - span.setAttribute( - SemanticConvention.GEN_AI_USAGE_TOTAL_TOKENS, - response.usage.total_tokens - ); + return `text: ${item.text}`; } - } + }) + .join(', '); + formattedMessages.push(`${role}: ${contentStr}`); + } else { + formattedMessages.push(`${role}: ${content}`); + } + } - return response; - } catch (e: unknown) { - InstrumentationUtil.handleException(span, e as Error); - } finally { - span.end(); + const prompt = formattedMessages.join('\n'); + span.setAttribute(SemanticConvention.GEN_AI_CONTENT_PROMPT, prompt); + } + // Request Params attributes : End + + span.setAttribute( + SemanticConvention.GEN_AI_TYPE, + SemanticConvention.GEN_AI_TYPE_CHAT + ); + + span.setAttribute(SemanticConvention.GEN_AI_RESPONSE_ID, result.id); + + const model = result.model || 'gpt-3.5-turbo'; + + if (InstrumentationHelperConfig.pricing_json) { + // Calculate cost of the operation + const cost = InstrumentationUtil.getChatModelCost( + model, + result.usage.prompt_tokens, + result.usage.completion_tokens, + InstrumentationHelperConfig.pricing_json + ); + span.setAttribute( + SemanticConvention.GEN_AI_USAGE_COST, + cost as AttributeValue + ); + } + + OpenAIWrapper.setBaseSpanAttributes(span, { + genAIEndpoint, + model, + user, + applicationName, + environment, + }); + + span.setAttribute( + SemanticConvention.GEN_AI_USAGE_PROMPT_TOKENS, + result.usage.prompt_tokens + ); + span.setAttribute( + SemanticConvention.GEN_AI_USAGE_COMPLETION_TOKENS, + result.usage.completion_tokens + ); + span.setAttribute( + SemanticConvention.GEN_AI_USAGE_TOTAL_TOKENS, + result.usage.total_tokens + ); + + if 
(result.choices[0].finish_reason) {
+      span.setAttribute(
+        SemanticConvention.GEN_AI_RESPONSE_FINISH_REASON,
+        result.choices[0].finish_reason
+      );
+    }
+
+    if (tools) {
+      span.setAttribute(
+        SemanticConvention.GEN_AI_CONTENT_COMPLETION,
+        'Function called with tools'
+      );
+    } else {
+      if (traceContent) {
+        if (n === 1) {
+          span.setAttribute(
+            SemanticConvention.GEN_AI_CONTENT_COMPLETION,
+            result.choices[0].message.content
+          );
+        } else {
+          let i = 0;
+          while (i < n) {
+            const attribute_name = `${SemanticConvention.GEN_AI_CONTENT_COMPLETION}.${i}`;
+            span.setAttribute(
+              attribute_name,
+              result.choices[i].message.content
+            );
+            i += 1;
           }
-        }
-      });
-    };
-  };
+        }
+      }
+    }
+  }
 }
}