first commit

This commit is contained in:
Stefan Hacker
2026-04-03 09:38:48 +02:00
commit 37ad745546
47450 changed files with 3120798 additions and 0 deletions
@@ -0,0 +1,4 @@
// Type declarations for the @azure/core-tracing publisher (azure-coretracing.pub).
import { IModulePatcher } from "diagnostic-channel";
// Marker key set to true on the patched tracer so consumers can recognize it.
export declare const AzureMonitorSymbol = "Azure_Monitor_Tracer";
// Monkey-patch description consumed by diagnostic-channel's registerMonkeyPatch.
export declare const azureCoreTracing: IModulePatcher;
// Registers the patch for the "@azure/core-tracing" module.
export declare function enable(): void;
@@ -0,0 +1,76 @@
"use strict";
// Compiled TypeScript helper: shallow-merge of own enumerable properties
// (fallback for environments without Object.assign).
var __assign = (this && this.__assign) || Object.assign || function(t) {
    for (var s, i = 1, n = arguments.length; i < n; i++) {
        s = arguments[i];
        for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
            t[p] = s[p];
    }
    return t;
};
Object.defineProperty(exports, "__esModule", { value: true });
var diagnostic_channel_1 = require("diagnostic-channel");
// Marker placed on the patched tracer so other components can detect it.
exports.AzureMonitorSymbol = "Azure_Monitor_Tracer";
// Module-level guard: the tracer must only be built and registered once.
var isPatched = false;
/**
 * By default, @azure/core-tracing default tracer is a NoopTracer.
 * This patching changes the default tracer to a patched BasicTracer
 * which emits ended spans as diag-channel events.
 *
 * The @opentelemetry/tracing package must be installed to use these patches
 * https://www.npmjs.com/package/@opentelemetry/tracing
 * @param coreTracing
 */
var azureCoreTracingPatchFunction = function (coreTracing) {
    if (isPatched) {
        // tracer is already cached -- noop
        return coreTracing;
    }
    try {
        var tracing = require("@opentelemetry/tracing");
        var api_1 = require("@opentelemetry/api");
        // Reuse diagnostic-channel's context manager (when configured) so
        // OpenTelemetry context flows with the channel's context tracking.
        var tracerConfig = diagnostic_channel_1.channel.spanContextPropagator
            ? { contextManager: diagnostic_channel_1.channel.spanContextPropagator }
            : undefined;
        new tracing.BasicTracerProvider().register(tracerConfig);
        var tracer = api_1.trace.getTracer("applicationinsights tracer");
        // Patch startSpan instead of using spanProcessor.onStart because parentSpan must be
        // set while the span is constructed
        var startSpanOriginal_1 = tracer.startSpan;
        tracer.startSpan = function (name, options) {
            // if no parent span was provided, apply the current context
            if (!options || !options.parent) {
                var parentOperation = api_1.getSpan(api_1.context.active());
                // NOTE(review): ".operation.traceparent" looks like an
                // applicationinsights-specific wrapper around the active span,
                // not a standard OpenTelemetry span shape — confirm with callers.
                if (parentOperation && parentOperation.operation && parentOperation.operation.traceparent) {
                    options = __assign({}, options, { parent: {
                            traceId: parentOperation.operation.traceparent.traceId,
                            spanId: parentOperation.operation.traceparent.spanId,
                            // presumably 1 == the "sampled" trace flag — TODO confirm
                            traceFlags: 1,
                        } });
                }
            }
            var span = startSpanOriginal_1.call(this, name, options);
            // Wrap end() so every finished span is published on the channel.
            var originalEnd = span.end;
            span.end = function () {
                var result = originalEnd.apply(this, arguments);
                diagnostic_channel_1.channel.publish("azure-coretracing", span);
                return result;
            };
            return span;
        };
        api_1.getSpan(api_1.context.active()); // seed OpenTelemetryScopeManagerWrapper with "active" symbol
        tracer[exports.AzureMonitorSymbol] = true;
        coreTracing.setTracer(tracer); // recordSpanData is not present on BasicTracer - cast to any
        isPatched = true;
    }
    catch (e) { /* squash errors */ }
    return coreTracing;
};
exports.azureCoreTracing = {
    versionSpecifier: ">= 1.0.0 < 2.0.0",
    patch: azureCoreTracingPatchFunction,
};
function enable() {
    diagnostic_channel_1.channel.registerMonkeyPatch("@azure/core-tracing", exports.azureCoreTracing);
}
exports.enable = enable;
//# sourceMappingURL=azure-coretracing.pub.js.map
+7
View File
@@ -0,0 +1,7 @@
import { IModulePatcher } from "diagnostic-channel";
// Payload published on the "bunyan" channel for every emitted log record:
// the numeric bunyan level and the serialized record string.
export interface IBunyanData {
    level: number;
    result: string;
}
export declare const bunyan: IModulePatcher;
// Registers the patch for the "bunyan" module.
export declare function enable(): void;
+29
View File
@@ -0,0 +1,29 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
var diagnostic_channel_1 = require("diagnostic-channel");
var bunyanPatchFunction = function (originalBunyan) {
var originalEmit = originalBunyan.prototype._emit;
originalBunyan.prototype._emit = function (rec, noemit) {
var ret = originalEmit.apply(this, arguments);
if (!noemit) {
var str = ret;
if (!str) {
str = originalEmit.call(this, rec, true);
}
diagnostic_channel_1.channel.publish("bunyan", { level: rec.level, result: str });
}
return ret;
};
return originalBunyan;
};
exports.bunyan = {
versionSpecifier: ">= 1.0.0 < 2.0.0",
patch: bunyanPatchFunction,
};
function enable() {
diagnostic_channel_1.channel.registerMonkeyPatch("bunyan", exports.bunyan);
}
exports.enable = enable;
//# sourceMappingURL=bunyan.pub.js.map
+7
View File
@@ -0,0 +1,7 @@
import { IModulePatcher } from "diagnostic-channel";
// Payload published on the "console" channel: the written text, plus
// stderr: true when the write went to the error stream.
export interface IConsoleData {
    message: string;
    stderr?: boolean;
}
export declare const console: IModulePatcher;
// Registers the patch for the built-in "console" module.
export declare function enable(): void;
+63
View File
@@ -0,0 +1,63 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
var diagnostic_channel_1 = require("diagnostic-channel");
var stream_1 = require("stream");
var consolePatchFunction = function (originalConsole) {
var aiLoggingOutStream = new stream_1.Writable();
var aiLoggingErrStream = new stream_1.Writable();
// Default console is roughly equivalent to `new Console(process.stdout, process.stderr)`
// We create a version which publishes to the channel and also to stdout/stderr
aiLoggingOutStream.write = function (chunk) {
if (!chunk) {
return true;
}
var message = chunk.toString();
diagnostic_channel_1.channel.publish("console", { message: message });
return true;
};
aiLoggingErrStream.write = function (chunk) {
if (!chunk) {
return true;
}
var message = chunk.toString();
diagnostic_channel_1.channel.publish("console", { message: message, stderr: true });
return true;
};
var aiLoggingConsole = new originalConsole.Console(aiLoggingOutStream, aiLoggingErrStream);
var consoleMethods = ["log", "info", "warn", "error", "dir", "time", "timeEnd", "trace", "assert"];
var _loop_1 = function (method) {
var originalMethod = originalConsole[method];
if (originalMethod) {
originalConsole[method] = function () {
if (aiLoggingConsole[method]) {
try {
aiLoggingConsole[method].apply(aiLoggingConsole, arguments);
}
catch (e) {
// Ignore errors; allow the original method to throw if necessary
}
}
return originalMethod.apply(originalConsole, arguments);
};
}
};
for (var _i = 0, consoleMethods_1 = consoleMethods; _i < consoleMethods_1.length; _i++) {
var method = consoleMethods_1[_i];
_loop_1(method);
}
return originalConsole;
};
exports.console = {
versionSpecifier: ">= 4.0.0",
patch: consolePatchFunction,
};
function enable() {
diagnostic_channel_1.channel.registerMonkeyPatch("console", exports.console);
// Force patching of console
/* tslint:disable-next-line:no-var-requires */
require("console");
}
exports.enable = enable;
//# sourceMappingURL=console.pub.js.map
+14
View File
@@ -0,0 +1,14 @@
// Aggregated type declarations: re-exports every publisher module and an
// enable() helper that turns all of them on.
import * as azuresdk from "./azure-coretracing.pub";
import * as bunyan from "./bunyan.pub";
import * as consolePub from "./console.pub";
import * as mongodbCore from "./mongodb-core.pub";
import * as mongodb from "./mongodb.pub";
import * as mysql from "./mysql.pub";
import * as pgPool from "./pg-pool.pub";
import * as pg from "./pg.pub";
import { IPostgresData, IPostgresResult } from "./pg.pub";
import * as redis from "./redis.pub";
import * as tedious from "./tedious.pub";
import * as winston from "./winston.pub";
export { azuresdk, bunyan, consolePub as console, mongodbCore, mongodb, mysql, redis, winston, pg, pgPool, tedious, IPostgresData, IPostgresResult, };
export declare function enable(): void;
+41
View File
@@ -0,0 +1,41 @@
"use strict";
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
Object.defineProperty(exports, "__esModule", { value: true });
/**
 * Aggregates every diagnostic-channel publisher and exposes a single enable()
 * that registers them all.
 */
var azuresdk = require("./azure-coretracing.pub");
exports.azuresdk = azuresdk;
var bunyan = require("./bunyan.pub");
exports.bunyan = bunyan;
var consolePub = require("./console.pub");
exports.console = consolePub;
var mongodbCore = require("./mongodb-core.pub");
exports.mongodbCore = mongodbCore;
var mongodb = require("./mongodb.pub");
exports.mongodb = mongodb;
var mysql = require("./mysql.pub");
exports.mysql = mysql;
var pgPool = require("./pg-pool.pub");
exports.pgPool = pgPool;
var pg = require("./pg.pub");
exports.pg = pg;
var redis = require("./redis.pub");
exports.redis = redis;
var tedious = require("./tedious.pub");
exports.tedious = tedious;
var winston = require("./winston.pub");
exports.winston = winston;
function enable() {
    // Registration order preserved: bunyan, console, mongodb-core, mongodb,
    // mysql, pg, pg-pool, redis, winston, azure-sdk, tedious.
    var publishers = [
        bunyan, consolePub, mongodbCore, mongodb, mysql,
        pg, pgPool, redis, winston, azuresdk, tedious,
    ];
    publishers.forEach(function (publisher) { publisher.enable(); });
}
exports.enable = enable;
//# sourceMappingURL=index.js.map
@@ -0,0 +1,3 @@
// Type declarations for the mongodb-core publisher (mongodb-core.pub).
import { IModulePatcher } from "diagnostic-channel";
export declare const mongoCore: IModulePatcher;
// Registers the patch for the "mongodb-core" module.
export declare function enable(): void;
@@ -0,0 +1,42 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
var diagnostic_channel_1 = require("diagnostic-channel");
/**
 * Context-preservation patch for mongodb-core: after Server.connect, wraps the
 * pool's write/logout callbacks with channel.bindToContext so continuation-local
 * state is restored when responses come back off the shared connection pool.
 */
var mongodbcorePatchFunction = function (originalMongoCore) {
    var originalConnect = originalMongoCore.Server.prototype.connect;
    originalMongoCore.Server.prototype.connect = function contextPreservingConnect() {
        var ret = originalConnect.apply(this, arguments);
        // Messages sent to mongo progress through a pool
        // This can result in context getting mixed between different responses
        // so we wrap the callbacks to restore appropriate state
        var originalWrite = this.s.pool.write;
        this.s.pool.write = function contextPreservingWrite() {
            // the callback is either argument 1 or 2 depending on whether an
            // options argument was supplied
            var cbidx = typeof arguments[1] === "function" ? 1 : 2;
            if (typeof arguments[cbidx] === "function") {
                arguments[cbidx] = diagnostic_channel_1.channel.bindToContext(arguments[cbidx]);
            }
            return originalWrite.apply(this, arguments);
        };
        // Logout is a special case, it doesn't call the write function but instead
        // directly calls into connection.write
        var originalLogout = this.s.pool.logout;
        this.s.pool.logout = function contextPreservingLogout() {
            if (typeof arguments[1] === "function") {
                arguments[1] = diagnostic_channel_1.channel.bindToContext(arguments[1]);
            }
            return originalLogout.apply(this, arguments);
        };
        return ret;
    };
    return originalMongoCore;
};
exports.mongoCore = {
    versionSpecifier: ">= 2.0.0 < 4.0.0",
    patch: mongodbcorePatchFunction,
};
function enable() {
    diagnostic_channel_1.channel.registerMonkeyPatch("mongodb-core", exports.mongoCore);
}
exports.enable = enable;
//# sourceMappingURL=mongodb-core.pub.js.map
+19
View File
@@ -0,0 +1,19 @@
import { IModulePatcher } from "diagnostic-channel";
// Payload published on the "mongodb" channel for each completed command.
export interface IMongoData {
    // Data captured from the APM "started" event (plus a capture timestamp).
    startedData: {
        databaseName?: string;
        command?: any;
        time: Date;
    };
    // The APM "succeeded" or "failed" event as delivered by the driver.
    event: {
        commandName?: string;
        duration?: number;
        failure?: string;
        reply?: any;
    };
    succeeded: boolean;
}
// Patchers for mongodb 2.0.0–3.0.5, >3.0.5 <3.3.0, and 3.3.x–3.x respectively.
export declare const mongo2: IModulePatcher;
export declare const mongo3: IModulePatcher;
export declare const mongo330: IModulePatcher;
export declare function enable(): void;
+181
View File
@@ -0,0 +1,181 @@
"use strict";
// Compiled TypeScript helper: shallow-merge of own enumerable properties
// (fallback for environments without Object.assign).
var __assign = (this && this.__assign) || Object.assign || function(t) {
    for (var s, i = 1, n = arguments.length; i < n; i++) {
        s = arguments[i];
        for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
            t[p] = s[p];
    }
    return t;
};
Object.defineProperty(exports, "__esModule", { value: true });
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
var diagnostic_channel_1 = require("diagnostic-channel");
// mongodb 2.0.0–3.0.5: uses the driver's APM instrument() API. The
// operationIdGenerator hook lets us capture a context-bound trampoline when a
// command starts and replay it when publishing the completion event.
var mongodbPatchFunction = function (originalMongo) {
    var listener = originalMongo.instrument({
        operationIdGenerator: {
            next: function () {
                return diagnostic_channel_1.channel.bindToContext(function (cb) { return cb(); });
            },
        },
    });
    // Maps requestId -> started-event data so it can be joined with the
    // matching succeeded/failed event.
    var eventMap = {};
    listener.on("started", function (event) {
        if (eventMap[event.requestId]) {
            // Note: Mongo can generate 2 completely separate requests
            // which share the same requestId, if a certain race condition is triggered.
            // For now, we accept that this can happen and potentially miss or mislabel some events.
            return;
        }
        eventMap[event.requestId] = __assign({}, event, { time: new Date() });
    });
    listener.on("succeeded", function (event) {
        var startedData = eventMap[event.requestId];
        if (startedData) {
            delete eventMap[event.requestId];
        }
        if (typeof event.operationId === "function") {
            event.operationId(function () { return diagnostic_channel_1.channel.publish("mongodb", { startedData: startedData, event: event, succeeded: true }); });
        }
        else {
            // fallback -- correlation will not work here
            diagnostic_channel_1.channel.publish("mongodb", { startedData: startedData, event: event, succeeded: true });
        }
    });
    listener.on("failed", function (event) {
        var startedData = eventMap[event.requestId];
        if (startedData) {
            delete eventMap[event.requestId];
        }
        if (typeof event.operationId === "function") {
            event.operationId(function () { return diagnostic_channel_1.channel.publish("mongodb", { startedData: startedData, event: event, succeeded: false }); });
        }
        else {
            // fallback -- correlation will not work here
            diagnostic_channel_1.channel.publish("mongodb", { startedData: startedData, event: event, succeeded: false });
        }
    });
    return originalMongo;
};
// mongodb >3.0.5 <3.3.0: instrument() no longer accepts operationIdGenerator,
// so keep our own requestId -> context-bound trampoline map instead.
var mongodb3PatchFunction = function (originalMongo) {
    var listener = originalMongo.instrument();
    var eventMap = {};
    var contextMap = {};
    listener.on("started", function (event) {
        if (eventMap[event.requestId]) {
            // Note: Mongo can generate 2 completely separate requests
            // which share the same requestId, if a certain race condition is triggered.
            // For now, we accept that this can happen and potentially miss or mislabel some events.
            return;
        }
        contextMap[event.requestId] = diagnostic_channel_1.channel.bindToContext(function (cb) { return cb(); });
        eventMap[event.requestId] = __assign({}, event, { time: new Date() });
    });
    listener.on("succeeded", function (event) {
        var startedData = eventMap[event.requestId];
        if (startedData) {
            delete eventMap[event.requestId];
        }
        if (typeof event === "object" && typeof contextMap[event.requestId] === "function") {
            contextMap[event.requestId](function () { return diagnostic_channel_1.channel.publish("mongodb", { startedData: startedData, event: event, succeeded: true }); });
            delete contextMap[event.requestId];
        }
    });
    listener.on("failed", function (event) {
        var startedData = eventMap[event.requestId];
        if (startedData) {
            delete eventMap[event.requestId];
        }
        if (typeof event === "object" && typeof contextMap[event.requestId] === "function") {
            contextMap[event.requestId](function () { return diagnostic_channel_1.channel.publish("mongodb", { startedData: startedData, event: event, succeeded: false }); });
            delete contextMap[event.requestId];
        }
    });
    return originalMongo;
};
// In mongodb 3.3.0, mongodb-core was merged into mongodb, so the same patching
// can be used here. this.s.pool was changed to this.s.coreTopology.s.pool
var mongodbcorePatchFunction = function (originalMongo) {
    var originalConnect = originalMongo.Server.prototype.connect;
    originalMongo.Server.prototype.connect = function contextPreservingConnect() {
        var ret = originalConnect.apply(this, arguments);
        // Messages sent to mongo progress through a pool
        // This can result in context getting mixed between different responses
        // so we wrap the callbacks to restore appropriate state
        var originalWrite = this.s.coreTopology.s.pool.write;
        this.s.coreTopology.s.pool.write = function contextPreservingWrite() {
            // the callback is either argument 1 or 2 depending on whether an
            // options argument was supplied
            var cbidx = typeof arguments[1] === "function" ? 1 : 2;
            if (typeof arguments[cbidx] === "function") {
                arguments[cbidx] = diagnostic_channel_1.channel.bindToContext(arguments[cbidx]);
            }
            return originalWrite.apply(this, arguments);
        };
        // Logout is a special case, it doesn't call the write function but instead
        // directly calls into connection.write
        var originalLogout = this.s.coreTopology.s.pool.logout;
        this.s.coreTopology.s.pool.logout = function contextPreservingLogout() {
            if (typeof arguments[1] === "function") {
                arguments[1] = diagnostic_channel_1.channel.bindToContext(arguments[1]);
            }
            return originalLogout.apply(this, arguments);
        };
        return ret;
    };
    return originalMongo;
};
// mongodb 3.3.x: combine the merged-in core patches with the APM listener.
// Note: unlike mongodb3PatchFunction, the started event is stored as-is
// (no captured time field).
var mongodb330PatchFunction = function (originalMongo) {
    mongodbcorePatchFunction(originalMongo); // apply mongodb-core patches
    var listener = originalMongo.instrument();
    var eventMap = {};
    var contextMap = {};
    listener.on("started", function (event) {
        if (eventMap[event.requestId]) {
            // Note: Mongo can generate 2 completely separate requests
            // which share the same requestId, if a certain race condition is triggered.
            // For now, we accept that this can happen and potentially miss or mislabel some events.
            return;
        }
        contextMap[event.requestId] = diagnostic_channel_1.channel.bindToContext(function (cb) { return cb(); });
        eventMap[event.requestId] = event;
    });
    listener.on("succeeded", function (event) {
        var startedData = eventMap[event.requestId];
        if (startedData) {
            delete eventMap[event.requestId];
        }
        if (typeof event === "object" && typeof contextMap[event.requestId] === "function") {
            contextMap[event.requestId](function () { return diagnostic_channel_1.channel.publish("mongodb", { startedData: startedData, event: event, succeeded: true }); });
            delete contextMap[event.requestId];
        }
    });
    listener.on("failed", function (event) {
        var startedData = eventMap[event.requestId];
        if (startedData) {
            delete eventMap[event.requestId];
        }
        if (typeof event === "object" && typeof contextMap[event.requestId] === "function") {
            contextMap[event.requestId](function () { return diagnostic_channel_1.channel.publish("mongodb", { startedData: startedData, event: event, succeeded: false }); });
            delete contextMap[event.requestId];
        }
    });
    return originalMongo;
};
// Version specifiers are disjoint; diagnostic-channel applies whichever
// matches the installed mongodb.
exports.mongo2 = {
    versionSpecifier: ">= 2.0.0 <= 3.0.5",
    patch: mongodbPatchFunction,
};
exports.mongo3 = {
    versionSpecifier: "> 3.0.5 < 3.3.0",
    patch: mongodb3PatchFunction,
};
exports.mongo330 = {
    versionSpecifier: ">= 3.3.0 < 4.0.0",
    patch: mongodb330PatchFunction,
};
function enable() {
    diagnostic_channel_1.channel.registerMonkeyPatch("mongodb", exports.mongo2);
    diagnostic_channel_1.channel.registerMonkeyPatch("mongodb", exports.mongo3);
    diagnostic_channel_1.channel.registerMonkeyPatch("mongodb", exports.mongo330);
}
exports.enable = enable;
//# sourceMappingURL=mongodb.pub.js.map
+19
View File
@@ -0,0 +1,19 @@
import { IModulePatcher } from "diagnostic-channel";
// Payload published on the "mysql" channel for each completed query.
export interface IMysqlData {
    // The Query object returned by Connection.createQuery; connection config
    // is nested for dependency-target identification.
    query: {
        sql?: string;
        _connection?: {
            config?: {
                socketPath?: string;
                host?: string;
                port?: number;
            };
        };
    };
    // The arguments the driver passed to the user's callback.
    callbackArgs: IArguments;
    err: Error;
    // Elapsed time in whole milliseconds.
    duration: number;
    time: Date;
}
export declare const mysql: IModulePatcher;
export declare function enable(): void;
+82
View File
@@ -0,0 +1,82 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
var diagnostic_channel_1 = require("diagnostic-channel");
var path = require("path");
/**
 * Patches mysql@2: binds driver callbacks to the active context and times
 * Connection.createQuery so each query is published on the "mysql" channel as
 * { query, callbackArgs, err, duration, time }.
 */
var mysqlPatchFunction = function (originalMysql, originalMysqlPath) {
    // The `name` passed in here is for debugging purposes,
    // to help distinguish which object is being patched.
    var patchObjectFunction = function (obj, name) {
        // Returns a patcher: (methodName, optional cbWrapper) -> patches obj[methodName].
        return function (func, cbWrapper) {
            var originalFunc = obj[func];
            if (originalFunc) {
                obj[func] = function mysqlContextPreserver() {
                    // Find the callback, if there is one: scan backwards past any
                    // trailing undefined arguments, stopping at the first function
                    // or the first defined non-function.
                    var cbidx = arguments.length - 1;
                    for (var i = arguments.length - 1; i >= 0; --i) {
                        if (typeof arguments[i] === "function") {
                            cbidx = i;
                            break;
                        }
                        else if (typeof arguments[i] !== "undefined") {
                            break;
                        }
                    }
                    var cb = arguments[cbidx];
                    var resultContainer = { result: null, startTime: null, startDate: null };
                    if (typeof cb === "function") {
                        // Preserve context on the callback.
                        // If this is one of the functions that we want to track,
                        // then wrap the callback with the tracking wrapper
                        if (cbWrapper) {
                            resultContainer.startTime = process.hrtime();
                            resultContainer.startDate = new Date();
                            arguments[cbidx] = diagnostic_channel_1.channel.bindToContext(cbWrapper(resultContainer, cb));
                        }
                        else {
                            arguments[cbidx] = diagnostic_channel_1.channel.bindToContext(cb);
                        }
                    }
                    var result = originalFunc.apply(this, arguments);
                    // Expose the return value (e.g. the Query object) to the wrapper.
                    resultContainer.result = result;
                    return result;
                };
            }
        };
    };
    var patchClassMemberFunction = function (classObject, name) {
        return patchObjectFunction(classObject.prototype, name + ".prototype");
    };
    // Instance methods whose callbacks only need context preservation.
    var connectionCallbackFunctions = [
        "connect", "changeUser",
        "ping", "statistics", "end",
    ];
    // Load the driver's internal classes relative to the resolved mysql path.
    var connectionClass = require(path.dirname(originalMysqlPath) + "/lib/Connection");
    connectionCallbackFunctions.forEach(function (value) { return patchClassMemberFunction(connectionClass, "Connection")(value); });
    // Connection.createQuery is a static method
    patchObjectFunction(connectionClass, "Connection")("createQuery", function (resultContainer, cb) {
        // Tracking wrapper: computes elapsed ms, publishes, then forwards to
        // the user's callback.
        return function (err) {
            var hrDuration = process.hrtime(resultContainer.startTime);
            /* tslint:disable-next-line:no-bitwise */
            var duration = (hrDuration[0] * 1e3 + hrDuration[1] / 1e6) | 0;
            diagnostic_channel_1.channel.publish("mysql", { query: resultContainer.result, callbackArgs: arguments, err: err, duration: duration, time: resultContainer.startDate });
            cb.apply(this, arguments);
        };
    });
    var poolCallbackFunctions = [
        "_enqueueCallback",
    ];
    var poolClass = require(path.dirname(originalMysqlPath) + "/lib/Pool");
    poolCallbackFunctions.forEach(function (value) { return patchClassMemberFunction(poolClass, "Pool")(value); });
    return originalMysql;
};
exports.mysql = {
    versionSpecifier: ">= 2.0.0 < 3.0.0",
    patch: mysqlPatchFunction,
};
function enable() {
    diagnostic_channel_1.channel.registerMonkeyPatch("mysql", exports.mysql);
}
exports.enable = enable;
//# sourceMappingURL=mysql.pub.js.map
+3
View File
@@ -0,0 +1,3 @@
// Type declarations for the pg-pool publisher (pg-pool.pub).
import { IModulePatcher } from "diagnostic-channel";
export declare const postgresPool1: IModulePatcher;
// Registers the patch for the "pg-pool" module.
export declare function enable(): void;
+24
View File
@@ -0,0 +1,24 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
var diagnostic_channel_1 = require("diagnostic-channel");
function postgresPool1PatchFunction(originalPgPool) {
var originalConnect = originalPgPool.prototype.connect;
originalPgPool.prototype.connect = function connect(callback) {
if (callback) {
arguments[0] = diagnostic_channel_1.channel.bindToContext(callback);
}
return originalConnect.apply(this, arguments);
};
return originalPgPool;
}
exports.postgresPool1 = {
versionSpecifier: ">= 1.0.0 < 3.0.0",
patch: postgresPool1PatchFunction,
};
function enable() {
diagnostic_channel_1.channel.registerMonkeyPatch("pg-pool", exports.postgresPool1);
}
exports.enable = enable;
//# sourceMappingURL=pg-pool.pub.js.map
+26
View File
@@ -0,0 +1,26 @@
import { IModulePatcher } from "diagnostic-channel";
// Subset of pg's query result that gets published.
export interface IPostgresResult {
    rowCount: number;
    command: string;
}
// Payload published on the "postgres" channel for each query.
export interface IPostgresData {
    // One of text / plan / preparable is populated, depending on how the
    // query was issued (plain text, named plan, or parameterized).
    query: {
        text?: string;
        plan?: string;
        preparable?: {
            text: string;
            args: any[];
        };
    };
    database: {
        host: string;
        port: string;
    };
    result?: IPostgresResult;
    // Elapsed time in milliseconds.
    duration: number;
    error?: Error;
    time: Date;
}
export declare const postgres6: IModulePatcher;
export declare const postgres7: IModulePatcher;
export declare function enable(): void;
+276
View File
@@ -0,0 +1,276 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
var diagnostic_channel_1 = require("diagnostic-channel");
var events_1 = require("events");
/**
 * Patches pg@6 Client.prototype.query to time queries and publish
 * { query, database, result, error, duration, time } on the "postgres" channel.
 * @param originalPg the loaded pg module
 * @param originalPgPath path the module was loaded from (unused here)
 */
function postgres6PatchFunction(originalPg, originalPgPath) {
    var originalClientQuery = originalPg.Client.prototype.query;
    var diagnosticOriginalFunc = "__diagnosticOriginalFunc";
    // wherever the callback is passed, find it, save it, and remove it from the call
    // to the the original .query() function
    originalPg.Client.prototype.query = function query(config, values, callback) {
        var data = {
            query: {},
            database: {
                host: this.connectionParameters.host,
                port: this.connectionParameters.port,
            },
            result: null,
            error: null,
            duration: 0,
            time: new Date(),
        };
        var start = process.hrtime();
        var queryResult;
        // Wraps (or creates) a callback that records timing/result/error and
        // publishes on the channel before deferring to the user's callback.
        function patchCallback(cb) {
            if (cb && cb[diagnosticOriginalFunc]) {
                // already wrapped once; unwrap to avoid double-publishing
                cb = cb[diagnosticOriginalFunc];
            }
            var trackingCallback = diagnostic_channel_1.channel.bindToContext(function (err, res) {
                var end = process.hrtime(start);
                data.result = res && { rowCount: res.rowCount, command: res.command };
                data.error = err;
                data.duration = Math.ceil((end[0] * 1e3) + (end[1] / 1e6));
                diagnostic_channel_1.channel.publish("postgres", data);
                // emulate weird internal behavior in pg@6
                // on success, the callback is called *before* query events are emitted
                // on failure, the callback is called *instead of* the query emitting events
                // with no events, that means no promises (since the promise is resolved/rejected in an event handler)
                // since we are always inserting ourselves as a callback, we have to restore the original
                // behavior if the user didn't provide one themselves
                if (err) {
                    if (cb) {
                        return cb.apply(this, arguments);
                    }
                    else if (queryResult && queryResult instanceof events_1.EventEmitter) {
                        queryResult.emit("error", err);
                    }
                }
                else if (cb) {
                    cb.apply(this, arguments);
                }
            });
            try {
                Object.defineProperty(trackingCallback, diagnosticOriginalFunc, { value: cb });
                return trackingCallback;
            }
            catch (e) {
                // this should never happen, but bailout in case it does
                return cb;
            }
        }
        // this function takes too many variations of arguments.
        // this patches any provided callback or creates a new callback if one wasn't provided.
        // since the callback is always called (if provided) in addition to always having a Promisified
        // EventEmitter returned (well, sometimes -- see above), its safe to insert a callback if none was given
        try {
            if (typeof config === "string") {
                if (values instanceof Array) {
                    data.query.preparable = {
                        text: config,
                        args: values,
                    };
                    callback = patchCallback(callback);
                }
                else {
                    data.query.text = config;
                    // pg v6 will, for some reason, accept both
                    // client.query("...", undefined, () => {...})
                    // **and**
                    // client.query("...", () => {...});
                    // Internally, precedence is given to the callback argument
                    if (callback) {
                        callback = patchCallback(callback);
                    }
                    else {
                        values = patchCallback(values);
                    }
                }
            }
            else {
                if (typeof config.name === "string") {
                    data.query.plan = config.name;
                }
                else if (config.values instanceof Array) {
                    data.query.preparable = {
                        text: config.text,
                        args: config.values,
                    };
                }
                else {
                    data.query.text = config.text;
                }
                if (callback) {
                    callback = patchCallback(callback);
                }
                else if (values) {
                    values = patchCallback(values);
                }
                else {
                    config.callback = patchCallback(config.callback);
                }
            }
        }
        catch (e) {
            // if our logic here throws, bail out and just let pg do its thing
            return originalClientQuery.apply(this, arguments);
        }
        // re-pack the (possibly wrapped) arguments before delegating to pg
        arguments[0] = config;
        arguments[1] = values;
        arguments[2] = callback;
        arguments.length = (arguments.length > 3) ? arguments.length : 3;
        queryResult = originalClientQuery.apply(this, arguments);
        return queryResult;
    };
    return originalPg;
}
/**
 * Patches pg@7/8 Client.prototype.query to time queries and publish
 * { query, database, result, error, duration, time } on the "postgres"
 * channel, while preserving pg's callback-vs-Promise contract.
 * @param originalPg the loaded pg module
 * @param originalPgPath path the module was loaded from (unused here)
 */
function postgres7PatchFunction(originalPg, originalPgPath) {
    var originalClientQuery = originalPg.Client.prototype.query;
    var diagnosticOriginalFunc = "__diagnosticOriginalFunc";
    // wherever the callback is passed, find it, save it, and remove it from the call
    // to the the original .query() function
    originalPg.Client.prototype.query = function query(config, values, callback) {
        var _this = this;
        var callbackProvided = !!callback; // Starting in pg@7.x+, Promise is returned only if !callbackProvided
        var data = {
            query: {},
            database: {
                host: this.connectionParameters.host,
                port: this.connectionParameters.port,
            },
            result: null,
            error: null,
            duration: 0,
            time: new Date(),
        };
        var start = process.hrtime();
        var queryResult;
        // Wraps (or creates) a callback that records timing/result/error and
        // publishes on the channel before deferring to the user's callback.
        function patchCallback(cb) {
            if (cb && cb[diagnosticOriginalFunc]) {
                // already wrapped once; unwrap to avoid double-publishing
                cb = cb[diagnosticOriginalFunc];
            }
            var trackingCallback = diagnostic_channel_1.channel.bindToContext(function (err, res) {
                var end = process.hrtime(start);
                data.result = res && { rowCount: res.rowCount, command: res.command };
                data.error = err;
                data.duration = Math.ceil((end[0] * 1e3) + (end[1] / 1e6));
                diagnostic_channel_1.channel.publish("postgres", data);
                if (err) {
                    if (cb) {
                        return cb.apply(this, arguments);
                    }
                    else if (queryResult && queryResult instanceof events_1.EventEmitter) {
                        queryResult.emit("error", err);
                    }
                }
                else if (cb) {
                    cb.apply(this, arguments);
                }
            });
            try {
                Object.defineProperty(trackingCallback, diagnosticOriginalFunc, { value: cb });
                return trackingCallback;
            }
            catch (e) {
                // this should never happen, but bailout in case it does
                return cb;
            }
        }
        // Only try to wrap the callback if it is a function. We want to keep the same
        // behavior of returning a promise only if no callback is provided. Wrapping
        // a nonfunction makes it a function and pg will interpret it as a callback
        try {
            if (typeof config === "string") {
                if (values instanceof Array) {
                    data.query.preparable = {
                        text: config,
                        args: values,
                    };
                    callbackProvided = typeof callback === "function";
                    callback = callbackProvided ? patchCallback(callback) : callback;
                }
                else {
                    data.query.text = config;
                    if (callback) {
                        callbackProvided = typeof callback === "function";
                        callback = callbackProvided ? patchCallback(callback) : callback;
                    }
                    else {
                        callbackProvided = typeof values === "function";
                        values = callbackProvided ? patchCallback(values) : values;
                    }
                }
            }
            else {
                if (typeof config.name === "string") {
                    data.query.plan = config.name;
                }
                else if (config.values instanceof Array) {
                    data.query.preparable = {
                        text: config.text,
                        args: config.values,
                    };
                }
                else {
                    data.query.text = config.text;
                }
                if (callback) {
                    callbackProvided = typeof callback === "function";
                    // BUGFIX: this branch previously wrapped unconditionally
                    // (callback = patchCallback(callback);), which turned a truthy
                    // non-function "callback" into a function — per the invariant
                    // stated above, that makes pg treat it as a callback and
                    // suppress the returned Promise. Guard like every other branch.
                    callback = callbackProvided ? patchCallback(callback) : callback;
                }
                else if (values) {
                    callbackProvided = typeof values === "function";
                    values = callbackProvided ? patchCallback(values) : values;
                }
                else {
                    callbackProvided = typeof config.callback === "function";
                    config.callback = callbackProvided ? patchCallback(config.callback) : config.callback;
                }
            }
        }
        catch (e) {
            // if our logic here throws, bail out and just let pg do its thing
            return originalClientQuery.apply(this, arguments);
        }
        // re-pack the (possibly wrapped) arguments before delegating to pg
        arguments[0] = config;
        arguments[1] = values;
        arguments[2] = callback;
        arguments.length = (arguments.length > 3) ? arguments.length : 3;
        queryResult = originalClientQuery.apply(this, arguments);
        if (!callbackProvided) {
            // no callback, so create a pass along promise
            return queryResult
                // pass resolved promise after publishing the event
                .then(function (result) {
                patchCallback()(undefined, result);
                return new _this._Promise(function (resolve, reject) {
                    resolve(result);
                });
            })
                // pass along rejected promise after publishing the error
                .catch(function (error) {
                patchCallback()(error, undefined);
                return new _this._Promise(function (resolve, reject) {
                    reject(error);
                });
            });
        }
        return queryResult;
    };
    return originalPg;
}
exports.postgres6 = {
versionSpecifier: "6.*",
patch: postgres6PatchFunction,
};
exports.postgres7 = {
versionSpecifier: ">=7.* <=8.*",
patch: postgres7PatchFunction,
};
function enable() {
diagnostic_channel_1.channel.registerMonkeyPatch("pg", exports.postgres6);
diagnostic_channel_1.channel.registerMonkeyPatch("pg", exports.postgres7);
}
exports.enable = enable;
//# sourceMappingURL=pg.pub.js.map
+11
View File
@@ -0,0 +1,11 @@
import { IModulePatcher } from "diagnostic-channel";
// Payload published on the "redis" channel for each command.
export interface IRedisData {
    // Elapsed time in whole milliseconds.
    duration: number;
    // Address of the redis server the client is connected to.
    address: string;
    commandObj: any;
    err: Error;
    result: any;
    time: Date;
}
export declare const redis: IModulePatcher;
export declare function enable(): void;
+44
View File
@@ -0,0 +1,44 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
var diagnostic_channel_1 = require("diagnostic-channel");
var redisPatchFunction = function (originalRedis) {
var originalSend = originalRedis.RedisClient.prototype.internal_send_command;
// Note: This is mixing together both context tracking and dependency tracking
originalRedis.RedisClient.prototype.internal_send_command = function (commandObj) {
if (commandObj) {
var cb_1 = commandObj.callback;
if (!cb_1 || !cb_1.pubsubBound) {
var address_1 = this.address;
var startTime_1 = process.hrtime();
var startDate_1 = new Date();
// Note: augmenting the callback on internal_send_command is correct for context
// tracking, but may be too low-level for dependency tracking. There are some 'errors'
// which higher levels expect in some cases
// However, the only other option is to intercept every individual command.
commandObj.callback = diagnostic_channel_1.channel.bindToContext(function (err, result) {
var hrDuration = process.hrtime(startTime_1);
/* tslint:disable-next-line:no-bitwise */
var duration = (hrDuration[0] * 1e3 + hrDuration[1] / 1e6) | 0;
diagnostic_channel_1.channel.publish("redis", { duration: duration, address: address_1, commandObj: commandObj, err: err, result: result, time: startDate_1 });
if (typeof cb_1 === "function") {
cb_1.apply(this, arguments);
}
});
commandObj.callback.pubsubBound = true;
}
}
return originalSend.call(this, commandObj);
};
return originalRedis;
};
// Patcher entry for the "redis" package, applied to client versions 2.x-3.x.
exports.redis = { patch: redisPatchFunction, versionSpecifier: ">= 2.0.0 < 4.0.0" };
/** Registers the redis patcher with diagnostic-channel. */
function enable() {
    diagnostic_channel_1.channel.registerMonkeyPatch("redis", exports.redis);
}
exports.enable = enable;
//# sourceMappingURL=redis.pub.js.map
+24
View File
@@ -0,0 +1,24 @@
import { IModulePatcher } from "diagnostic-channel";
// Shape of a successful tedious request's outcome as published on the channel.
export interface ITediousResult {
    rowCount: number;
    rows: any;
}
/**
 * Payload published on the "tedious" diagnostic channel for each completed
 * Connection.makeRequest call (see tedious.pub.js).
 */
export interface ITediousData {
    query: {
        // SQL text of the statement, when available.
        text?: string;
        // NOTE(review): only `text` is populated by the visible patcher;
        // `plan`/`preparable` appear unused here -- confirm against consumers.
        plan?: string;
        preparable?: {
            text: string;
            args: any[];
        };
    };
    // Target server address, taken from the tedious connection config.
    database: {
        host: string;
        port: string;
    };
    // Present only when the request succeeded.
    result?: ITediousResult;
    // Elapsed request time in milliseconds (rounded up).
    duration: number;
    // Present only when the request failed.
    error?: Error;
}
// Patcher entry registered for the "tedious" package (versions 6.x-8.x).
export declare const tedious: IModulePatcher;
// Registers the tedious patcher with diagnostic-channel.
export declare function enable(): void;
+55
View File
@@ -0,0 +1,55 @@
"use strict";
// TypeScript-emitted helper: shallow-merges own enumerable properties of each
// source argument into the target (manual fallback for environments without
// Object.assign). Generated code -- do not edit by hand.
var __assign = (this && this.__assign) || Object.assign || function(t) {
    for (var s, i = 1, n = arguments.length; i < n; i++) {
        s = arguments[i];
        for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
            t[p] = s[p];
    }
    return t;
};
Object.defineProperty(exports, "__esModule", { value: true });
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
var diagnostic_channel_1 = require("diagnostic-channel");
/**
 * Patches tedious Connection.prototype.makeRequest so every request's
 * completion is published on the "tedious" diagnostic channel (query text,
 * target host/port, result or error, and duration) and the request callback
 * runs in the ambient correlation context.
 */
var tediousPatchFunction = function (originalTedious) {
    var originalMakeRequest = originalTedious.Connection.prototype.makeRequest;
    originalTedious.Connection.prototype.makeRequest = function makeRequest() {
        // Builds a context-bound wrapper around the request's completion
        // callback that measures duration and publishes an ITediousData event.
        function getPatchedCallback(origCallback) {
            var start = process.hrtime();
            var data = {
                query: {},
                database: {
                    host: null,
                    port: null,
                },
                result: null,
                error: null,
                duration: 0,
            };
            return diagnostic_channel_1.channel.bindToContext(function (err, rowCount, rows) {
                var end = process.hrtime(start);
                // `this` here is the tedious Request; its connection config
                // supplies the target database address.
                data = __assign({}, data, { database: {
                        host: this.connection.config.server,
                        port: this.connection.config.options.port,
                    }, result: !err && { rowCount: rowCount, rows: rows }, query: {
                        text: this.parametersByName.statement.value,
                    }, error: err, duration: Math.ceil((end[0] * 1e3) + (end[1] / 1e6)) });
                diagnostic_channel_1.channel.publish("tedious", data);
                origCallback.call(this, err, rowCount, rows);
            });
        }
        var request = arguments[0];
        request.callback = getPatchedCallback(request.callback);
        // Bug fix: propagate the original method's return value instead of
        // silently discarding it.
        return originalMakeRequest.apply(this, arguments);
    };
    return originalTedious;
};
// Patcher entry for the "tedious" package, applied to versions 6.x-8.x.
exports.tedious = { patch: tediousPatchFunction, versionSpecifier: ">= 6.0.0 < 9.0.0" };
/** Registers the tedious patcher with diagnostic-channel. */
function enable() {
    diagnostic_channel_1.channel.registerMonkeyPatch("tedious", exports.tedious);
}
exports.enable = enable;
//# sourceMappingURL=tedious.pub.js.map
+10
View File
@@ -0,0 +1,10 @@
import { IModulePatcher } from "diagnostic-channel";
/**
 * Payload published on the "winston" diagnostic channel for each log call
 * (see winston.pub.js).
 */
export interface IWinstonData {
    // Logged message; winston 3.x may pass an Error object as the info itself.
    message: string | Error;
    // Metadata object (winston meta merged with any extra splat fields).
    meta: any;
    // Level name as configured on the logger (e.g. "info", "error").
    level: string;
    // Which well-known level table `level` belongs to: "npm", "syslog", or "unknown".
    levelKind: string;
}
// Patcher entries for winston 3.x (transport-based) and 2.x (filter-based).
export declare const winston3: IModulePatcher;
export declare const winston2: IModulePatcher;
// Registers both winston patchers with diagnostic-channel.
export declare function enable(): void;
+153
View File
@@ -0,0 +1,153 @@
"use strict";
// TypeScript-emitted helper implementing prototypal inheritance for the
// transpiled `class extends` below (AppInsightsTransport extends
// winston.Transport). Generated code -- do not edit by hand.
var __extends = (this && this.__extends) || (function () {
    var extendStatics = Object.setPrototypeOf ||
        ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
        function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
    return function (d, b) {
        extendStatics(d, b);
        function __() { this.constructor = d; }
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
// TypeScript-emitted helper for object rest destructuring ({ a, ...rest }):
// copies own properties of `s` whose names are not listed in `e` (including
// symbol keys) into a fresh object. Generated code -- do not edit by hand.
var __rest = (this && this.__rest) || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
        t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function")
        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) if (e.indexOf(p[i]) < 0)
            t[p[i]] = s[p[i]];
    return t;
};
Object.defineProperty(exports, "__esModule", { value: true });
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
var diagnostic_channel_1 = require("diagnostic-channel");
// register a "filter" with each logger that publishes the data about to be logged
var winston2PatchFunction = function (originalWinston) {
var originalLog = originalWinston.Logger.prototype.log;
var curLevels;
var loggingFilter = function (level, message, meta) {
var levelKind;
if (curLevels === originalWinston.config.npm.levels) {
levelKind = "npm";
}
else if (curLevels === originalWinston.config.syslog.levels) {
levelKind = "syslog";
}
else {
levelKind = "unknown";
}
diagnostic_channel_1.channel.publish("winston", { level: level, message: message, meta: meta, levelKind: levelKind });
return message;
};
// whenever someone logs, ensure our filter comes last
originalWinston.Logger.prototype.log = function log() {
curLevels = this.levels;
if (!this.filters || this.filters.length === 0) {
this.filters = [loggingFilter];
}
else if (this.filters[this.filters.length - 1] !== loggingFilter) {
this.filters = this.filters.filter(function (f) { return f !== loggingFilter; });
this.filters.push(loggingFilter);
}
return originalLog.apply(this, arguments);
};
return originalWinston;
};
/**
 * Patches winston 3.x by adding a custom Transport that republishes every
 * log entry on the "winston" diagnostic channel. The transport is attached
 * to the default logger, to every logger made via createLogger(), and is
 * re-attached whenever configure() resets a logger's transports.
 */
var winston3PatchFunction = function (originalWinston) {
    // Classify a level name against winston's built-in npm/syslog tables.
    var mapLevelToKind = function (winston, level) {
        var levelKind;
        if (winston.config.npm.levels[level] != null) {
            levelKind = "npm";
        }
        else if (winston.config.syslog.levels[level] != null) {
            levelKind = "syslog";
        }
        else {
            levelKind = "unknown";
        }
        return levelKind;
    };
    // Transport whose only job is publishing each log record on the channel.
    var AppInsightsTransport = /** @class */ (function (_super) {
        __extends(AppInsightsTransport, _super);
        function AppInsightsTransport(winston, opts) {
            var _this = _super.call(this, opts) || this;
            _this.winston = winston;
            return _this;
        }
        AppInsightsTransport.prototype.log = function (info, callback) {
            // tslint:disable-next-line:prefer-const - try to obtain level from Symbol(level) afterwards
            var message = info.message, level = info.level, meta = info.meta, splat = __rest(info, ["message", "level", "meta"]);
            level = typeof Symbol["for"] === "function" ? info[Symbol["for"]("level")] : level; // Symbol(level) is uncolorized, so prefer getting it from here
            message = info instanceof Error ? info : message; // Winston places Errors at info, strings at info.message
            var levelKind = mapLevelToKind(this.winston, level);
            meta = meta || {}; // Winston _sometimes_ puts metadata inside meta, so start from here
            // Fold remaining (splat) fields into meta so subscribers see one bag.
            for (var key in splat) {
                if (splat.hasOwnProperty(key)) {
                    meta[key] = splat[key];
                }
            }
            diagnostic_channel_1.channel.publish("winston", { message: message, level: level, levelKind: levelKind, meta: meta });
            callback();
        };
        return AppInsightsTransport;
    }(originalWinston.Transport));
    // Shared re-attach logic, run after configure() wipes a logger's transports.
    // NOTE(review): assumes configure()/createLogger() is called with an options
    // argument; arguments[0] is dereferenced unconditionally -- confirm callers.
    function patchedConfigure() {
        // Grab highest sev logging level in case of custom logging levels
        var levels = arguments[0].levels || originalWinston.config.npm.levels;
        var lastLevel;
        for (var level in levels) {
            if (levels.hasOwnProperty(level)) {
                lastLevel = lastLevel === undefined || levels[level] > levels[lastLevel] ? level : lastLevel;
            }
        }
        this.add(new AppInsightsTransport(originalWinston, { level: lastLevel }));
    }
    var origCreate = originalWinston.createLogger;
    originalWinston.createLogger = function patchedCreate() {
        // Grab highest sev logging level in case of custom logging levels
        var levels = arguments[0].levels || originalWinston.config.npm.levels;
        var lastLevel;
        for (var level in levels) {
            if (levels.hasOwnProperty(level)) {
                lastLevel = lastLevel === undefined || levels[level] > levels[lastLevel] ? level : lastLevel;
            }
        }
        // Add custom app insights transport to the end
        // Remark: Configure is not available until after createLogger()
        // and the Logger prototype is not exported in winston 3.x, so
        // patch both createLogger and configure. Could also call configure
        // again after createLogger, but that would cause configure to be called
        // twice per create.
        var result = origCreate.apply(this, arguments);
        result.add(new AppInsightsTransport(originalWinston, { level: lastLevel }));
        var origConfigure = result.configure;
        result.configure = function () {
            origConfigure.apply(this, arguments);
            patchedConfigure.apply(this, arguments);
        };
        return result;
    };
    // Also patch the module-level configure() used by the default logger.
    var origRootConfigure = originalWinston.configure;
    originalWinston.configure = function () {
        origRootConfigure.apply(this, arguments);
        patchedConfigure.apply(this, arguments);
    };
    // Attach to the default logger immediately (no explicit level: uses default).
    originalWinston.add(new AppInsightsTransport(originalWinston));
    return originalWinston;
};
// Patcher entries: winston 3.x uses the transport-based patch, 2.x the
// filter-based patch.
exports.winston3 = { patch: winston3PatchFunction, versionSpecifier: "3.x" };
exports.winston2 = { patch: winston2PatchFunction, versionSpecifier: "2.x" };
/**
 * Registers both winston patchers with diagnostic-channel; the channel
 * applies whichever versionSpecifier matches the installed package.
 * (Registration order -- 2.x then 3.x -- is preserved from the original.)
 */
function enable() {
    diagnostic_channel_1.channel.registerMonkeyPatch("winston", exports.winston2);
    diagnostic_channel_1.channel.registerMonkeyPatch("winston", exports.winston3);
}
exports.enable = enable;
//# sourceMappingURL=winston.pub.js.map