22 changes: 20 additions & 2 deletions .github/workflows/node-ec2-default-test.yml
@@ -244,9 +244,27 @@ jobs:
--instance-id ${{ env.MAIN_SERVICE_INSTANCE_ID }}
--rollup'

- name: Validate custom metrics
id: cwagent-metric-validation
if: (success() || steps.log-validation.outcome == 'failure') && !cancelled()
run: ./gradlew validator:run --args='-c node/ec2/default/custom-metric-validation.yml
--testing-id ${{ env.TESTING_ID }}
--endpoint http://${{ env.MAIN_SERVICE_ENDPOINT }}
--remote-service-deployment-name ${{ env.REMOTE_SERVICE_IP }}:8001
--region ${{ inputs.aws-region }}
--account-id ${{ env.ACCOUNT_ID }}
--metric-namespace CWAgent
--log-group ${{ env.LOG_GROUP_NAME }}
--service-name node-sample-application-${{ env.TESTING_ID }}
--remote-service-name node-sample-remote-application-${{ env.TESTING_ID }}
--query-string ip=${{ env.REMOTE_SERVICE_IP }}&testingId=${{ env.TESTING_ID }}
--instance-ami ${{ env.EC2_INSTANCE_AMI }}
--instance-id ${{ env.MAIN_SERVICE_INSTANCE_ID }}
--rollup'

- name: Validate generated traces
id: trace-validation
if: (success() || steps.log-validation.outcome == 'failure' || steps.metric-validation.outcome == 'failure') && !cancelled()
if: (success() || steps.log-validation.outcome == 'failure' || steps.metric-validation.outcome == 'failure' || steps.cwagent-metric-validation.outcome == 'failure') && !cancelled()
run: ./gradlew validator:run --args='-c node/ec2/default/trace-validation.yml
--testing-id ${{ env.TESTING_ID }}
--endpoint http://${{ env.MAIN_SERVICE_ENDPOINT }}
@@ -273,7 +291,7 @@ jobs:
if: always()
id: validation-result
run: |
if [ "${{ steps.log-validation.outcome }}" = "success" ] && [ "${{ steps.metric-validation.outcome }}" = "success" ] && [ "${{ steps.trace-validation.outcome }}" = "success" ]; then
if [ "${{ steps.log-validation.outcome }}" = "success" ] && [ "${{ steps.cwagent-metric-validation.outcome }}" = "success" ] && [ "${{ steps.metric-validation.outcome }}" = "success" ] && [ "${{ steps.trace-validation.outcome }}" = "success" ]; then
echo "validation-result=success" >> $GITHUB_OUTPUT
else
echo "validation-result=failure" >> $GITHUB_OUTPUT
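The new cwagent-metric-validation step checks that the custom metrics emitted through the OTLP pipeline land in CloudWatch under the CWAgent namespace. As a rough illustration of what that assertion amounts to (the real checks live in the Gradle/Java validator and its expected-data templates), a minimal spot check with the AWS SDK for JavaScript v3 might look like the sketch below; the metric name and region handling are assumptions for the sketch.

// Illustrative spot check only — not part of this change or of the validator.
const { CloudWatchClient, ListMetricsCommand } = require('@aws-sdk/client-cloudwatch');

async function hasCustomMetric(metricName) {
  const cw = new CloudWatchClient({ region: process.env.AWS_REGION });
  const resp = await cw.send(new ListMetricsCommand({
    Namespace: 'CWAgent',          // namespace passed to the validator above
    MetricName: metricName,        // e.g. 'custom_pipeline_counter'
  }));
  return (resp.Metrics || []).length > 0;
}

hasCustomMetric('custom_pipeline_counter')
  .then(found => console.log(found ? 'metric found' : 'metric missing'));

Newly published metrics can take a few minutes to become visible to ListMetrics, so a real check would poll rather than query once.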
71 changes: 70 additions & 1 deletion sample-apps/node/frontend-service/index.js
@@ -5,6 +5,9 @@ const express = require('express');
const mysql = require('mysql2');
const bunyan = require('bunyan');
const { S3Client, GetBucketLocationCommand } = require('@aws-sdk/client-s3');
const opentelemetry = require('@opentelemetry/sdk-node');
const { metrics } = require('@opentelemetry/api');
const { randomInt } = require('crypto');

const PORT = parseInt(process.env.SAMPLE_APP_PORT || '8000', 10);

@@ -13,6 +16,58 @@ const app = express();
// Create bunyan logger
const logger = bunyan.createLogger({name: 'express-app', level: 'info'});

let pipelineMeter = null;

// Conditionally create a custom metrics pipeline; it is only set up when SERVICE_NAME and DEPLOYMENT_ENVIRONMENT_NAME are defined (exported in main.tf)
if (process.env.SERVICE_NAME && process.env.DEPLOYMENT_ENVIRONMENT_NAME) {
const { Resource } = require('@opentelemetry/resources');
const { MeterProvider, PeriodicExportingMetricReader } = require('@opentelemetry/sdk-metrics');
const { OTLPMetricExporter } = require('@opentelemetry/exporter-metrics-otlp-proto');

const serviceName = process.env.SERVICE_NAME;
const deploymentEnv = process.env.DEPLOYMENT_ENVIRONMENT_NAME;

const pipelineResource = new Resource({
// SEMRESATTRS_DEPLOYMENT_ENVIRONMENT_NAME maps to the dimension 'deployment.name', so the raw key
// 'deployment.environment.name' is used instead to assign the value correctly.
'service.name': serviceName,
'deployment.environment.name': deploymentEnv
});

const pipelineMetricExporter = new OTLPMetricExporter({
url: 'http://localhost:4318/v1/metrics'
});

const pipelineMetricReader = new PeriodicExportingMetricReader({
exporter: pipelineMetricExporter,
exportIntervalMillis: 1000
});

const pipelineMeterProvider = new MeterProvider({
resource: pipelineResource,
readers: [pipelineMetricReader]
});

pipelineMeter = pipelineMeterProvider.getMeter('myMeter');
}

// Use global meter
const meter = metrics.getMeter('myMeter');
const agent_based_counter = meter.createCounter('agent_based_counter', {description: 'agent export counter'});
const agent_based_histogram = meter.createHistogram('agent_based_histogram', {description: 'agent export histogram'});
const agent_based_gauge = meter.createUpDownCounter('agent_based_gauge', {description: 'agent export gauge'});

// Continuation of conditional custom pipeline
let custom_pipeline_counter = null;
let custom_pipeline_histogram = null;
let custom_pipeline_gauge = null;

if (pipelineMeter) {
custom_pipeline_counter = pipelineMeter.createCounter('custom_pipeline_counter', {unit: '1', description: 'pipeline export counter'});
custom_pipeline_histogram = pipelineMeter.createHistogram('custom_pipeline_histogram', {description: 'pipeline export histogram'});
custom_pipeline_gauge = pipelineMeter.createUpDownCounter('custom_pipeline_gauge', {unit: '1', description: 'pipeline export gauge'});
}

app.get('/healthcheck', (req, res) => {
logger.info('/healthcheck called successfully');
res.send('healthcheck');
@@ -44,10 +99,22 @@ app.get('/aws-sdk-call', async (req, res) => {
const s3Client = new S3Client({ region: 'us-east-1' });
const bucketName = 'e2e-test-bucket-name-' + (req.query.testingId || 'MISSING_ID');

// Increment counter/histogram/gauge for agent export
agent_based_counter.add(1, { Operation : 'counter' });
agent_based_histogram.record(randomInt(100,1001), { Operation : 'histogram' });
agent_based_gauge.add(randomInt(-10, 11), { Operation : 'gauge' });

// Increment counter/histogram/gauge for pipeline export
if (custom_pipeline_counter) {
custom_pipeline_counter.add(1, { Operation : 'pipeline_counter' });
custom_pipeline_histogram.record(randomInt(100,1001), { Operation : 'pipeline_histogram' });
custom_pipeline_gauge.add(randomInt(-10, 11), { Operation : 'pipeline_gauge' });
}

// Add custom warning log for validation testing
const warningMsg = "This is a custom log for validation testing";
logger.warn(warningMsg);

try {
await s3Client.send(
new GetBucketLocationCommand({
@@ -120,6 +187,8 @@ app.get('/client-call', (req, res) => {
makeAsyncCall = true;
});



Review comment on lines +190 to +191 (Contributor): nit: Remove whitespace changes
app.get('/mysql', (req, res) => {
// Create a connection to the MySQL database
const connection = mysql.createConnection({
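One behavior worth noting in the custom pipeline above: the PeriodicExportingMetricReader pushes metrics every second, but nothing flushes the final batch if the process exits between intervals. A shutdown hook along the lines below would cover that; it is a sketch only, not part of this change, and it assumes the MeterProvider instance (pipelineMeterProvider, which is block-scoped in the diff) is kept in a wider scope.

// Sketch only — not part of this change. Assumes `pipelineMeterProvider` is reachable here.
process.on('SIGTERM', async () => {
  if (pipelineMeterProvider) {
    await pipelineMeterProvider.forceFlush();  // export any buffered data points
    await pipelineMeterProvider.shutdown();    // stop the periodic reader cleanly
  }
  process.exit(0);
});

Separately, both "gauge" instruments are UpDownCounters; that is sufficient for this validation, but @opentelemetry/api also offers createObservableGauge if true gauge semantics are ever needed.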
13 changes: 10 additions & 3 deletions sample-apps/node/frontend-service/package.json
@@ -10,11 +10,18 @@
"author": "",
"license": "Apache-2.0",
"dependencies": {
"@aws-sdk/client-s3": "3.621.0",
"@aws-sdk/client-s3": "^3.621.0",
"@aws/aws-distro-opentelemetry-node-autoinstrumentation": "^0.8.0",
Review comment (Contributor): Why is @aws/aws-distro-opentelemetry-node-autoinstrumentation needed?
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/exporter-metrics-otlp-proto": "^0.57.1",
"@opentelemetry/resources": "^1.30.1",
"@opentelemetry/sdk-metrics": "^1.30.1",
"@opentelemetry/semantic-conventions": "^1.37.0",
"@opentelemetry/sdk-node": "^0.57.0",
"@types/express": "^4.17.21",
"@types/node": "^20.14.6",
"bunyan": "^1.8.15",
"express": "^4.21.2",
"mysql2": "^3.11.0",
"bunyan": "^1.8.15"
"mysql2": "^3.15.3"
Review comment (Contributor): Why is an update to mysql2 needed?
}
}
6 changes: 5 additions & 1 deletion terraform/node/ec2/default/amazon-cloudwatch-agent.json
@@ -10,7 +10,11 @@
},
"logs": {
"metrics_collected": {
"application_signals": {}
"application_signals": {},
"otlp": {
"grpc_endpoint": "0.0.0.0:4317",
"http_endpoint": "0.0.0.0:4318"
}
}
}
}
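With this change the CloudWatch agent exposes local OTLP receivers on 4317 (gRPC) and 4318 (HTTP), which is the endpoint the app's OTLPMetricExporter targets. If the custom metrics ever fail to appear, a quick way to tell whether the listener is even up (purely a debugging aid, not part of this change; requires Node 18+ for the global fetch) is a probe like this:

// Any HTTP response, even an error status, means the OTLP HTTP listener is up;
// a thrown connection error (e.g. ECONNREFUSED) means the agent is not listening.
fetch('http://localhost:4318/v1/metrics', { method: 'POST' })
  .then(res => console.log(`OTLP HTTP listener reachable (status ${res.status})`))
  .catch(err => console.error('OTLP HTTP listener not reachable:', err.code || err.message));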
20 changes: 17 additions & 3 deletions terraform/node/ec2/default/main.tf
@@ -153,20 +153,34 @@ resource "null_resource" "main_service_setup" {
# Get ADOT instrumentation and install it
${var.get_adot_instrumentation_command}

# Set up application tmux screen so it keeps running after closing the SSH connection
tmux new-session -d -s frontend
# Set up environment variables (mirroring the Python sample app setup)
export SERVICE_NAME='node-sample-application-${var.test_id}'
export DEPLOYMENT_ENVIRONMENT_NAME='ec2:default'
export OTEL_RESOURCE_ATTRIBUTES="service.name=$${SERVICE_NAME},deployment.environment.name=$${DEPLOYMENT_ENVIRONMENT_NAME}"
export AWS_REGION='${var.aws_region}'
export TESTING_ID='${var.test_id}'

# Set up application tmux screen with bash shell
tmux new-session -d -s frontend bash

# Export environment variables for instrumentation
# Note: We use OTEL_NODE_DISABLED_INSTRUMENTATIONS=fs,dns,express to avoid
# having to validate around the telemetry generated for middleware
tmux send-keys -t frontend 'export OTEL_METRICS_EXPORTER=none' C-m
tmux send-keys -t frontend 'export OTEL_METRICS_EXPORTER=otlp' C-m
tmux send-keys -t frontend 'export OTEL_TRACES_EXPORTER=otlp' C-m
tmux send-keys -t frontend 'export OTEL_AWS_APPLICATION_SIGNALS_ENABLED=true' C-m
tmux send-keys -t frontend 'export OTEL_AWS_APPLICATION_SIGNALS_RUNTIME_ENABLED=false' C-m
tmux send-keys -t frontend 'export OTEL_AWS_APPLICATION_SIGNALS_EXPORTER_ENDPOINT=http://localhost:4316/v1/metrics' C-m
tmux send-keys -t frontend 'export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://localhost:4316/v1/traces' C-m
tmux send-keys -t frontend 'export OTEL_EXPORTER_OTLP_TRACES_PROTOCOL=http/protobuf' C-m
tmux send-keys -t frontend 'export OTEL_EXPORTER_OTLP_METRICS_PROTOCOL=http/protobuf' C-m
tmux send-keys -t frontend 'export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT=http://localhost:4318/v1/metrics' C-m
tmux send-keys -t frontend 'export OTEL_EXPORTER_OTLP_METRICS_INSECURE=true' C-m
tmux send-keys -t frontend 'export SERVICE_NAME="$${SERVICE_NAME}"' C-m
tmux send-keys -t frontend 'export DEPLOYMENT_ENVIRONMENT_NAME="$${DEPLOYMENT_ENVIRONMENT_NAME}"' C-m
tmux send-keys -t frontend 'export OTEL_RESOURCE_ATTRIBUTES="$${OTEL_RESOURCE_ATTRIBUTES}"' C-m
tmux send-keys -t frontend 'export AWS_REGION="$${AWS_REGION}"' C-m
tmux send-keys -t frontend 'export TESTING_ID="$${TESTING_ID}"' C-m
tmux send-keys -t frontend 'export OTEL_NODE_DISABLED_INSTRUMENTATIONS=fs,dns,express' C-m
tmux send-keys -t frontend 'export OTEL_SERVICE_NAME=node-sample-application-${var.test_id}' C-m
Review comment (Contributor): Nit: Should use SERVICE_NAME
tmux send-keys -t frontend 'export OTEL_TRACES_SAMPLER=always_on' C-m
@@ -422,6 +422,9 @@ public enum PredefinedExpectedTemplate implements FileConfig {
NODE_EC2_DEFAULT_AWS_SDK_CALL_METRIC("/expected-data-template/node/ec2/default/aws-sdk-call-metric.mustache"),
NODE_EC2_DEFAULT_AWS_SDK_CALL_TRACE("/expected-data-template/node/ec2/default/aws-sdk-call-trace.mustache"),

/** Node EC2 Default Custom Metrics Test Case Validations */
NODE_EC2_DEFAULT_AWS_OTEL_CUSTOM_METRIC("/expected-data-template/node/ec2/default/aws-otel-custom-metrics.mustache"),

NODE_EC2_DEFAULT_REMOTE_SERVICE_LOG("/expected-data-template/node/ec2/default/remote-service-log.mustache"),
NODE_EC2_DEFAULT_REMOTE_SERVICE_METRIC("/expected-data-template/node/ec2/default/remote-service-metric.mustache"),
NODE_EC2_DEFAULT_REMOTE_SERVICE_TRACE("/expected-data-template/node/ec2/default/remote-service-trace.mustache"),