Jenkins Integration Guide

This guide explains how to integrate the LoadFocus JMeter API Client with Jenkins for automated performance testing.

Setup Steps

1. Store Credentials in Jenkins

First, store your LoadFocus API credentials securely in Jenkins:

  1. Navigate to Jenkins Dashboard > Manage Jenkins > Manage Credentials
  2. Select the appropriate credential domain (e.g., global)
  3. Click "Add Credentials"
  4. Add the following credentials:
    • Kind: Secret text
    • Scope: Global
    • Secret: Your LoadFocus API key
    • ID: loadfocus-api-key
    • Description: LoadFocus API Key
  5. Repeat for your team ID with ID: loadfocus-team-id
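
Before wiring the credentials into a real pipeline, you can verify they resolve correctly with a throwaway job. This is a minimal sketch that assumes only the two credential IDs created above and standard Pipeline steps; the secret values stay masked in the console log:

node {
    stage('Verify Credentials') {
        withCredentials([
            string(credentialsId: 'loadfocus-api-key', variable: 'LOADFOCUS_API_KEY'),
            string(credentialsId: 'loadfocus-team-id', variable: 'LOADFOCUS_TEAM_ID')
        ]) {
            // Fail the build if either secret resolved to an empty string.
            sh 'test -n "$LOADFOCUS_API_KEY" && test -n "$LOADFOCUS_TEAM_ID"'
        }
    }
}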

2. Create a Jenkins Pipeline

Create a Jenkinsfile in your repository:

pipeline {
    agent {
        docker {
            image 'node:16-alpine'
        }
    }
    environment {
        LOADFOCUS_API_KEY = credentials('loadfocus-api-key')
        LOADFOCUS_TEAM_ID = credentials('loadfocus-team-id')
    }
    stages {
        stage('Build') {
            steps {
                // Your build steps
                sh 'npm install'
                sh 'npm run build'
            }
        }
        stage('Test') {
            steps {
                // Your test steps
                sh 'npm test'
            }
        }
        stage('Performance Test') {
            steps {
                // Install LoadFocus JMeter API Client
                sh 'npm install -g @loadfocus/loadfocus-api-client'
                // Configure LoadFocus API Client
                sh 'loadfocus-api config set apikey $LOADFOCUS_API_KEY'
                sh 'loadfocus-api config set teamid $LOADFOCUS_TEAM_ID'
                // Run Performance Tests
                sh '''
                    loadfocus-api jmeter run-test \
                        --name "Jenkins_${JOB_NAME}_${BUILD_NUMBER}" \
                        --thresholds "avgresponse<=200,errors==0,p95<=250" \
                        --format json > performance_results.json
                '''
                // Archive the results
                archiveArtifacts artifacts: 'performance_results.json', fingerprint: true
            }
        }
        stage('Deploy') {
            when {
                expression {
                    return currentBuild.resultIsBetterOrEqualTo('SUCCESS')
                }
            }
            steps {
                // Your deployment steps
                echo 'Deploying...'
            }
        }
    }
    post {
        always {
            // Clean up workspace (cleanWs comes from the Workspace Cleanup plugin)
            cleanWs()
        }
    }
}

3. Configure Jenkins Job

  1. Create a new Pipeline job in Jenkins
  2. Configure the Pipeline to use your Jenkinsfile
  3. Set up the appropriate SCM configuration to fetch your repository
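
If you manage jobs as code, the same job can be created with the Job DSL plugin instead of clicking through the UI. A hedged sketch, assuming the Job DSL plugin is installed; the job name, repository URL, and branch are placeholders:

// Job DSL sketch: creates a Pipeline job that reads the Jenkinsfile from SCM.
pipelineJob('loadfocus-performance-tests') {
    definition {
        cpsScm {
            scm {
                git {
                    remote { url('https://github.com/your-org/your-repo.git') }
                    branch('*/main')
                }
            }
            scriptPath('Jenkinsfile')
        }
    }
}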

Advanced Configuration

Declarative Pipeline with Parallel Testing

Run multiple performance tests in parallel:

pipeline {
    agent any
    environment {
        LOADFOCUS_API_KEY = credentials('loadfocus-api-key')
        LOADFOCUS_TEAM_ID = credentials('loadfocus-team-id')
    }
    stages {
        // Previous stages...
        stage('Performance Tests') {
            parallel {
                stage('API Performance') {
                    agent {
                        docker {
                            image 'node:16-alpine'
                        }
                    }
                    steps {
                        sh 'npm install -g @loadfocus/loadfocus-api-client'
                        sh 'loadfocus-api config set apikey $LOADFOCUS_API_KEY'
                        sh 'loadfocus-api config set teamid $LOADFOCUS_TEAM_ID'
                        sh '''
                            loadfocus-api jmeter run-test \
                                --name "API_Performance_Test" \
                                --thresholds "avgresponse<=150,errors==0" \
                                --format json > api_performance_results.json
                        '''
                        archiveArtifacts artifacts: 'api_performance_results.json', fingerprint: true
                    }
                }
                stage('UI Performance') {
                    agent {
                        docker {
                            image 'node:16-alpine'
                        }
                    }
                    steps {
                        sh 'npm install -g @loadfocus/loadfocus-api-client'
                        sh 'loadfocus-api config set apikey $LOADFOCUS_API_KEY'
                        sh 'loadfocus-api config set teamid $LOADFOCUS_TEAM_ID'
                        sh '''
                            loadfocus-api jmeter run-test \
                                --name "UI_Performance_Test" \
                                --thresholds "avgresponse<=300,errors==0" \
                                --format json > ui_performance_results.json
                        '''
                        archiveArtifacts artifacts: 'ui_performance_results.json', fingerprint: true
                    }
                }
            }
        }
        // Next stages...
    }
}
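
By default, each parallel branch keeps running even if a sibling fails. To abort the remaining test as soon as one branch fails, declarative pipelines support failFast on the stage that contains the parallel block; a minimal sketch reusing the stages above:

stage('Performance Tests') {
    failFast true
    parallel {
        // ... the API Performance and UI Performance stages shown above ...
    }
}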

Scripted Pipeline

For more flexibility, use a scripted pipeline. Note that the readJSON step used below comes from the Pipeline Utility Steps plugin:

node {
    def performanceTestPassed = false
    stage('Checkout') {
        checkout scm
    }
    stage('Build & Test') {
        // Your build and test steps
    }
    stage('Performance Test') {
        docker.image('node:16-alpine').inside {
            withCredentials([
                string(credentialsId: 'loadfocus-api-key', variable: 'LOADFOCUS_API_KEY'),
                string(credentialsId: 'loadfocus-team-id', variable: 'LOADFOCUS_TEAM_ID')
            ]) {
                sh 'npm install -g @loadfocus/loadfocus-api-client'
                sh 'loadfocus-api config set apikey $LOADFOCUS_API_KEY'
                sh 'loadfocus-api config set teamid $LOADFOCUS_TEAM_ID'
                try {
                    sh '''
                        loadfocus-api jmeter run-test \
                            --name "Jenkins_${JOB_NAME}_${BUILD_NUMBER}" \
                            --thresholds "avgresponse<=200,errors==0,p95<=250" \
                            --format json > performance_results.json
                    '''
                    // Check if test passed by examining the JSON
                    def testResults = readJSON file: 'performance_results.json'
                    if (testResults.overallResult == 'PASSED') {
                        performanceTestPassed = true
                        echo "Performance test passed!"
                    } else {
                        echo "Performance test failed to meet thresholds!"
                        // Optional: Fail the build
                        // error "Performance test failed"
                    }
                } catch (Exception e) {
                    echo "Error running performance test: ${e.message}"
                }
                archiveArtifacts artifacts: 'performance_results.json', fingerprint: true
            }
        }
    }
    stage('Deploy') {
        if (performanceTestPassed) {
            echo 'Deploying...'
            // Your deployment steps
        } else {
            echo 'Skipping deployment due to performance test failure'
        }
    }
}

Shared Library

Create a shared library for reusable performance testing:

// vars/performanceTest.groovy
def call(Map config = [:]) {
    def testName = config.testName ?: "Jenkins_${env.JOB_NAME}_${env.BUILD_NUMBER}"
    def thresholds = config.thresholds ?: "avgresponse<=200,errors==0,p95<=250"
    def waitTimeout = config.waitTimeout ?: 1800
    def resultsFile = config.resultsFile ?: "performance_results.json"
    docker.image('node:16-alpine').inside {
        withCredentials([
            string(credentialsId: 'loadfocus-api-key', variable: 'LOADFOCUS_API_KEY'),
            string(credentialsId: 'loadfocus-team-id', variable: 'LOADFOCUS_TEAM_ID')
        ]) {
            sh 'npm install -g @loadfocus/loadfocus-api-client'
            sh 'loadfocus-api config set apikey $LOADFOCUS_API_KEY'
            sh 'loadfocus-api config set teamid $LOADFOCUS_TEAM_ID'
            sh """
                loadfocus-api jmeter run-test \\
                    --name "${testName}" \\
                    --thresholds "${thresholds}" \\
                    --waitTimeout ${waitTimeout} \\
                    --format json > ${resultsFile}
            """
            archiveArtifacts artifacts: resultsFile, fingerprint: true
            // Return the test results
            def testResults = readJSON file: resultsFile
            return testResults
        }
    }
}

Then, after registering the library as a global pipeline library in Jenkins, call it from your Jenkinsfile:

@Library('my-shared-library') _

pipeline {
    agent any
    stages {
        stage('Performance Test') {
            steps {
                script {
                    def results = performanceTest(
                        testName: "API_Performance_Test",
                        thresholds: "avgresponse<=150,errors==0"
                    )
                    if (results.overallResult != 'PASSED') {
                        error "Performance test failed"
                    }
                }
            }
        }
    }
}

Integration with Jenkins Plugins

Performance Plugin

Use the Jenkins Performance Plugin to visualize test results:

  1. Install the Performance Plugin in Jenkins
  2. Convert LoadFocus results to a format the plugin can parse, such as JMeter CSV or JUnit XML (see the conversion sketch after the pipeline snippet below)
  3. Configure your pipeline:
stage('Performance Test') {
    steps {
        // Run LoadFocus test
        sh '''
            loadfocus-api jmeter run-test \
                --name "Jenkins_${JOB_NAME}_${BUILD_NUMBER}" \
                --thresholds "avgresponse<=200,errors==0,p95<=250" \
                --format json > performance_results.json

            # Convert to JMeter CSV format (using a custom script)
            node convert-to-jmeter.js performance_results.json performance_results.csv
        '''
        // Use Performance Plugin
        perfReport sourceDataFiles: 'performance_results.csv',
                   errorFailedThreshold: 0,
                   errorUnstableThreshold: 0,
                   errorUnstableResponseTimeThreshold: '200'
    }
}
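
Note that convert-to-jmeter.js is not part of the LoadFocus client; you have to supply it yourself. If you would rather avoid a separate Node script, here is a hedged in-pipeline alternative that builds a minimal JMeter-style CSV. It assumes the JSON shape used elsewhere in this guide (a labels array with metrics.avgresponse and metrics.errors), needs the Pipeline Utility Steps plugin for readJSON, and the exact column set the Performance Plugin's parser expects may need adjusting:

// Drop this script block into the steps above, in place of the node call.
script {
    def results = readJSON file: 'performance_results.json'
    // Minimal JMeter CSV header; extend if the plugin's parser needs more columns.
    def rows = ['timeStamp,elapsed,label,responseCode,responseMessage,threadName,dataType,success,bytes']
    results.labels.each { l ->
        // One aggregate row per label, using the average response time
        // and treating any errors as a failed sample.
        def ok = (l.metrics.errors == 0) ? 'true' : 'false'
        rows << "${System.currentTimeMillis()},${l.metrics.avgresponse},${l.label},200,OK,main,text,${ok},0"
    }
    writeFile file: 'performance_results.csv', text: rows.join('\n')
}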

Email Notification

Send email notifications with the test results (the emailext step requires the Email Extension plugin):

post {
    always {
        script {
            if (fileExists('performance_results.json')) {
                def results = readJSON file: 'performance_results.json'
                def resultStatus = results.overallResult == 'PASSED' ? 'SUCCESS' : 'FAILURE'
                def subject = "Performance Test ${resultStatus}: ${env.JOB_NAME} #${env.BUILD_NUMBER}"
                // Create email body
                def body = """
                    <h2>Performance Test Results</h2>
                    <p><strong>Overall Result:</strong> ${results.overallResult}</p>
                    <h3>Results by Label</h3>
                    <table border="1">
                    <tr><th>Label</th><th>Result</th><th>Avg Response</th><th>Errors</th></tr>
                """
                results.labels.each { label ->
                    body += """
                        <tr>
                        <td>${label.label}</td>
                        <td>${label.result}</td>
                        <td>${label.metrics.avgresponse}ms</td>
                        <td>${label.metrics.errors}</td>
                        </tr>
                    """
                }
                body += "</table>"
                emailext(
                    subject: subject,
                    body: body,
                    to: 'team@example.com',
                    attachmentsPattern: 'performance_results.json',
                    mimeType: 'text/html'
                )
            }
        }
    }
}

Tips for Jenkins Integration

  1. Timeout Handling: Set timeouts for long-running performance tests:

    stage('Performance Test') {
        options {
            timeout(time: 60, unit: 'MINUTES')
        }
        steps {
            // Performance test steps
        }
    }
  2. Conditional Execution: Run performance tests only on specific branches:

    stage('Performance Test') {
        when {
            anyOf {
                branch 'main'
                branch 'develop'
                tag pattern: "v\\d+\\.\\d+\\.\\d+", comparator: "REGEXP"
            }
        }
        steps {
            // Performance test steps
        }
    }
  3. Scheduled Testing: Run performance tests on a schedule:

    pipeline {
        agent any
        triggers {
            cron('0 0 * * *') // Run at midnight every day
        }
        stages {
            // Pipeline stages
        }
    }
  4. Parameterized Tests: Allow customization of test parameters:

    pipeline {
        agent any
        parameters {
            string(name: 'TEST_NAME', defaultValue: 'API_Performance_Test', description: 'Name of the LoadFocus test to run')
            string(name: 'THRESHOLDS', defaultValue: 'avgresponse<=200,errors==0', description: 'Performance thresholds')
            string(name: 'WAIT_TIMEOUT', defaultValue: '1800', description: 'Maximum wait time in seconds')
        }
        stages {
            stage('Performance Test') {
                steps {
                    // Run test with parameters
                    sh """
                        loadfocus-api jmeter run-test \\
                            --name "${params.TEST_NAME}" \\
                            --thresholds "${params.THRESHOLDS}" \\
                            --waitTimeout ${params.WAIT_TIMEOUT} \\
                            --format json > performance_results.json
                    """
                }
            }
        }
    }

For more information, refer to the Jenkins documentation and the LoadFocus API Client documentation.