Skip to content

Example


What Will You Do

In this exercise, you will create a Jenkins pipeline to

  • Use version controlled specs from a Git repo and use Terraform to provision a GKE cluster
  • Import the base cluster into Rafay and apply a cluster blueprint based on specs in the Git repo.

The overall pipeline will look like the following:

Jenkins, Terraform and Rafay


Assumptions

  • You have an operational Jenkins environment
  • You have credentials to provision infrastructure and clusters on Google Cloud Platform
  • You have access to a Rafay Org.

1st Pipeline : Provision GKE Cluster

This is the first pipeline in Jenkins. It uses version-controlled Terraform configuration from a Git repo to provision a GKE cluster.

pipeline {
    agent {
        node {
            label 'master'
        }
    }

    environment {
        // Cluster identity: name, GCP region and project used by the Terraform configs.
        CLUSTER_NAME = 'demo-cicd-gke-cluster'
        REGION = 'us-west1'
        PROJECT = 'lan-gke'
        // Jenkins "Secret file" credential: resolves to the path of the GCP
        // service-account key file on the agent.
        GOOGLE_APPLICATION_CREDENTIALS = credentials('lan-gke-sa')
        // Run Terraform inside the official container image. The Jenkins
        // workspace is bind-mounted at /app (the working directory) and the
        // service-account key at /account.json so the google provider can
        // authenticate inside the container.
        // Single quotes are deliberate: ${HOME}, $GOOGLE_APPLICATION_CREDENTIALS
        // and `pwd` are expanded by the shell at step run time, not by Groovy.
        // NOTE(review): the previous version also mounted ${HOME}/.aws, which a
        // GCP-only pipeline does not need — dropped to avoid exposing AWS
        // credentials to the container.
        TERRAFORM_CMD = 'docker run --network host -w /app -v ${HOME}/.ssh:/root/.ssh --mount type=bind,source=$GOOGLE_APPLICATION_CREDENTIALS,target=/account.json --env GOOGLE_APPLICATION_CREDENTIALS=/account.json -v `pwd`:/app hashicorp/terraform:light'

    }

    stages {

        /* Connect the pipeline to the Git repo where the
         * version-controlled Terraform configs are stored.
         */
        stage('checkout repo') {
            steps {
                checkout([$class: 'GitSCM', branches: [[name: '*/master']], doGenerateSubmoduleConfigurations: false, extensions: [], submoduleCfg: [], userRemoteConfigs: [[credentialsId: 'git-rafay-demo', url: 'https://github.com/rafay/test-gke-cluster']]])
            }
        }

        /* Pull the latest Terraform container image from
         * Hashicorp's Docker Hub repository.
         */
        stage('pull latest light terraform image') {
            steps {
                sh  """
                    docker pull hashicorp/terraform:light
                    """
            }
        }

        /* Download any modules referenced by the configuration. */
        stage('get') {
            steps {
                sh  """
                    ${TERRAFORM_CMD} get
                    """
            }
        }

        /* Initialize the working directory (providers, backend).
         * -input=false makes Terraform fail fast instead of prompting
         * for input, which would hang a non-interactive CI run.
         */
        stage('init') {
            steps {
                sh  """
                    ${TERRAFORM_CMD} init -input=false
                    """
            }
        }

        /* Create a Terraform plan from the configs in the Git repo and
         * save it to tfplan so the apply stage runs exactly this plan.
         */
        stage('plan') {
            steps {
                sh  """
                    ${TERRAFORM_CMD} plan -input=false -out=tfplan
                    """

            }
        }

        /* Apply the saved plan from the previous stage. State locking is
         * left at its default (enabled): running with -lock=false in CI
         * risks state corruption when two builds overlap.
         */
        stage('apply') {
            steps {
                sh  """
                    ${TERRAFORM_CMD} apply -input=false tfplan
                    """
            }
        }
    }
}

2nd Pipeline: Create Addons

In this pipeline, we will create addons in a Rafay Project. This is typically a one-time task in a given project. Therefore, depending on how your pipeline is configured, this can be an optional step.

#!/usr/bin/env groovy

pipeline {
    agent any

    /* Rafay credentials come from the Jenkins credential store so
     * they never appear in the pipeline definition itself.
     */
    environment {
        RAFAY_USERNAME = credentials('demo-rafay-username')
        RAFAY_PASSWORD = credentials('demo-rafay-password')
    }

    stages {

        /* Fetch the helper scripts and the version-controlled
         * addon specs from the Git repository.
         */
        stage('Checkout Repo') {
            steps {
                checkout([
                    $class: 'GitSCM',
                    branches: [[name: '*/master']],
                    doGenerateSubmoduleConfigurations: false,
                    extensions: [],
                    submoduleCfg: [],
                    userRemoteConfigs: [[url: 'https://github.com/rafay/ci-cd-helpers']]
                ])
            }
        }

        /* Register a custom addon (a Datadog agent) in the Rafay
         * project, driven by the YAML spec checked out above.
         */
        stage('Add Custom Addon For Datadog Agent') {
            steps {
                sh label: '', script: '''
            chmod +x rafay_addon.sh
            ./rafay_addon.sh -u $RAFAY_USERNAME -p $RAFAY_PASSWORD -f demo-rafay-datadog-addon.yaml
            '''
            }
        }
    }
}

3rd Pipeline: Create Blueprint

In this pipeline, we will create a custom cluster blueprint in a Rafay Project. This is typically a one-time task in a given project. Therefore, depending on how your pipeline is configured, this can be an optional step.

#!/usr/bin/env groovy

pipeline {
    agent any

    /* Rafay credentials come from the Jenkins credential store so
     * they never appear in the pipeline definition itself.
     */
    environment {
        RAFAY_USERNAME = credentials('demo-rafay-username')
        RAFAY_PASSWORD = credentials('demo-rafay-password')
    }

    stages {

        /* Fetch the helper scripts and the version-controlled
         * cluster blueprint specs from the Git repository.
         */
        stage('Checkout Repo') {
            steps {
                checkout([
                    $class: 'GitSCM',
                    branches: [[name: '*/master']],
                    doGenerateSubmoduleConfigurations: false,
                    extensions: [],
                    submoduleCfg: [],
                    userRemoteConfigs: [[url: 'https://github.com/rafay/ci-cd-helpers']]
                ])
            }
        }

        /* Create the custom cluster blueprint in Rafay, driven by
         * the YAML spec checked out above.
         */
        stage('Create A Custom Blueprint') {
            steps {
                sh label: '', script: '''
            chmod +x rafay_blueprint.sh
            ./rafay_blueprint.sh -u $RAFAY_USERNAME -p $RAFAY_PASSWORD -f demo-rafay-custom-blueprint.yaml
            '''
            }
        }
    }
}

4th Pipeline: Import Cluster

This pipeline automates the import of an existing cluster (provisioned earlier) into Rafay.

#!/usr/bin/env groovy

pipeline {
  agent any

  /* Environment needed to locate the GKE cluster and to authenticate
   * against both GCP and Rafay.
   */
  environment {
      RAFAY_USERNAME = credentials('demo-rafay-username')
      RAFAY_PASSWORD = credentials('demo-rafay-password')
      CLUSTER_NAME = 'demo-cicd-gke-cluster'
      // Must match the location used at provisioning time: the first
      // pipeline created the cluster in region 'us-west1'. The previous
      // value 'us-west1-a' is a *zone* and would make the --region
      // lookup in the get-credentials stage fail.
      REGION = 'us-west1'
      PROJECT = 'lan-gke'
      // Jenkins "Secret file" credential: path to the GCP service-account key.
      GOOGLE_APPLICATION_CREDENTIALS = credentials('lan-gke-sa')
  }
  stages {

    /* Connect to the Git repo holding the helper scripts and the
     * version-controlled cluster specifications in YAML format.
     */
    stage("Checkout Repo") {
      steps {
            checkout([$class: 'GitSCM', branches: [[name: '*/master']], doGenerateSubmoduleConfigurations: false, extensions: [], submoduleCfg: [], userRemoteConfigs: [[url: 'https://github.com/rafay/ci-cd-helpers']]])

      }
    }

    /* Fetch the cluster's kubeconfig from GCP so later stages can
     * talk to the cluster. get-credentials is GA, so the 'beta'
     * command track is not needed.
     */
    stage('Download cluster kubeconfig') {
      steps {
            sh  """
                gcloud container clusters get-credentials $CLUSTER_NAME --region $REGION --project $PROJECT
                """
      }
    }

    /* Install kubectl on the agent and verify connectivity to the
     * cluster by listing its nodes.
     * NOTE(review): assumes the Jenkins agent user has passwordless
     * sudo for the move into /usr/local/bin — confirm on your agents.
     */
    stage('Kubectl to cluster') {
      steps {
            sh  """
                curl -LO https://storage.googleapis.com/kubernetes-release/release/`curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt`/bin/linux/amd64/kubectl
                chmod +x ./kubectl
                sudo mv ./kubectl /usr/local/bin/kubectl
                kubectl get nodes
                """
      }
    }

    /* Import the cluster into Rafay using the helper script. This
     * deploys the Rafay Kubernetes Operator on the cluster with the
     * custom cluster blueprint (-a applies via the local kubeconfig).
     */
    stage("Create Imported GKE Cluster In Rafay With Custom Blueprint") {
        steps {
            sh label: '', script: '''
            chmod +x rafay_imported_cluster.sh
            ./rafay_imported_cluster.sh -u $RAFAY_USERNAME -p $RAFAY_PASSWORD -f demo-rafay-imported-gke-cluster.yaml -a
            '''
        }
    }
  }
}

Recap

Congratulations! You developed automation to provision a GKE cluster using Terraform and bring it under Rafay management for visibility, monitoring and workload operations.

Users can connect the different pipelines together so that they cascade one after another to achieve modularity. The example below showcases four distinct pipelines connected together to achieve full automation.

Jenkins, Terraform and Rafay