See practical examples of how to use Kuba with different applications, frameworks, and deployment scenarios.

Node.js Express Application

Run a Node.js Express application with database credentials and API keys:

kuba run --env production -- node app.js

kuba.yaml:

production:
  provider: gcp
  project: 1337
  env:
    DATABASE_URL:
      secret-key: "prod-database-url"
    JWT_SECRET:
      secret-key: "jwt-secret"
    STRIPE_SECRET_KEY:
      secret-key: "stripe-secret-key"
    REDIS_URL:
      value: "redis://${REDIS_HOST:-localhost}:6379"

app.js:

const express = require('express');
const app = express();

// Environment variables are automatically available
const dbUrl = process.env.DATABASE_URL;
const jwtSecret = process.env.JWT_SECRET;
const stripeKey = process.env.STRIPE_SECRET_KEY;
const redisUrl = process.env.REDIS_URL;

console.log('Database URL:', dbUrl);
console.log('Redis URL:', redisUrl);

app.listen(3000, () => {
  console.log('Server running on port 3000');
});

Python Flask Application

Run a Python Flask application with environment-specific configurations:

kuba run --env development -- python app.py

kuba.yaml:

development:
  provider: aws
  env:
    FLASK_ENV:
      value: "development"
    DATABASE_URL:
      secret-key: "dev-database-url"
    SECRET_KEY:
      secret-key: "flask-secret-key"
    DEBUG:
      value: "true"

app.py:

from flask import Flask
import os

app = Flask(__name__)

# Environment variables are automatically available
app.config['DATABASE_URL'] = os.environ.get('DATABASE_URL')
app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY')
app.config['DEBUG'] = os.environ.get('DEBUG', 'false').lower() == 'true'

print(f"Database URL: {app.config['DATABASE_URL']}")
print(f"Debug mode: {app.config['DEBUG']}")

if __name__ == '__main__':
    app.run(debug=app.config['DEBUG'])

Database Migrations

Run database migrations with production credentials:

# Run migrations with production database credentials
kuba run --env production -- npm run migrate

# Run seed data with development database
kuba run --env development -- npm run seed

kuba.yaml:

production:
  provider: gcp
  project: 1337
  env:
    DATABASE_URL:
      secret-key: "prod-postgres-url"
    DB_PASSWORD:
      secret-key: "prod-db-password"

development:
  provider: aws
  env:
    DATABASE_URL:
      secret-key: "dev-postgres-url"
    DB_PASSWORD:
      secret-key: "dev-db-password"

External API Integration

Connect to external APIs with secure keys:

kuba run --env staging -- python api_client.py

kuba.yaml:

staging:
  provider: azure
  env:
    STRIPE_API_KEY:
      secret-key: "stripe-staging-key"
    SENDGRID_API_KEY:
      secret-key: "sendgrid-staging-key"
    TWILIO_ACCOUNT_SID:
      secret-key: "twilio-account-sid"
    TWILIO_AUTH_TOKEN:
      secret-key: "twilio-auth-token"

api_client.py:

import os
import stripe
import sendgrid
from twilio.rest import Client

# API keys are automatically available
stripe.api_key = os.environ.get('STRIPE_API_KEY')
sendgrid_client = sendgrid.SendGridAPIClient(
    api_key=os.environ.get('SENDGRID_API_KEY')
)
twilio_client = Client(
    os.environ.get('TWILIO_ACCOUNT_SID'),
    os.environ.get('TWILIO_AUTH_TOKEN')
)

print("Stripe API key configured:", bool(stripe.api_key))
print("SendGrid API key configured:", bool(os.environ.get('SENDGRID_API_KEY')))
print("Twilio credentials configured:", bool(os.environ.get('TWILIO_ACCOUNT_SID')))

Docker

Run Docker containers with environment variables from Kuba:

# Build image with secrets available during build
kuba run --env production -- docker build \
  --build-arg DATABASE_URL \
  --build-arg API_KEY \
  -t myapp .

# Run container with secrets as environment variables
kuba run --env production -- docker run \
  -e DATABASE_URL \
  -e API_KEY \
  -e REDIS_URL \
  -p 3000:3000 \
  myapp

# Use --contain to avoid inheriting host environment
docker run --env-file=<(kuba run --env production --contain -- env) myapp

# or pass full host environment including Kuba-managed vars
docker run --env-file=<(kuba run --env production -- env) myapp

Dockerfile:

FROM node:18-alpine

WORKDIR /app

COPY package*.json ./
RUN npm ci --omit=dev

COPY . .

# Build arguments for secrets
ARG DATABASE_URL
ARG API_KEY

# Set environment variables
ENV DATABASE_URL=$DATABASE_URL
ENV API_KEY=$API_KEY

EXPOSE 3000

CMD ["npm", "start"]

Docker Compose

Use Kuba with Docker Compose for multi-service applications:

# Start all services with production secrets
kuba run --env production -- docker-compose up -d

# Start specific service with development secrets
kuba run --env development -- docker-compose up web

docker-compose.yml:

version: '3.8'
services:
  web:
    build: .
    ports:
      - "3000:3000"
    environment:
      - DATABASE_URL
      - API_KEY
      - REDIS_URL
    depends_on:
      - db
      - redis

  db:
    image: postgres:15
    environment:
      - POSTGRES_DB=myapp
      - POSTGRES_USER=myapp
      - POSTGRES_PASSWORD
    volumes:
      - postgres_data:/var/lib/postgresql/data

  redis:
    image: redis:7-alpine
    ports:
      - "6379:6379"

volumes:
  postgres_data:

GitHub Actions

Integrate Kuba into GitHub Actions workflows:

name: Deploy to Production

on:
  push:
    branches: [main]

jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Setup Go
        uses: actions/setup-go@v4
        with:
          go-version: '1.21'

      - name: Install Kuba
        run: |
          curl -sSL https://kuba.mwco.app/install.sh | bash

      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1

      - name: Build and deploy
        run: |
          kuba run --env production -- npm run build
          kuba run --env production -- npm run deploy

GitLab CI/CD

Use Kuba in GitLab CI/CD pipelines:

stages:
  - test
  - deploy

variables:
  KUBA_CONFIG_FILE: $CI_PROJECT_DIR/kuba.yaml

test:
  stage: test
  image: node:18
  before_script:
    - curl -sSL https://kuba.mwco.app/install.sh | bash
  script:
    - kuba run --env testing -- npm test
  only:
    - merge_requests

deploy:
  stage: deploy
  image: node:18
  before_script:
    - curl -sSL https://kuba.mwco.app/install.sh | bash
  script:
    - kuba run --env production -- npm run deploy
  only:
    - main

Local Development

Use Kuba for local development without managing .env files:

# Start development server
kuba run --env development -- npm run dev

# Run tests
kuba run --env testing -- npm test

# Run database migrations
kuba run --env development -- npm run migrate

# Start background services
kuba run --env development -- npm run start:services

kuba.yaml:

development:
  provider: gcp
  project: 1337
  env:
    DATABASE_URL:
      secret-key: "dev-database-url"
    API_KEY:
      secret-key: "dev-api-key"
    DEBUG:
      value: "true"
    LOG_LEVEL:
      value: "debug"

testing:
  provider: gcp
  project: 1337
  env:
    DATABASE_URL:
      secret-key: "test-database-url"
    API_KEY:
      secret-key: "test-api-key"
    NODE_ENV:
      value: "test"

Team Configuration

Share configuration templates with your team:

# kuba.yaml (commit this to version control)
default:
  provider: gcp
  project: 1337
  env:
    DATABASE_URL:
      secret-key: "database-url"
    API_KEY:
      secret-key: "api-key"
    REDIS_URL:
      value: "redis://${REDIS_HOST:-localhost}:6379"

development:
  provider: gcp
  project: 1337
  env:
    DATABASE_URL:
      secret-key: "dev-database-url"
    DEBUG:
      value: "true"

Instructions for team members:

  1. Set up authentication for your cloud provider
  2. Create the necessary secrets in your cloud provider (see the sketch below)
  3. Run kuba run --env development -- npm run dev
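
For the GCP-backed configuration above, steps 1 and 2 might look like the sketch below. The connection string is a placeholder; substitute your own values and repeat the create step for each secret key named in kuba.yaml.

# 1. Authenticate to Google Cloud (Application Default Credentials)
gcloud auth application-default login

# 2. Create a secret referenced by the development environment (placeholder value)
printf '%s' 'postgres://user:pass@localhost:5432/myapp' | \
  gcloud secrets create dev-database-url --data-file=-

# 3. Start the development server through Kuba
kuba run --env development -- npm run dev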

Secret Paths

Use secret paths to bulk-load related secrets:

production:
  provider: gcp
  project: 1337
  env:
    # Individual secrets
    APP_ENV:
      value: "production"

    # Database secrets (bulk load)
    DB:
      secret-path: "database"

    # API keys (bulk load)
    API:
      secret-path: "external-apis"

    # Service secrets (bulk load)
    SERVICE:
      secret-path: "microservices"

    # Interpolated connection strings
    DATABASE_URL:
      value: "postgresql://${DB_USERNAME}:${DB_PASSWORD}@${DB_HOST}:${DB_PORT}/${DB_NAME}"

    REDIS_URL:
      value: "redis://${REDIS_HOST:-localhost}:${REDIS_PORT:-6379}/0"

Note: This configuration will create environment variables like DB_USERNAME, DB_PASSWORD, API_STRIPE_KEY, SERVICE_AUTH_TOKEN, etc.
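
To see exactly which variable names a bulk load produces, you can list the contained environment without printing the secret values (a quick check, assuming the secret paths above exist in your project):

# Print only the names of Kuba-managed variables, ignoring the host environment
kuba run --env production --contain -- env | cut -d= -f1 | grep -E '^(DB_|API_|SERVICE_)' | sort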

Multiple Providers

Use different cloud providers for different types of secrets:

production:
  provider: gcp
  project: 1337
  env:
    # GCP secrets
    GCP_PROJECT_ID:
      secret-key: "project-id"

    # AWS secrets
    AWS_ACCESS_KEY:
      secret-key: "aws-access-key"
      provider: aws

    # Azure secrets
    AZURE_TENANT_ID:
      secret-key: "tenant-id"
      provider: azure
      project: "my-azure-project"

    # OpenBao secrets
    INTERNAL_API_KEY:
      secret-key: "internal-api-key"
      provider: openbao

    # Hard-coded values
    APP_ENV:
      value: "production"
    DEBUG:
      value: "false"
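
To confirm that values from each provider land in the same process environment, you can check for their presence without echoing the values themselves (a sketch using standard shell parameter expansion):

kuba run --env production -- sh -c '
  echo "GCP_PROJECT_ID:   ${GCP_PROJECT_ID:+set}"
  echo "AWS_ACCESS_KEY:   ${AWS_ACCESS_KEY:+set}"
  echo "AZURE_TENANT_ID:  ${AZURE_TENANT_ID:+set}"
  echo "INTERNAL_API_KEY: ${INTERNAL_API_KEY:+set}"
'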

Next Steps

Configuration Guide: learn more about advanced configuration options and best practices.

Cloud Providers Guide: set up authentication and permissions for your cloud providers.