templates/deployment/spark-operator/spark-application-base-values.yaml.vm

metadata:
    name: ${sparkApplicationName}
sparkApp:
  spec:
    type: ${sparkApplicationType}
    image: "${dockerProjectRepositoryUrl}${projectName}-spark-worker-docker:latest"
    #if (${isJavaPipeline})
    mainClass: ${mainClass}
    #end
    mainApplicationFile: "local:///opt/spark/jobs/pipelines/${mainApplicationFile}"
    deps:
      packages:
        - mysql:mysql-connector-java:${versionMysqlConnector}
        #if (${useS3Local})
        - org.apache.hadoop:hadoop-aws:${versionHadoop}
        - com.amazonaws:aws-java-sdk-bundle:${versionAwsSdkBundle}
        #end
        #if (${enableNeo4jSupport})
        - org.neo4j:neo4j-connector-apache-spark_2.12:${versionNeo4j}
        #end
        #if (${enableElasticsearchSupport})
        - org.elasticsearch:elasticsearch-spark-30_2.12:${versionElasticsearch}
        #end
        #if (${enableDeltaSupport})
        - io.delta:delta-spark_2.12:${versionDelta}
        - io.delta:delta-storage:${versionDelta}
        #end
        #if (${enableSedonaSupport})
        - org.apache.sedona:sedona-sql-3.0_2.12:${versionSedona}
        #if (${enablePySparkSupport})
        - org.apache.sedona:sedona-python-adapter-3.0_2.12:${versionSedona}
        - org.datasyslab:geotools-wrapper:${versionGeotools}
        #end
        #end
        #if (${enableRDBMSSupport})
        - org.postgresql:postgresql:${versionPostgresql}
        #end
      excludePackages: []
    hadoopConf:
      #if (${useS3Local})
      fs.s3a.fast.upload: "true"
      fs.s3a.path.style.access: "true"
      #end
    driver:
      cores: 1
      coreLimit: "1200m"
      memory: "2048m"
      #if (${useS3Local})
      # Set up these secret key references within your SealedSecret
##      # See our guide for using SealedSecrets in your project to learn more
##      # TODO: LINK-TO-GUIDE-HERE
      envFrom:
        - secretRef:
            name: remote-auth-config
      #end
      env:
        - name: KRAUSENING_BASE
          value: /opt/spark/krausening/base
        #if (${enableFileStore})
        #foreach (${fileStore} in ${fileStores})
        - name: "${fileStore.getName()}_FS_PROVIDER"
          value: "REPLACE ME: YOUR FILE STORE PROVIDER GOES HERE!"
        - name: "${fileStore.getName()}_FS_ACCESS_KEY_ID"
          valueFrom:
            secretKeyRef:
              name: remote-auth-config
              key: AWS_ACCESS_KEY_ID
        - name: "${fileStore.getName()}_FS_SECRET_ACCESS_KEY"
          valueFrom:
            secretKeyRef:
              name: remote-auth-config
              key: AWS_SECRET_ACCESS_KEY
        #end
        #end
        #if (${enableDataLineageSupport})
        - name: ENABLE_LINEAGE
          value: "true"
        #end
      #if (${isJavaPipeline})
      javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base"
      #end
    executor:
      cores: 1
      memory: "4096m"
      #if (${useS3Local})
      envFrom:
        - secretRef:
            name: remote-auth-config
      #end
      env:
        - name: KRAUSENING_BASE
          value: /opt/spark/krausening/base
        #if (${enableDataLineageSupport})
        - name: ENABLE_LINEAGE
          value: "true"
        #end
      #if (${isJavaPipeline})
      javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base"
      #end
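For reference, here is a sketch of what the Velocity engine might render from this template for a Java pipeline with useS3Local enabled. Every concrete value below (application name, registry URL, main class, jar name, dependency versions) is an illustrative placeholder, not a default shipped with the template:

metadata:
    name: example-pipeline
sparkApp:
  spec:
    type: Java
    image: "registry.example.com/example-project-spark-worker-docker:latest"
    mainClass: com.example.pipeline.ExamplePipelineDriver   # emitted because isJavaPipeline is true
    mainApplicationFile: "local:///opt/spark/jobs/pipelines/example-pipeline.jar"
    deps:
      packages:
        - mysql:mysql-connector-java:8.0.33                 # placeholder version
        - org.apache.hadoop:hadoop-aws:3.3.4                # emitted because useS3Local is true
        - com.amazonaws:aws-java-sdk-bundle:1.12.262
      excludePackages: []
    hadoopConf:
      fs.s3a.fast.upload: "true"
      fs.s3a.path.style.access: "true"
    # driver and executor sections follow the template above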
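If enableFileStore is set and fileStores contains a single store whose getName() returns MODELS (a hypothetical name), the #foreach loop in the driver env section expands to:

        - name: "MODELS_FS_PROVIDER"
          value: "REPLACE ME: YOUR FILE STORE PROVIDER GOES HERE!"
        - name: "MODELS_FS_ACCESS_KEY_ID"
          valueFrom:
            secretKeyRef:
              name: remote-auth-config
              key: AWS_ACCESS_KEY_ID
        - name: "MODELS_FS_SECRET_ACCESS_KEY"
          valueFrom:
            secretKeyRef:
              name: remote-auth-config
              key: AWS_SECRET_ACCESS_KEY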
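Both the envFrom and secretKeyRef entries above expect a Kubernetes Secret named remote-auth-config carrying the keys AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY (those names come from the template itself). A minimal sketch of a matching Secret, which per the driver-section comment you would typically encrypt into a SealedSecret (e.g. with kubeseal) rather than commit in plain form:

apiVersion: v1
kind: Secret
metadata:
  name: remote-auth-config        # name referenced by envFrom/secretKeyRef in the template
type: Opaque
stringData:                       # stringData accepts plain values; Kubernetes base64-encodes them
  AWS_ACCESS_KEY_ID: "REPLACE-ME"
  AWS_SECRET_ACCESS_KEY: "REPLACE-ME"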