Adds file based secret support in CLI and adds integration tests for it
farhan5900 committed Oct 23, 2020
1 parent a31c1ce commit a18e124
Showing 4 changed files with 134 additions and 63 deletions.
cli/dcos-spark/security.go: 27 changes (21 additions, 6 deletions)
@@ -5,8 +5,10 @@ import (
 	"encoding/base64"
 	"errors"
 	"fmt"
-	"github.com/mesosphere/dcos-commons/cli/client"
+	"path/filepath"
 	"strings"
+
+	"github.com/mesosphere/dcos-commons/cli/client"
 )

const KEYLENGTH = 128
@@ -41,8 +43,7 @@ func prepareBase64Secret(secretPath string) string {
 		panic("Secret path cannot be empty")
 	}
 
-	absoluteSecretPath := strings.Split(secretPath, "/")
-	filename := absoluteSecretPath[len(absoluteSecretPath)-1]
+	_, filename := filepath.Split(secretPath)
 	// secrets with __dcos_base64__ will be decoded by Mesos, but remove the prefix here
 	if strings.HasPrefix(filename, "__dcos_base64__") {
 		return strings.TrimPrefix(filename, "__dcos_base64__")
@@ -121,15 +122,29 @@ func SetupSASL(args *sparkArgs) {
 	if args.saslSecret != "" {
 		setupSaslProperties(args)
 	}
+	if args.saslSecretPath != "" {
+		setupFileBasedSaslProperties(args)
+	}
 }
 
 func setupSaslProperties(args *sparkArgs) {
-	secretPath := args.saslSecret
+	secretValue := args.saslSecret
 	args.properties["spark.mesos.containerizer"] = "mesos"
 	args.properties["spark.authenticate"] = "true"
 	args.properties["spark.authenticate.enableSaslEncryption"] = "true"
-	args.properties["spark.authenticate.secret"] = "spark_shared_secret"
-	args.properties["spark.executorEnv._SPARK_AUTH_SECRET"] = "spark_shared_secret"
+	args.properties["spark.authenticate.secret"] = secretValue
+	args.properties["spark.executorEnv._SPARK_AUTH_SECRET"] = secretValue
+}
+
+func setupFileBasedSaslProperties(args *sparkArgs) {
+	secretPath := args.saslSecretPath
+
+	args.properties["spark.mesos.containerizer"] = "mesos"
+	args.properties["spark.authenticate"] = "true"
+	args.properties["spark.authenticate.enableSaslEncryption"] = "true"
+	args.properties["spark.authenticate.secret.file"] = prepareBase64Secret(secretPath)
+	args.properties["spark.executorEnv._SPARK_AUTH_SECRET_FILE"] = prepareBase64Secret(secretPath)
+
 	for _, taskType := range TASK_TYPES {
 		appendToProperty(SECRET_REFERENCE_PROPERTIES[taskType], secretPath, args)
 		appendToProperty(SECRET_FILENAME_PROPERTIES[taskType], prepareBase64Secret(secretPath), args)
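For context, a minimal standalone sketch of the filename handling this change switches to, using the filepath.Split call and the __dcos_base64__ convention shown above. The example paths are hypothetical, and the final return of the unmodified filename is assumed, since the function body is truncated in this diff:

	package main

	import (
		"fmt"
		"path/filepath"
		"strings"
	)

	// prepareBase64Secret (sketch): keep only the last path component of the
	// secret path; secrets named with the __dcos_base64__ prefix are decoded
	// by Mesos before mounting, so the prefix is stripped from the filename.
	func prepareBase64Secret(secretPath string) string {
		_, filename := filepath.Split(secretPath)
		if strings.HasPrefix(filename, "__dcos_base64__") {
			return strings.TrimPrefix(filename, "__dcos_base64__")
		}
		return filename // assumed fallthrough for ordinary secret names
	}

	func main() {
		fmt.Println(prepareBase64Secret("/path/to/sparkauth"))             // sparkauth
		fmt.Println(prepareBase64Secret("/path/to/__dcos_base64__secret")) // secret
	}

Compared with the old strings.Split approach, filepath.Split handles the same inputs with less code and no manual slice indexing.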
cli/dcos-spark/submit_builder.go: 20 changes (12 additions, 8 deletions)
@@ -51,6 +51,7 @@ type sparkArgs struct {
 	truststoreSecretPath string
 	truststorePassword   string
 	saslSecret           string
+	saslSecretPath       string
 	propertiesFile       string
 	properties           map[string]string
 
@@ -79,6 +80,7 @@ func NewSparkArgs() *sparkArgs {
 		"",
 		"",
 		"",
+		"",
 		make(map[string]string),
 		make([]*sparkVal, 0),
 		make([]*sparkVal, 0),
@@ -173,8 +175,10 @@ Args:
 		PlaceHolder("__dcos_base64__truststore").Default("").StringVar(&args.truststoreSecretPath)
 	submit.Flag("truststore-password", "A password to the truststore.").
 		Default("").StringVar(&args.truststorePassword)
-	submit.Flag("executor-auth-secret", "Path to secret 'cookie' to use for Executor authentication "+
-		"block transfer encryption. Make one with dcos spark secret").Default("").StringVar(&args.saslSecret)
+	submit.Flag("executor-auth-secret", "Value of the secret to use for RPC authentication "+
+		"between Driver and Executors").Default("").StringVar(&args.saslSecret)
+	submit.Flag("executor-auth-secret-path", "Path to secret 'cookie' to use for RPC authentication "+
+		"between Driver and Executors. Make one with dcos spark secret").Default("").StringVar(&args.saslSecretPath)
 	submit.Flag("isR", "Force using SparkR").Default("false").BoolVar(&args.isR)
 	submit.Flag("isPython", "Force using Python").Default("false").BoolVar(&args.isPython)
 
@@ -223,10 +227,10 @@ Args:
 	args.stringVals = append(args.stringVals, val)
 
 	val = newSparkVal("packages", "spark.jars.packages", "Comma-separated list of maven coordinates of jars to include "+
 		"on the driver and executor classpaths. Will search the local maven repo, then maven central and any additional remote "+
 		"repositories given by --repositories. The format for the coordinates should be groupId:artifactId:version")
 	val.flag(submit).StringVar(&val.s)
 	args.stringVals = append(args.stringVals, val)
 
 	val = newSparkVal("py-files", "spark.submit.pyFiles", "Add .py, .zip or .egg files to "+
 		"be distributed with your application. If you depend on multiple Python files we recommend packaging them "+
@@ -307,7 +311,7 @@ func transformSubmitArgs(argsStr string, boolVals []*sparkVal) ([]string, []string) {
 	sparkArgs, appArgs := make([]string, 0), make([]string, 0)
 
 	args = processJarsFlag(args)
 	args = processPackagesFlag(args)
 
 LOOP:
 	for i := 0; i < len(args); {
@@ -640,7 +644,7 @@ func buildSubmitJson(cmd *SparkCommand, marathonConfig map[string]interface{}) (string, error) {
 		"action": "CreateSubmissionRequest",
 		"appArgs": args.appArgs,
 		"appResource": args.app.String(),
-		"clientSparkVersion": "2.0.0",
+		"clientSparkVersion": "3.0.0",
 		"environmentVariables": cmd.submitEnv,
 		"mainClass": args.mainClass,
 		"sparkProperties": args.properties,
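Taken together with the security.go changes above, a submission using the new flag would look roughly like this (a hedged sketch: the jar URL and secret path are hypothetical, `dcos spark secret` is the generator referenced in the flag help, and the job is passed through the CLI's usual --submit-args entry point):

	dcos spark secret /sparkauth
	dcos spark run --submit-args="--executor-auth-secret-path /sparkauth --class org.apache.spark.examples.SparkPi http://example.com/spark-examples.jar 100"

Note the split in behavior: --executor-auth-secret now carries the secret value itself, while --executor-auth-secret-path points at a secret-store path whose contents are mounted as a file in the driver and executor containers via the spark.mesos.*.secret.* properties.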
cli/dcos-spark/submit_builder_test.go: 127 changes (82 additions, 45 deletions)
@@ -4,6 +4,7 @@ import (
 	"encoding/json"
 	"fmt"
 	"os"
+	"path/filepath"
 	"testing"
 
 	"github.com/stretchr/testify/assert"
@@ -20,6 +21,7 @@ const principal = "client@local"
 const keytabPrefixed = "__dcos_base64__keytab"
 const keytab = "keytab"
 const sparkAuthSecret = "spark-auth-secret"
+const sparkAuthSecretPath = "path/to/spark-auth-secret"
 const marathonAppId = "spark-app"
 
 var marathonConfig = map[string]interface{}{"app": map[string]interface{}{"id": marathonAppId}}
@@ -127,35 +129,35 @@ func (suite *CliTestSuite) TestProcessMultiJarsFlagWithSpace() {
 
 func (suite *CliTestSuite) TestProcessPackagesFlag() {
 	_, args := sparkSubmitArgSetup()
 	inputArgs := "--conf spark.cores.max=8 --packages=groupid:artifactid:version app/packages/main.jar 100"
 	expected := []string{"--conf=spark.cores.max=8",
 		"--packages=groupid:artifactid:version",
 		"--conf=spark.jars.ivy=" + mesosSandboxPath + "/.ivy2",
 		"app/packages/main.jar"}
 	actual, _ := transformSubmitArgs(inputArgs, args.boolVals)
 	assert.Equal(suite.T(), expected, actual)
 }
 
 func (suite *CliTestSuite) TestProcessMultiPackagesFlag() {
 	_, args := sparkSubmitArgSetup()
 	inputArgs := "--conf spark.cores.max=8 --packages=groupid1:artifactid1:version1,groupid2:artifactid2:version2 app/packages/main.jar 100"
 	expected := []string{"--conf=spark.cores.max=8",
 		"--packages=groupid1:artifactid1:version1,groupid2:artifactid2:version2",
 		"--conf=spark.jars.ivy=" + mesosSandboxPath + "/.ivy2",
 		"app/packages/main.jar"}
 	actual, _ := transformSubmitArgs(inputArgs, args.boolVals)
 	assert.Equal(suite.T(), expected, actual)
 }
 
 func (suite *CliTestSuite) TestProcessMultiPackagesFlagWithSpace() {
 	_, args := sparkSubmitArgSetup()
 	inputArgs := "--conf spark.cores.max=8 --packages groupid1:artifactid1:version1,groupid2:artifactid2:version2 app/packages/main.jar 100"
 	expected := []string{"--conf=spark.cores.max=8",
 		"--packages=groupid1:artifactid1:version1,groupid2:artifactid2:version2",
 		"--conf=spark.jars.ivy=" + mesosSandboxPath + "/.ivy2",
 		"app/packages/main.jar"}
 	actual, _ := transformSubmitArgs(inputArgs, args.boolVals)
 	assert.Equal(suite.T(), expected, actual)
 }

func (suite *CliTestSuite) TestIsSparkApp() {
@@ -367,7 +369,7 @@ func (suite *CliTestSuite) TestPayloadWithSecret() {
 
 func (suite *CliTestSuite) TestSaslSecret() {
 	inputArgs := fmt.Sprintf(
-		"--executor-auth-secret /%s "+
+		"--executor-auth-secret %s "+
 		"--class %s "+
 		"%s --input1 value1 --input2 value2", sparkAuthSecret, mainClass, appJar)
 
@@ -386,10 +388,46 @@ func (suite *CliTestSuite) TestSaslSecret() {
 		"spark.authenticate": "true",
 		"spark.mesos.containerizer": "mesos",
 		"spark.authenticate.enableSaslEncryption": "true",
-		"spark.authenticate.secret": "spark_shared_secret",
-		"spark.executorEnv._SPARK_AUTH_SECRET": "spark_shared_secret",
-		"spark.mesos.driver.secret.filenames": sparkAuthSecret,
-		"spark.mesos.driver.secret.names": fmt.Sprintf("/%s", sparkAuthSecret),
+		"spark.authenticate.secret": sparkAuthSecret,
+		"spark.executorEnv._SPARK_AUTH_SECRET": sparkAuthSecret,
 	}
 
 	v, ok := m["sparkProperties"].(map[string]interface{})
 	if !ok {
 		suite.T().Errorf("%+v", ok)
 	}
 
 	suite.checkProps(v, stringProps)
 }
 
+func (suite *CliTestSuite) TestSaslFileBasedSecret() {
+	inputArgs := fmt.Sprintf(
+		"--executor-auth-secret-path /%s "+
+		"--class %s "+
+		"%s --input1 value1 --input2 value2", sparkAuthSecretPath, mainClass, appJar)
+
+	_, sparkAuthSecretFile := filepath.Split(fmt.Sprintf("/%s", sparkAuthSecretPath))
+	cmd := createCommand(inputArgs, image)
+	payload, err := buildSubmitJson(&cmd, marathonConfig)
+
+	m := make(map[string]interface{})
+
+	json.Unmarshal([]byte(payload), &m)
+
+	if err != nil {
+		suite.T().Errorf("%s", err.Error())
+	}
+
+	stringProps := map[string]string{
+		"spark.authenticate": "true",
+		"spark.mesos.containerizer": "mesos",
+		"spark.authenticate.enableSaslEncryption": "true",
+		"spark.authenticate.secret.file": sparkAuthSecretFile,
+		"spark.executorEnv._SPARK_AUTH_SECRET_FILE": sparkAuthSecretFile,
+		"spark.mesos.driver.secret.filenames": sparkAuthSecretFile,
+		"spark.mesos.driver.secret.names": fmt.Sprintf("/%s", sparkAuthSecretPath),
+		"spark.mesos.executor.secret.filenames": sparkAuthSecretFile,
+		"spark.mesos.executor.secret.names": fmt.Sprintf("/%s", sparkAuthSecretPath),
+	}
+
+	v, ok := m["sparkProperties"].(map[string]interface{})
+	if !ok {
+		suite.T().Errorf("%+v", ok)
+	}
+
+	suite.checkProps(v, stringProps)
+}
@@ -403,30 +441,29 @@
 func (suite *CliTestSuite) TestPackagesFlag() {
 	sparkPackages := "group.one.id:artifact-one-id:version.one,group.two.id:artifact-two-id:version.two"
 	inputArgs := fmt.Sprintf(
-		"--packages %s "+
-		"--class %s "+
-		"%s --input1 value1 --input2 value2", sparkPackages, mainClass, appJar)
+		"--packages %s --class %s %s --input1 value1 --input2 value2",
+		sparkPackages, mainClass, appJar)
 
 	cmd := createCommand(inputArgs, image)
 	payload, err := buildSubmitJson(&cmd, marathonConfig)
 
 	jsonMap := make(map[string]interface{})
 
 	json.Unmarshal([]byte(payload), &jsonMap)
 
 	if err != nil {
 		suite.T().Errorf("%s", err.Error())
 	}
 
 	stringProps := map[string]string{
 		"spark.jars.ivy": mesosSandboxPath + "/.ivy2",
 		"spark.jars.packages": sparkPackages,
 	}
 
 	sparkProps, ok := jsonMap["sparkProperties"].(map[string]interface{})
 	if !ok {
 		suite.T().Errorf("%+v", ok)
 	}
 
 	suite.checkProps(sparkProps, stringProps)
 }
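The assertions above rely on a checkProps helper that this diff does not show. A plausible reconstruction, purely for illustration (hypothetical; the real helper lives elsewhere in submit_builder_test.go):

	// checkProps (sketch): every expected Spark property must be present in
	// the submission payload with exactly the expected value.
	func (suite *CliTestSuite) checkProps(actual map[string]interface{}, expected map[string]string) {
		for key, want := range expected {
			got, ok := actual[key]
			assert.True(suite.T(), ok, "missing property %s", key)
			assert.Equal(suite.T(), want, got)
		}
	}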
tests/test_spark.py: 23 changes (19 additions, 4 deletions)
@@ -161,18 +161,33 @@ def test_jar(service_name=utils.SPARK_SERVICE_NAME):
 @pytest.mark.sanity
 @pytest.mark.smoke
 def test_rpc_auth():
-    secret_name = "sparkauth"
+    auth_secret = "spark_shared_secret"
 
-    sdk_security.delete_secret(secret_name)
-    rc, _, _ = sdk_cmd.run_raw_cli("{} --verbose secret /{}".format(utils.SPARK_PACKAGE_NAME, secret_name))
+    utils.run_tests(
+        app_url=utils.SPARK_EXAMPLES,
+        app_args="100",
+        expected_output="Pi is roughly 3",
+        service_name=utils.SPARK_SERVICE_NAME,
+        args=["--executor-auth-secret {}".format(auth_secret),
+              "--class org.apache.spark.examples.SparkPi"])
+
+
+@sdk_utils.dcos_ee_only
+@pytest.mark.sanity
+@pytest.mark.smoke
+def test_rpc_filebased_auth():
+    auth_secret_file = "sparkauth"
+
+    sdk_security.delete_secret(auth_secret_file)
+    rc, _, _ = sdk_cmd.run_raw_cli("{} --verbose secret /{}".format(utils.SPARK_PACKAGE_NAME, auth_secret_file))
+    assert rc == 0, "Failed to generate Spark auth secret"
+
     utils.run_tests(
         app_url=utils.SPARK_EXAMPLES,
         app_args="100",
         expected_output="Pi is roughly 3",
         service_name=utils.SPARK_SERVICE_NAME,
-        args=["--executor-auth-secret {}".format(secret_name),
+        args=["--executor-auth-secret-path {}".format(auth_secret_file),
               "--class org.apache.spark.examples.SparkPi"])

