Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Adjustments on project structure #13

Draft
wants to merge 5 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
File renamed without changes.
7 changes: 3 additions & 4 deletions spark8t/cli/pyspark.py → spark8t/cli/bin/pyspark.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,16 +4,15 @@
import re
from typing import Optional

from spark8t.cli.params import (
from spark8t.domain import ServiceAccount
from spark8t.lib.params import (
add_config_arguments,
add_logging_arguments,
defaults,
get_kube_interface,
k8s_parser,
parse_arguments_with,
spark_user_parser,
)
from spark8t.domain import ServiceAccount
from spark8t.lib.process_settings import defaults, get_kube_interface
from spark8t.services import K8sServiceAccountRegistry, SparkInterface

if __name__ == "__main__":
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,16 +4,16 @@
from argparse import ArgumentParser
from enum import Enum

from spark8t.cli.params import (
from spark8t.domain import PropertyFile, ServiceAccount
from spark8t.exceptions import NoAccountFound
from spark8t.lib.params import (
add_config_arguments,
add_logging_arguments,
get_kube_interface,
k8s_parser,
parse_arguments_with,
spark_user_parser,
)
from spark8t.domain import PropertyFile, ServiceAccount
from spark8t.exceptions import NoAccountFound
from spark8t.lib.process_settings import get_kube_interface
from spark8t.services import K8sServiceAccountRegistry, parse_conf_overrides


Expand Down
7 changes: 3 additions & 4 deletions spark8t/cli/spark_shell.py → spark8t/cli/bin/spark_shell.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,16 +4,15 @@
import re
from typing import Optional

from spark8t.cli.params import (
from spark8t.domain import ServiceAccount
from spark8t.lib.params import (
add_config_arguments,
add_logging_arguments,
defaults,
get_kube_interface,
k8s_parser,
parse_arguments_with,
spark_user_parser,
)
from spark8t.domain import ServiceAccount
from spark8t.lib.process_settings import defaults, get_kube_interface
from spark8t.services import K8sServiceAccountRegistry, SparkInterface

if __name__ == "__main__":
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,17 +4,16 @@
import re
from typing import Optional

from spark8t.cli.params import (
from spark8t.domain import ServiceAccount
from spark8t.lib.params import (
add_config_arguments,
add_deploy_arguments,
add_logging_arguments,
defaults,
get_kube_interface,
k8s_parser,
parse_arguments_with,
spark_user_parser,
)
from spark8t.domain import ServiceAccount
from spark8t.lib.process_settings import defaults, get_kube_interface
from spark8t.services import K8sServiceAccountRegistry, SparkInterface

if __name__ == "__main__":
Expand Down
File renamed without changes.
15 changes: 1 addition & 14 deletions spark8t/cli/params.py → spark8t/cli/lib/params.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
from argparse import ArgumentParser, Namespace
from argparse import ArgumentParser
from typing import Callable, List, Optional

from spark8t.cli import defaults
from spark8t.services import AbstractKubeInterface, KubeInterface, LightKube


def parse_arguments_with(
parsers: List[Callable[[ArgumentParser], ArgumentParser]],
Expand Down Expand Up @@ -119,13 +116,3 @@ def add_deploy_arguments(parser: ArgumentParser) -> ArgumentParser:
choices=["client", "cluster"],
)
return parser


def get_kube_interface(args: Namespace) -> AbstractKubeInterface:
    """Build the Kubernetes interface selected by the CLI arguments.

    Args:
        args: parsed CLI namespace; reads ``backend``, ``kubeconfig`` and
            ``context``.

    Returns:
        A ``LightKube`` backend when ``--backend lightkube`` was requested,
        otherwise a kubectl-based ``KubeInterface``.
    """
    # Fall back to the packaged default kubeconfig when none was given.
    kubeconfig = args.kubeconfig or defaults.kube_config
    if args.backend == "lightkube":
        return LightKube(kubeconfig, defaults)
    return KubeInterface(kubeconfig, context_name=args.context)
17 changes: 17 additions & 0 deletions spark8t/cli/lib/process_settings.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
from argparse import Namespace

from spark8t.cli import defaults
from spark8t.domain import Defaults
from spark8t.services import AbstractKubeInterface, KubeInterface, LightKube


def get_kube_interface(args: Namespace) -> AbstractKubeInterface:
    """Build the Kubernetes interface selected by the CLI arguments.

    Args:
        args: parsed CLI namespace; reads ``backend``, ``kubeconfig`` and
            ``context``.

    Returns:
        A ``LightKube`` backend when ``--backend lightkube`` was requested,
        otherwise a kubectl-based ``KubeInterface``.
    """
    # Fall back to the packaged default kubeconfig when none was given.
    kubeconfig = args.kubeconfig or defaults.kube_config
    if args.backend == "lightkube":
        return LightKube(kubeconfig, defaults)
    return KubeInterface(
        kubeconfig,
        context_name=args.context,
        # Fix: was ``Defaults().kubectl_cmd``, which built a throwaway
        # Defaults from the live environment while the LightKube branch
        # used the shared ``defaults`` object.  Use the same configured
        # instance for both backends.
        kubectl_cmd=defaults.kubectl_cmd,
    )
Empty file added spark8t/core/__init__.py
Empty file.
89 changes: 89 additions & 0 deletions spark8t/core/domain/defaults.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
class Defaults:
"""Class containing all relevant defaults for the application."""

def __init__(self, environ: Dict = dict(os.environ)):
"""Initialize a Defaults class using the value contained in a dictionary

Args:
environ: dictionary representing the environment. Default uses the os.environ key-value pairs.
"""

self.environ = environ if environ is not None else {}

@property
def spark_home(self):
return self.environ["SPARK_HOME"]

@property
def spark_confs(self):
return self.environ.get("SPARK_CONFS", os.path.join(self.spark_home, "conf"))

@property
def spark_user_data(self):
return self.environ["SPARK_USER_DATA"]

@property
def kubectl_cmd(self) -> str:
"""Return default kubectl command."""
return self.environ.get("SPARK_KUBECTL", "kubectl")

@property
def kube_config(self) -> str:
"""Return default kubeconfig to use if not explicitly provided."""
return self.environ["KUBECONFIG"]

@property
def static_conf_file(self) -> str:
"""Return static config properties file packaged with the client artefacts."""
return f"{self.spark_confs}/spark-defaults.conf"

@property
def env_conf_file(self) -> Optional[str]:
"""Return env var provided by user to point to the config properties file with conf overrides."""
return self.environ.get("SPARK_CLIENT_ENV_CONF")

@property
def service_account(self):
return "spark"

@property
def namespace(self):
return "defaults"

@property
def scala_history_file(self):
return f"{self.spark_user_data}/.scala_history"

@property
def spark_submit(self) -> str:
return f"{self.spark_home}/bin/spark-submit"

@property
def spark_shell(self) -> str:
return f"{self.spark_home}/bin/spark-shell"

@property
def pyspark(self) -> str:
return f"{self.spark_home}/bin/pyspark"

@property
def dir_package(self) -> str:
return os.path.dirname(__file__)

@property
def template_dir(self) -> str:
return f"{self.dir_package}/resources/templates"

@property
def template_serviceaccount(self) -> str:
return f"{self.template_dir}/serviceaccount_yaml.tmpl"

@property
def template_role(self) -> str:
return f"{self.template_dir}/role_yaml.tmpl"

@property
def template_rolebinding(self) -> str:
return f"{self.template_dir}/rolebinding_yaml.tmpl"


7 changes: 7 additions & 0 deletions spark8t/core/domain/kubernetes_resource_type.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# NOTE(review): this new file shows no imports in the diff — it needs
# ``from enum import Enum``.
class KubernetesResourceType(str, Enum):
    """Kinds of Kubernetes resources handled by spark8t.

    Mixes in ``str`` so each member equals and formats as its plain string
    value.
    """

    SERVICEACCOUNT = "serviceaccount"
    ROLE = "role"
    ROLEBINDING = "rolebinding"
    SECRET = "secret"
    # Two-token value — presumably spliced into a ``kubectl create`` command
    # line; confirm against the call sites.
    SECRET_GENERIC = "secret generic"
    NAMESPACE = "namespace"
128 changes: 0 additions & 128 deletions spark8t/domain.py → spark8t/core/domain/properties_file.py
Original file line number Diff line number Diff line change
Expand Up @@ -161,131 +161,3 @@ def remove(self, keys_to_remove: List[str]) -> "PropertyFile":
else self.props
)
return self


class Defaults:
    """Class containing all relevant defaults for the application."""

    # NOTE(review): the default is evaluated once at import time and shared by
    # every instance (mutable-default-argument pitfall) — consider
    # ``environ: Optional[Dict] = None`` resolved inside the body.
    def __init__(self, environ: Dict = dict(os.environ)):
        """Initialize a Defaults class using the value contained in a dictionary

        Args:
            environ: dictionary representing the environment. Default uses the os.environ key-value pairs.
        """

        self.environ = environ if environ is not None else {}

    @property
    def spark_home(self):
        """Return the Spark installation root (required ``SPARK_HOME``)."""
        return self.environ["SPARK_HOME"]

    @property
    def spark_confs(self):
        """Return the Spark conf directory (``SPARK_CONFS`` or ``$SPARK_HOME/conf``)."""
        return self.environ.get("SPARK_CONFS", os.path.join(self.spark_home, "conf"))

    @property
    def spark_user_data(self):
        """Return the per-user data directory (required ``SPARK_USER_DATA``)."""
        return self.environ["SPARK_USER_DATA"]

    @property
    def kubectl_cmd(self) -> str:
        """Return default kubectl command."""
        return self.environ.get("SPARK_KUBECTL", "kubectl")

    @property
    def kube_config(self) -> str:
        """Return default kubeconfig to use if not explicitly provided."""
        return self.environ["KUBECONFIG"]

    @property
    def static_conf_file(self) -> str:
        """Return static config properties file packaged with the client artefacts."""
        return f"{self.spark_confs}/spark-defaults.conf"

    @property
    def env_conf_file(self) -> Optional[str]:
        """Return env var provided by user to point to the config properties file with conf overrides."""
        return self.environ.get("SPARK_CLIENT_ENV_CONF")

    @property
    def service_account(self):
        """Return the default spark service-account name."""
        return "spark"

    @property
    def namespace(self):
        # NOTE(review): "defaults" looks like a typo for the Kubernetes
        # "default" namespace — confirm before changing.
        return "defaults"

    @property
    def scala_history_file(self):
        """Return the path of the scala REPL history file."""
        return f"{self.spark_user_data}/.scala_history"

    @property
    def spark_submit(self) -> str:
        """Return the path of the ``spark-submit`` binary."""
        return f"{self.spark_home}/bin/spark-submit"

    @property
    def spark_shell(self) -> str:
        """Return the path of the ``spark-shell`` binary."""
        return f"{self.spark_home}/bin/spark-shell"

    @property
    def pyspark(self) -> str:
        """Return the path of the ``pyspark`` binary."""
        return f"{self.spark_home}/bin/pyspark"

    @property
    def dir_package(self) -> str:
        """Return the directory containing this module."""
        return os.path.dirname(__file__)

    @property
    def template_dir(self) -> str:
        """Return the directory holding the packaged YAML templates."""
        return f"{self.dir_package}/resources/templates"

    @property
    def template_serviceaccount(self) -> str:
        """Return the service-account YAML template path."""
        return f"{self.template_dir}/serviceaccount_yaml.tmpl"

    @property
    def template_role(self) -> str:
        """Return the role YAML template path."""
        return f"{self.template_dir}/role_yaml.tmpl"

    @property
    def template_rolebinding(self) -> str:
        """Return the rolebinding YAML template path."""
        return f"{self.template_dir}/rolebinding_yaml.tmpl"


@dataclass
class ServiceAccount:
    """Class representing the spark ServiceAccount domain object."""

    # Kubernetes service-account name.
    name: str
    # Namespace the account lives in.
    namespace: str
    # Kubernetes API server URL this account is bound to.
    api_server: str
    # Presumably marks the account used by default when none is named —
    # confirm against the registry code.
    primary: bool = False
    # NOTE(review): class-level default instance is shared by every
    # ServiceAccount that omits extra_confs — safe only if PropertyFile is
    # treated as immutable; confirm.
    extra_confs: PropertyFile = PropertyFile.empty()

    @property
    def id(self):
        """Return the service account id, as a concatenation of namespace and username."""
        return f"{self.namespace}:{self.name}"

    @property
    def _k8s_configurations(self):
        # Spark properties that bind a submitted job to this account/namespace.
        return PropertyFile(
            {
                "spark.kubernetes.authenticate.driver.serviceAccountName": self.name,
                "spark.kubernetes.namespace": self.namespace,
            }
        )

    @property
    def configurations(self) -> PropertyFile:
        """Return the service account configuration, associated to a given spark service account."""
        return self.extra_confs + self._k8s_configurations


class KubernetesResourceType(str, Enum):
    """Kinds of Kubernetes resources handled by spark8t.

    Mixes in ``str`` so each member equals and formats as its plain string
    value.
    """

    SERVICEACCOUNT = "serviceaccount"
    ROLE = "role"
    ROLEBINDING = "rolebinding"
    SECRET = "secret"
    # Two-token value — presumably spliced into a ``kubectl create`` command
    # line; confirm against the call sites.
    SECRET_GENERIC = "secret generic"
    NAMESPACE = "namespace"
28 changes: 28 additions & 0 deletions spark8t/core/domain/service_account.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
@dataclass
class ServiceAccount:
    """Spark service-account domain object.

    Bundles the account's identity (name + namespace), the API server it is
    bound to, and any extra Spark configuration supplied for it.
    """

    name: str
    namespace: str
    api_server: str
    primary: bool = False
    extra_confs: PropertyFile = PropertyFile.empty()

    @property
    def id(self):
        """Return the "<namespace>:<name>" identifier of this account."""
        return "{}:{}".format(self.namespace, self.name)

    @property
    def _k8s_configurations(self):
        # Spark properties that bind a submitted job to this account/namespace.
        bindings = {
            "spark.kubernetes.authenticate.driver.serviceAccountName": self.name,
            "spark.kubernetes.namespace": self.namespace,
        }
        return PropertyFile(bindings)

    @property
    def configurations(self) -> PropertyFile:
        """Return the full configuration: extra confs merged with the k8s bindings."""
        merged = self.extra_confs + self._k8s_configurations
        return merged
File renamed without changes.
Loading