Skip to content

Commit

Permalink
Initial test framework and tests (#46)
Browse files Browse the repository at this point in the history
* Add initial test framework
* Typing fixups
* Add ioloop fixtures
* Revert previous, add pytest-jupyter and warning filters
* Bump min vers on some packages for min-versions test
* Increase timeout on min versions test, use py3.9
* Disable minimum version testing after long timeouts
* Complete lifecycle test for yarn, k8s, and docker provisioners
* Add hatch env for tests
* Address code-cov analysis
  • Loading branch information
kevin-bates authored Jan 26, 2023
1 parent 4c54f69 commit 9c4ce6c
Show file tree
Hide file tree
Showing 20 changed files with 965 additions and 70 deletions.
47 changes: 24 additions & 23 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -75,19 +75,20 @@ jobs:
- name: Build Docs
run: make docs

test_minimum_versions:
name: Test Minimum Versions
timeout-minutes: 20
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
with:
python_version: "3.8"
- uses: jupyterlab/maintainer-tools/.github/actions/install-minimums@v1
- name: Run the unit tests
run: |
make test
# Disabled for now, timed out after 20 and 30 minute attempts
# test_minimum_versions:
# name: Test Minimum Versions
# timeout-minutes: 20
# runs-on: ubuntu-latest
# steps:
# - uses: actions/checkout@v3
# - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
# with:
# python_version: "3.9"
# - uses: jupyterlab/maintainer-tools/.github/actions/install-minimums@v1
# - name: Run the unit tests
# run: |
# make test

make_sdist:
name: Make SDist
Expand All @@ -98,24 +99,24 @@ jobs:
- uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
- uses: jupyterlab/maintainer-tools/.github/actions/make-sdist@v1

# test_sdist:
# name: Install from SDist and Test
# runs-on: ubuntu-latest
# needs: [make_sdist]
# timeout-minutes: 20
# steps:
# - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
# - uses: jupyterlab/maintainer-tools/.github/actions/test-sdist@v1
test_sdist:
name: Install from SDist and Test
runs-on: ubuntu-latest
needs: [make_sdist]
timeout-minutes: 20
steps:
- uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
- uses: jupyterlab/maintainer-tools/.github/actions/test-sdist@v1

python_tests_check: # This job does nothing and is only used for the branch protection
name: Check Jobs
if: always()
needs:
- build
- link_check
- test_minimum_versions
# - test_minimum_versions
- build_docs
# - test_sdist
- test_sdist
runs-on: ubuntu-latest
steps:
- name: Decide whether the needed jobs succeeded or failed
Expand Down
3 changes: 1 addition & 2 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -71,8 +71,7 @@ lint: build-dependencies ## check style with flake8
pre-commit run --all-files

test: ## run tests quickly with the default Python
@echo "No tests exist!"
# pytest -v --cov gateway_provisioners gateway_provisioners
hatch run test:test

docs: ## generate Sphinx HTML documentation, including API docs
hatch run docs:api
Expand Down
10 changes: 5 additions & 5 deletions gateway_provisioners/container.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import os
import signal
from abc import abstractmethod
from typing import Any, Optional
from typing import Any, Dict, List, Optional, Set

import urllib3 # docker ends up using this and it causes lots of noise, so turn off warnings
from jupyter_client import localinterfaces
Expand Down Expand Up @@ -70,7 +70,7 @@ def has_process(self) -> bool:
return self.container_name is not None

@overrides
async def pre_launch(self, **kwargs: Any) -> dict[str, Any]:
async def pre_launch(self, **kwargs: Any) -> Dict[str, Any]:
# Unset assigned_host, ip, and node_ip in pre-launch, otherwise, these screw up restarts
self.assigned_host = ""
self.assigned_ip = None
Expand All @@ -91,7 +91,7 @@ async def pre_launch(self, **kwargs: Any) -> dict[str, Any]:
return kwargs

@overrides
def log_kernel_launch(self, cmd: list[str]) -> None:
def log_kernel_launch(self, cmd: List[str]) -> None:
self.log.info(
f"{self.__class__.__name__}: kernel launched. Kernel image: {self.image_name}, "
f"KernelID: {self.kernel_id}, cmd: '{cmd}'"
Expand Down Expand Up @@ -198,7 +198,7 @@ async def confirm_remote_startup(self):
self.detect_launch_failure()

@overrides
async def get_provisioner_info(self) -> dict[str, Any]:
async def get_provisioner_info(self) -> Dict[str, Any]:
"""Captures the base information necessary for kernel persistence relative to containers."""
provisioner_info = await super().get_provisioner_info()
provisioner_info.update(
Expand All @@ -215,7 +215,7 @@ async def load_provisioner_info(self, provisioner_info: dict) -> None:
self.assigned_node_ip = provisioner_info.get("assigned_node_ip")

@abstractmethod
def get_initial_states(self) -> set[str]:
def get_initial_states(self) -> Set[str]:
"""Return list of states indicating container is starting (includes running)."""
raise NotImplementedError

Expand Down
13 changes: 7 additions & 6 deletions gateway_provisioners/distributed.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,8 @@
import subprocess
import warnings
from socket import gethostbyname, gethostname
from typing import Any, Optional
from typing import Any, Dict, Optional
from typing import List as tyList

import paramiko
from jupyter_client import KernelConnectionInfo, launch_kernel
Expand Down Expand Up @@ -101,7 +102,7 @@ def _load_balancing_algorithm_default(self) -> str:
)

@validate("load_balancing_algorithm")
def _validate_load_balancing_algorithm(self, proposal: dict[str, str]) -> str:
def _validate_load_balancing_algorithm(self, proposal: Dict[str, str]) -> str:
value = proposal["value"]
try:
assert value in ["round-robin", "least-connection"]
Expand Down Expand Up @@ -135,7 +136,7 @@ def has_process(self) -> bool:
return self.local_proc is not None or (self.ip is not None and self.pid > 0)

@overrides
async def launch_kernel(self, cmd: list[str], **kwargs: Any) -> KernelConnectionInfo:
async def launch_kernel(self, cmd: tyList[str], **kwargs: Any) -> KernelConnectionInfo:
"""
Launches a kernel process on a selected host.
Expand Down Expand Up @@ -216,14 +217,14 @@ async def confirm_remote_startup(self):
ready_to_connect = await self.receive_connection_info()

@overrides
def log_kernel_launch(self, cmd: list[str]) -> None:
def log_kernel_launch(self, cmd: tyList[str]) -> None:
self.log.info(
f"{self.__class__.__name__}: kernel launched. Host: '{self.assigned_host}', "
f"pid: {self.pid}, Kernel ID: {self.kernel_id}, "
f"Log file: {self.assigned_host}:{self.kernel_log}, cmd: '{cmd}'."
)

def _launch_remote_process(self, cmd: list[str], **kwargs: Any):
def _launch_remote_process(self, cmd: tyList[str], **kwargs: Any):
"""
Launch the kernel as indicated by the argv stanza in the kernelspec. Note that this method
will bypass use of ssh if the remote host is also the local machine.
Expand All @@ -248,7 +249,7 @@ def _launch_remote_process(self, cmd: list[str], **kwargs: Any):

return result_pid

def _build_startup_command(self, cmd: list[str], **kwargs: Any) -> list[str]:
def _build_startup_command(self, cmd: tyList[str], **kwargs: Any) -> tyList[str]:
"""
Builds the command to invoke by concatenating envs from kernelspec followed by the kernel argvs.
Expand Down
10 changes: 5 additions & 5 deletions gateway_provisioners/docker_swarm.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
"""Code related to managing kernels running in docker-based containers."""
import logging
import os
from typing import Any, Optional
from typing import Any, Dict, Optional, Set

from overrides import overrides

Expand Down Expand Up @@ -38,7 +38,7 @@ def __init__(self, **kwargs):
self.client = DockerClient.from_env()

@overrides
async def pre_launch(self, **kwargs: Any) -> dict[str, Any]:
async def pre_launch(self, **kwargs: Any) -> Dict[str, Any]:
kwargs = await super().pre_launch(**kwargs)

# Convey the network to the docker launch script
Expand All @@ -47,7 +47,7 @@ async def pre_launch(self, **kwargs: Any) -> dict[str, Any]:
return kwargs

@overrides
def get_initial_states(self) -> set[str]:
def get_initial_states(self) -> Set[str]:
return {"preparing", "starting", "running"}

@overrides
Expand Down Expand Up @@ -164,7 +164,7 @@ def __init__(self, **kwargs):
self.client = DockerClient.from_env()

@overrides
async def pre_launch(self, **kwargs: Any) -> dict[str, Any]:
async def pre_launch(self, **kwargs: Any) -> Dict[str, Any]:
kwargs = await super().pre_launch(**kwargs)

# Convey the network to the docker launch script
Expand All @@ -173,7 +173,7 @@ async def pre_launch(self, **kwargs: Any) -> dict[str, Any]:
return kwargs

@overrides
def get_initial_states(self) -> set[str]:
def get_initial_states(self) -> Set[str]:
return {"created", "running"}

@overrides
Expand Down
14 changes: 9 additions & 5 deletions gateway_provisioners/k8s.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import logging
import os
import re
from typing import Any, Optional
from typing import Any, Dict, Optional, Set

import urllib3
from overrides import overrides
Expand Down Expand Up @@ -42,7 +42,11 @@

app_name = os.environ.get("GP_APP_NAME", "k8s-provisioner")

if not os.environ.get("SPHINX_BUILD_IN_PROGRESS", ""):
if (
"SPHINX_BUILD_IN_PROGRESS" not in os.environ
and "PYTEST_CURRENT_TEST" not in os.environ
and "PYTEST_RUN_CONFIG" not in os.environ
):
if bool(os.environ.get("GP_USE_INCLUSTER_CONFIG", "True").lower() == "true"):
config.load_incluster_config()
else:
Expand All @@ -64,7 +68,7 @@ def __init__(self, **kwargs):
self.restarting = False

@overrides
async def pre_launch(self, **kwargs: Any) -> dict[str, Any]:
async def pre_launch(self, **kwargs: Any) -> Dict[str, Any]:
# Set env before superclass call so we see these in the debug output

# Kubernetes relies on many internal env variables. Since we're running in a k8s pod, we will
Expand All @@ -79,7 +83,7 @@ async def pre_launch(self, **kwargs: Any) -> dict[str, Any]:
return kwargs

@overrides
async def get_provisioner_info(self) -> dict[str, Any]:
async def get_provisioner_info(self) -> Dict[str, Any]:
provisioner_info = await super().get_provisioner_info()
provisioner_info.update(
{
Expand All @@ -96,7 +100,7 @@ async def load_provisioner_info(self, provisioner_info: dict) -> None:
self.delete_kernel_namespace = provisioner_info["delete_ns"]

@overrides
def get_initial_states(self) -> set[str]:
def get_initial_states(self) -> Set[str]:
return {"Pending", "Running"}

@overrides
Expand Down
30 changes: 17 additions & 13 deletions gateway_provisioners/remote_provisioner.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from abc import abstractmethod
from enum import Enum
from socket import AF_INET, SHUT_WR, SOCK_STREAM, socket, timeout
from typing import Any, Optional
from typing import Any, Dict, List, Optional, Tuple

import pexpect
from jupyter_client import (
Expand Down Expand Up @@ -68,6 +68,10 @@ class KernelChannel(Enum):
)


def gp_launch_kernel(cmd: list, **kwargs):
    """Launch a kernel process by delegating to ``jupyter_client.launch_kernel``.

    NOTE(review): this is a pure pass-through wrapper; presumably it exists as a
    module-level indirection seam so tests can monkeypatch kernel launches
    without patching ``jupyter_client`` itself — TODO confirm against the test
    suite introduced in this commit.

    Parameters
    ----------
    cmd : list
        The argv-style command used to start the kernel process.
    **kwargs
        Additional keyword arguments forwarded unchanged to ``launch_kernel``
        (e.g., Popen-style options).

    Returns
    -------
    Whatever ``launch_kernel`` returns (a Popen-like process object per
    ``jupyter_client``'s API).
    """
    return launch_kernel(cmd, **kwargs)


class RemoteProvisionerBase(RemoteProvisionerConfigMixin, KernelProvisionerBase):
"""Base class for remote provisioners."""

Expand Down Expand Up @@ -105,7 +109,7 @@ def has_process(self) -> bool:
pass

@overrides
async def pre_launch(self, **kwargs: Any) -> dict[str, Any]:
async def pre_launch(self, **kwargs: Any) -> Dict[str, Any]:
self.response_manager.register_event(self.kernel_id)

cmd = self.kernel_spec.argv # Build launch command, provide substitutions
Expand Down Expand Up @@ -140,10 +144,10 @@ def from_ns(match):
return kwargs

@overrides
async def launch_kernel(self, cmd: list[str], **kwargs: Any) -> KernelConnectionInfo:
async def launch_kernel(self, cmd: List[str], **kwargs: Any) -> KernelConnectionInfo:

launch_kwargs = RemoteProvisionerBase._scrub_kwargs(kwargs)
self.local_proc = launch_kernel(cmd, **launch_kwargs)
self.local_proc = gp_launch_kernel(cmd, **launch_kwargs)
self.pid = self.local_proc.pid
self.ip = local_ip

Expand Down Expand Up @@ -213,7 +217,7 @@ async def shutdown_requested(self, restart: bool = False) -> None:
await self.shutdown_listener(restart)

@overrides
async def get_provisioner_info(self) -> dict[str, Any]:
async def get_provisioner_info(self) -> Dict[str, Any]:
provisioner_info = await super().get_provisioner_info()
provisioner_info.update(
{
Expand Down Expand Up @@ -246,7 +250,7 @@ def get_shutdown_wait_time(self, recommended: Optional[float] = 5.0) -> float:
return recommended

@overrides
def _finalize_env(self, env: dict[str, str]) -> None:
def _finalize_env(self, env: Dict[str, str]) -> None:

# add the applicable kernel_id and language to the env dict
env["KERNEL_ID"] = self.kernel_id
Expand All @@ -262,17 +266,17 @@ def _finalize_env(self, env: dict[str, str]) -> None:
env.pop(k, None)

@staticmethod
def _scrub_kwargs(kwargs: dict[str, Any]) -> dict[str, Any]:
def _scrub_kwargs(kwargs: Dict[str, Any]) -> Dict[str, Any]:
"""Remove any keyword arguments that Popen does not tolerate."""
keywords_to_scrub: list[str] = ["extra_arguments", "kernel_id"]
keywords_to_scrub: List[str] = ["extra_arguments", "kernel_id"]
scrubbed_kwargs = kwargs.copy()
for kw in keywords_to_scrub:
scrubbed_kwargs.pop(kw, None)

return scrubbed_kwargs

@abstractmethod
def log_kernel_launch(self, cmd: list[str]) -> None:
def log_kernel_launch(self, cmd: List[str]) -> None:
"""Logs the kernel launch from the respective remote provisioner"""
pass

Expand Down Expand Up @@ -482,7 +486,7 @@ def _raise_authorization_error(self, differentiator_clause):
)
self.log_and_raise(PermissionError(error_message))

def _validate_port_range(self) -> tuple[int, int]:
def _validate_port_range(self) -> Tuple[int, int]:
"""Validates the port range configuration option to ensure appropriate values."""

lower_port = upper_port = 0
Expand Down Expand Up @@ -850,7 +854,7 @@ def _spawn_ssh_tunnel(
)
return pexpect.spawn(cmd, env=os.environ.copy().pop("SSH_ASKPASS", None))

def _select_ports(self, count: int) -> list[int]:
def _select_ports(self, count: int) -> List[int]:
"""
Selects and returns n random ports that adhere to the configured port range, if applicable.
Expand All @@ -863,8 +867,8 @@ def _select_ports(self, count: int) -> list[int]:
-------
List - ports available and adhering to the configured port range
"""
ports: list[int] = []
sockets: list[socket] = []
ports: List[int] = []
sockets: List[socket] = []
for _i in range(count):
sock = self._select_socket()
ports.append(sock.getsockname()[1])
Expand Down
Loading

0 comments on commit 9c4ce6c

Please sign in to comment.