Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 3 additions & 10 deletions .github/workflows/integration-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,18 +32,11 @@ jobs:
run: |
sudo apt-get install -y linux-modules-extra-$(uname -r)

- name: Set up SSH for localhost
- name: Build Docker image for integration test minions
shell: bash
run: |
sudo apt-get install -y openssh-server
sudo mkdir -p /root/.ssh
sudo chmod 700 /root/.ssh
sudo ssh-keygen -t rsa -b 4096 -N "" -f /root/.ssh/id_rsa
sudo bash -c 'cat /root/.ssh/id_rsa.pub >> /root/.ssh/authorized_keys'
sudo chmod 600 /root/.ssh/authorized_keys
sudo bash -c 'echo "PermitRootLogin yes" >> /etc/ssh/sshd_config'
sudo systemctl start ssh
sudo ssh-keyscan -H 127.0.0.1 | sudo tee -a /root/.ssh/known_hosts
docker build -t coriolis-data-minion:test \
coriolis/tests/integration/dockerfiles/data-minion/

- name: Run integration tests
shell: bash
Expand Down
7 changes: 2 additions & 5 deletions coriolis/osmorphing/osmount/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
import uuid

from oslo_log import log as logging
import paramiko
from six import with_metaclass

from coriolis import exception
Expand Down Expand Up @@ -82,10 +81,8 @@ def _connect(self):
self._event_manager.progress_update(
"Connecting through SSH to OSMorphing host on: %(ip)s:%(port)s" %
({"ip": ip, "port": port}))
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(hostname=ip, port=port, username=username, pkey=pkey,
password=password)
ssh = utils.connect_ssh(
ip, port, username, pkey=pkey, password=password)
ssh.set_log_channel("paramiko.morpher.%s.%s" % (ip, port))
self._ssh = ssh

Expand Down
34 changes: 6 additions & 28 deletions coriolis/providers/backup_writers.py
Original file line number Diff line number Diff line change
Expand Up @@ -564,20 +564,9 @@ def _copy_helper_cmd(self, ssh):
def _connect_ssh(self):
LOG.info("Connecting to SSH host: %(ip)s:%(port)s" %
{"ip": self._ip, "port": self._port})
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
try:
ssh.connect(
hostname=self._ip,
port=self._port,
username=self._username,
pkey=self._pkey,
password=self._password)
except (Exception, KeyboardInterrupt):
# No need to log the error as we just raise
ssh.close()
raise
return ssh
return utils.connect_ssh(
self._ip, self._port, self._username,
pkey=self._pkey, password=self._password)


class HTTPBackupWriterImpl(BaseBackupWriterImpl):
Expand Down Expand Up @@ -957,20 +946,9 @@ def __init__(self, ssh_conn_info, writer_port):
def _connect_ssh(self):
LOG.info("Connecting to SSH host: %(ip)s:%(port)s" %
{"ip": self._ip, "port": self._port})
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
try:
ssh.connect(
hostname=self._ip,
port=self._port,
username=self._username,
pkey=self._pkey,
password=self._password)
except (Exception, KeyboardInterrupt):
# No need to log the error as we just raise
ssh.close()
raise
return ssh
return utils.connect_ssh(
self._ip, self._port, self._username,
pkey=self._pkey, password=self._password)

def _inject_dport_allow_rule(self, ssh):
cmd = (
Expand Down
16 changes: 4 additions & 12 deletions coriolis/providers/replicator.py
Original file line number Diff line number Diff line change
Expand Up @@ -496,18 +496,10 @@ def _get_ssh_client(self, args):
"""
gets a paramiko SSH client
"""
try:
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
try:
ssh.connect(**args)
return ssh
except Exception:
ssh.close()
raise
except paramiko.ssh_exception.SSHException as ex:
raise exception.CoriolisException(
"Failed to setup SSH client: %s" % str(ex)) from ex
return utils.connect_ssh(
args["hostname"], args["port"], args["username"],
pkey=args.get("pkey"), password=args.get("password"),
banner_timeout=args.get("banner_timeout"))

def _parse_source_ssh_conn_info(self, conn_info):
# if we get valid SSH connection info we can
Expand Down
10 changes: 0 additions & 10 deletions coriolis/tests/integration/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -52,16 +52,6 @@ Key packages used by the harness:
- `keystoneauth1`: session used by `coriolisclient` (auth is bypassed in tests)
- `oslo.messaging`, `oslo.config`, `oslo.log`, `oslo.service`

### SSH key (for provider connection info)

The test provider connection info includes a `pkey_path` field that
defaults to `/root/.ssh/id_rsa`. Override it with the environment
variable `CORIOLIS_TEST_SSH_KEY_PATH` if the key lives elsewhere.

> The key is passed through to the provider's connection info dictionary
> but the smoke tests and current provider implementation do not actually
> open an SSH connection, so any readable file path satisfies the field.

### Root access

The tests must run as root because:
Expand Down
27 changes: 20 additions & 7 deletions coriolis/tests/integration/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
"""

import os
import subprocess
import time
import unittest
from unittest import mock
Expand All @@ -33,11 +34,6 @@
CONF = cfg.CONF
LOG = logging.getLogger(__name__)

# Path to the SSH private key used to connect to the (local) provider.
# Override via the CORIOLIS_TEST_SSH_KEY_PATH environment variable.
_TEST_SSH_KEY_PATH = os.environ.get(
'CORIOLIS_TEST_SSH_KEY_PATH', '/root/.ssh/id_rsa')


class CoriolisIntegrationTestBase(test_base.CoriolisBaseTestCase):
"""Base class for integration tests."""
Expand Down Expand Up @@ -108,6 +104,23 @@ def f(*args, **kwargs):


class ReplicaIntegrationTestBase(CoriolisIntegrationTestBase):

@classmethod
def setUpClass(cls):
result = subprocess.run(
["docker", "image", "inspect", test_utils.DATA_MINION_IMAGE],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
)
if result.returncode != 0:
raise unittest.SkipTest(
"Docker image not found; build it with: "
"docker build -t %s "
"coriolis/tests/integration/dockerfiles/data-minion/"
% test_utils.DATA_MINION_IMAGE)

super().setUpClass()

def setUp(self):
super().setUp()

Expand All @@ -126,7 +139,7 @@ def setUp(self):
description="integration source endpoint",
connection_info={
"block_device_path": self._src_device,
"pkey_path": _TEST_SSH_KEY_PATH,
"pkey_path": self._harness.ssh_key_path,
},
)

Expand All @@ -136,7 +149,7 @@ def setUp(self):
description="integration destination endpoint",
connection_info={
"devices": [self._dst_device],
"pkey_path": _TEST_SSH_KEY_PATH,
"pkey_path": self._harness.ssh_key_path,
},
)

Expand Down
27 changes: 27 additions & 0 deletions coriolis/tests/integration/dockerfiles/data-minion/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
# Copyright 2026 Cloudbase Solutions Srl
# All Rights Reserved.

# Minimal systemd-managed Ubuntu image ("data minion") used by the
# Coriolis integration tests: runs sshd under systemd so tests can SSH
# in as root.
FROM ubuntu:24.04

# dbus is required for systemd to fully manage units;
# sudo is used by replicator / writer setup.
RUN apt-get update && apt-get install -y --no-install-recommends \
dbus \
openssh-server \
sudo \
systemd \
&& rm -rf /var/lib/apt/lists/*

# Start sshd automatically once systemd comes up inside the container.
RUN systemctl enable ssh

# Permit root login with pubkey auth; StrictModes is disabled so an
# authorized_keys file injected by the test harness is accepted
# regardless of its ownership/permissions inside the container.
RUN sed -i \
-e 's/^#\?PermitRootLogin.*/PermitRootLogin yes/' \
-e 's/^#\?PubkeyAuthentication.*/PubkeyAuthentication yes/' \
-e 's/^#\?AuthorizedKeysFile.*/AuthorizedKeysFile .ssh\/authorized_keys/' \
/etc/ssh/sshd_config && \
echo 'StrictModes no' >> /etc/ssh/sshd_config

# systemd requires these folders.
VOLUME ["/run", "/run/lock"]

# Run systemd as PID 1 so the enabled units (sshd) are actually started.
CMD ["/lib/systemd/systemd"]
10 changes: 10 additions & 0 deletions coriolis/tests/integration/harness.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import queue
import shutil
import socket
import subprocess
import tempfile
from unittest import mock
import uuid
Expand Down Expand Up @@ -221,6 +222,15 @@ def __init__(self):
self._mysql_password = "coriolis"
self._mysql_database = "coriolis"

self.ssh_key_path = os.path.join(self.workdir, "id_rsa")
subprocess.run(
["ssh-keygen", "-t", "rsa", "-b", "2048",
"-f", self.ssh_key_path, "-N", ""],
check=True,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
)

coriolis_conf.init_common_opts()
cfg.CONF([], project='coriolis', version='1.0.0',
default_config_files=[], default_config_dirs=[])
Expand Down
81 changes: 51 additions & 30 deletions coriolis/tests/integration/providers/test_provider/exp.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,12 @@
"""
Export-side (source) implementation of the test provider.

Uses Replicator (via SSH to 127.0.0.1) to deploy and manage the
coriolis-replicator service and perform disk replication.
Uses Replicator (via SSH to a Docker data-minion container) to deploy and
manage the coriolis-replicator service and perform disk replication.
"""

import os
import uuid

from oslo_config import cfg
from oslo_log import log as logging
Expand All @@ -22,6 +23,7 @@
from coriolis.providers.base import BaseReplicaExportValidationProvider
from coriolis.providers.base import BaseUpdateSourceReplicaProvider
from coriolis.providers import replicator as replicator_module
from coriolis.tests.integration import utils as test_utils

CONF = cfg.CONF
LOG = logging.getLogger(__name__)
Expand Down Expand Up @@ -51,16 +53,21 @@ def __init__(self, event_handler):
def _event_manager(self):
return events.EventManager(self._event_handler)

def _make_replicator(self, pkey_path, event_mgr, volumes_info, repl_state):
# TODO(claudiub): Use containers instead of using 127.0.0.1.
pkey = paramiko.RSAKey.from_private_key_file(pkey_path)
conn_info = {
"ip": "127.0.0.1",
"username": "root",
def _make_replicator(self, conn_info, event_mgr, volumes_info, repl_state):
"""Build a Replicator that connects via SSH to *conn_info*.

*conn_info* must contain ``ip``, ``port``, ``username``, and
``pkey_path`` keys.
"""
pkey = paramiko.RSAKey.from_private_key_file(conn_info["pkey_path"])
repl_conn_info = {
"ip": conn_info["ip"],
"port": conn_info.get("port", 22),
"username": conn_info.get("username", "root"),
"pkey": pkey,
}
return replicator_module.Replicator(
conn_info, event_mgr, volumes_info, repl_state)
repl_conn_info, event_mgr, volumes_info, repl_state)

# BaseProvider / BaseEndpointProvider

Expand Down Expand Up @@ -179,42 +186,56 @@ def deploy_replica_source_resources(
block_device_path = connection_info["block_device_path"]
pkey_path = connection_info["pkey_path"]

replicator = self._make_replicator(
pkey_path, self._event_manager(), [], None)
replicator.init_replicator()

disk_id = os.path.basename(block_device_path)
return {
"connection_info": {
"ip": "127.0.0.1",
container_name = "coriolis-replicator-%s" % uuid.uuid4().hex[:8]
container_id = test_utils.start_container(
test_utils.DATA_MINION_IMAGE,
container_name,
is_systemd=True,
ssh_key=f"{pkey_path}.pub",
devices=[block_device_path],
)

try:
container_ip = test_utils.get_container_ip(container_id)
test_utils.wait_for_ssh(container_ip, 22, "root", pkey_path)

src_conn_info = {
"ip": container_ip,
"port": 22,
"username": "root",
"pkey_path": pkey_path,
},
"migr_resources": {
"disk_mappings": {disk_id: block_device_path},
},
}
}
replicator = self._make_replicator(
src_conn_info, self._event_manager(), [], None)
replicator.init_replicator()

disk_id = os.path.basename(block_device_path)
return {
"connection_info": src_conn_info,
"migr_resources": {
"container_id": container_id,
"disk_mappings": {disk_id: block_device_path},
},
}
except Exception:
test_utils.stop_container(container_id)
raise

def delete_replica_source_resources(
self, ctxt, connection_info, source_environment,
migr_resources_dict):
pkey_path = connection_info.get("pkey_path")
if not pkey_path:
return
replicator = self._make_replicator(
pkey_path, self._event_manager(), [], None)
replicator.stop()
container_id = (migr_resources_dict or {}).get("container_id")
if container_id:
test_utils.stop_container(container_id)

def replicate_disks(
self, ctxt, connection_info, source_environment, instance_name,
source_resources, source_conn_info, target_conn_info,
volumes_info, incremental):
pkey_path = source_conn_info["pkey_path"]
repl_state = _extract_repl_state(volumes_info) if incremental else None

replicator = self._make_replicator(
pkey_path, self._event_manager(), volumes_info, repl_state)
source_conn_info, self._event_manager(), volumes_info, repl_state)
replicator.init_replicator()
replicator.wait_for_chunks()

Expand Down
Loading
Loading