move ZFSCli.parse_dataset_identifier to Dataset.from_string

main
svalouch 5 years ago
parent 27de066f27
commit 2f5870e7fa

@ -3,9 +3,12 @@
Type declarations
'''
import os
from enum import Enum, unique
from typing import NamedTuple, Optional
from .validation import validate_dataset_path, validate_pool_name
@unique
class DatasetType(str, Enum):
@ -60,6 +63,38 @@ class Dataset(NamedTuple):
#: Dataset type
type: DatasetType
@staticmethod
def from_string(value: str) -> 'Dataset':
    '''
    Helper to convert a string to a Dataset.

    :param value: The value to convert.
    :raises ValidationError: if the value can't be converted.
    :return: the dataset instance
    '''
    if '/' in value:
        # Full dataset path: everything before the last slash is the parent,
        # the first component is the pool.
        validate_dataset_path(value)
        parent_path, _, leaf = value.rpartition('/')
        ds_parent = parent_path  # type: Optional[str]
        ds_name = leaf
        ds_pool = value.split('/', 1)[0]
    else:
        # A bare pool name: the pool itself is the (root) dataset.
        validate_pool_name(value)
        ds_name = value
        ds_parent = None
        ds_pool = value
    if '@' in ds_name:
        ds_type = DatasetType.SNAPSHOT
    elif '#' in ds_name:
        ds_type = DatasetType.BOOKMARK
    else:
        # Volumes surface a device node under /dev/zvol; anything else is a fileset.
        zvol_node = os.path.join('/dev/zvol', value)
        ds_type = DatasetType.VOLUME if os.path.exists(zvol_node) else DatasetType.FILESET
    return Dataset(name=ds_name, parent=ds_parent, type=ds_type, full_path=value, pool=ds_pool)
@unique
class PropertySource(str, Enum):

@ -69,7 +69,7 @@ class ZFS:
'''
self._metadata_namespace = namespace
def dataset_exists(self, name: str ) -> bool:
def dataset_exists(self, name: str) -> bool:
'''
Checks if a dataset exists. This is done by querying for its `type` property.
@ -442,6 +442,9 @@ class ZFS:
# validate type specifics
if dataset_type == DatasetType.VOLUME:
if '@' in name or '#' in name:
raise ValidationError('Volumes can\'t contain @ or #')
if not size:
raise ValidationError('Size must be specified for volumes')
try:
@ -455,14 +458,15 @@ class ZFS:
if properties and 'blocksize' in properties:
try:
blocksize = int(properties['blocksize'])
except ValueError as e:
except ValueError:
raise ValidationError('blocksize must be an integer')
if blocksize < 2 or blocksize > 128*1024: # zfs(8) version 0.8.1 lists 128KB as maximum
if blocksize < 2 or blocksize > 128 * 1024: # zfs(8) version 0.8.1 lists 128KB as maximum
raise ValidationError('blocksize must be between 2 and 128kb (inclusive)')
if not ((blocksize & (blocksize-1) == 0) and blocksize != 0):
if not ((blocksize & (blocksize - 1) == 0) and blocksize != 0):
raise ValidationError('blocksize must be a power of two')
# TODO recursive
#
elif dataset_type == DatasetType.FILESET:
if '@' in name or '#' in name:

@ -9,12 +9,10 @@ import os
import shutil
import subprocess
from .exceptions import DatasetNotFound, PropertyNotFound, ValidationError
from .types import Property, Dataset, DatasetType
from .exceptions import DatasetNotFound, PropertyNotFound
from .types import Property, Dataset
from .validation import (
validate_dataset_path,
validate_metadata_property_name,
validate_native_property_name,
validate_pool_name,
)
from .zfs import ZFS
@ -59,37 +57,6 @@ class ZFSCli(ZFS):
'''
return self.__exe
@staticmethod
def parse_dataset_identifier(name: str) -> Dataset:
    '''
    Parses a dataset identifier like ``pool/system/root@initial`` to a :class:`~simplezfs.types.Dataset`.

    :param name: The name to parse.
    :return: The dataset.
    :raises ValidationError: If the argument is not valid or the argument was a pool.
    '''
    if '/' not in name:
        # Plain pool name: the pool doubles as the dataset and has no parent.
        validate_pool_name(name)
        ds_pool = name
        ds_parent = None  # type: Optional[str]
        ds_name = name
    else:
        validate_dataset_path(name)
        # Split at the last slash: left side is the parent chain, right side the leaf.
        parent_path, _, ds_name = name.rpartition('/')
        ds_parent = parent_path
        ds_pool = name.split('/', 1)[0]
    if '@' in ds_name:
        ds_type = DatasetType.SNAPSHOT
    elif '#' in ds_name:
        ds_type = DatasetType.BOOKMARK
    else:
        ds_type = DatasetType.VOLUME if ZFSCli.is_zvol(name) else DatasetType.FILESET
    return Dataset(name=ds_name, parent=ds_parent, type=ds_type, full_path=name, pool=ds_pool)
@staticmethod
def is_zvol(name: str) -> bool:
'''
@ -135,7 +102,7 @@ class ZFSCli(ZFS):
for line in proc.stdout.strip().split('\n'):
# format is NAME, USED, AVAIL, REFER, MOUNTPOINT, we only care for the name here
name = line.split('\t')[0]
res.append(ZFSCli.parse_dataset_identifier(name.strip()))
res.append(Dataset.from_string(name.strip()))
return res
def handle_command_error(self, proc: subprocess.CompletedProcess, dataset: str = None) -> None:
@ -198,11 +165,11 @@ class ZFSCli(ZFS):
namespace = prop_name.split(':')[0]
return Property(key=prop_name, value=prop_value, source=prop_source, namespace=namespace)
def _get_properties(self, dataset: str, include_metadata: bool = False) -> List[Property]:
'''
Gets all properties from a dataset, basically running ``zfs get -H -p all {dataset}``.
:raises DatasetNotFound: If the dataset does not exist.
'''
args = [self.__exe, 'get', '-H', '-p', 'all', dataset]

@ -0,0 +1,41 @@
from unittest.mock import patch
import pytest
from simplezfs.exceptions import ValidationError
from simplezfs.types import Dataset, DatasetType
from simplezfs.validation import validate_dataset_path
class TestTypesDataset:
    '''Tests for :func:`Dataset.from_string`.'''

    @patch('os.path.exists')
    @pytest.mark.parametrize('identifier,name,parent,dstype,pool', [
        ('pool/test', 'test', 'pool', DatasetType.FILESET, 'pool'),
        ('pool/test@st', 'test@st', 'pool', DatasetType.SNAPSHOT, 'pool'),
        ('pool/test1/test@snap-12', 'test@snap-12', 'pool/test1', DatasetType.SNAPSHOT, 'pool'),
        ('tank/test#bm1', 'test#bm1', 'tank', DatasetType.BOOKMARK, 'tank'),
        ('tank/test1/test#bmark-12', 'test#bmark-12', 'tank/test1', DatasetType.BOOKMARK, 'tank'),
        ('pool/test2', 'test2', 'pool', DatasetType.VOLUME, 'pool'),
        ('pool/test2/test', 'test', 'pool/test2', DatasetType.VOLUME, 'pool'),
    ])
    def test_from_string_valid(self, exists, identifier, name, parent, dstype, pool):
        '''
        Tests the happy path: each identifier parses into the expected fields.
        '''
        validate_dataset_path(identifier)
        # Pretend the /dev/zvol node exists exactly for the volume cases.
        exists.return_value = dstype == DatasetType.VOLUME
        result = Dataset.from_string(identifier)
        assert isinstance(result, Dataset)
        assert (result.name, result.parent, result.pool) == (name, parent, pool)
        assert result.type == dstype
        assert result.full_path == identifier

    @pytest.mark.parametrize('identifier', [' /asd', ' /asd', '\0/asd', 'mirrored/asd', 'raidz fun/asd'])
    def test_from_string_invalid(self, identifier):
        '''Malformed identifiers must be rejected with a ValidationError.'''
        with pytest.raises(ValidationError):
            Dataset.from_string(identifier)

@ -3,13 +3,11 @@
Tests the ZFSCli class, non-distructive version.
'''
from unittest.mock import patch, PropertyMock
from unittest.mock import patch
import pytest
import subprocess
from simplezfs.exceptions import ValidationError
from simplezfs.types import Dataset, DatasetType
from simplezfs.validation import validate_dataset_path
from simplezfs.zfs_cli import ZFSCli
@ -18,42 +16,8 @@ class TestZFSCli:
def test_init_noparam(self):
    # Smoke test: constructing ZFSCli with no arguments must not raise.
    instance = ZFSCli()  # noqa: F841
########################
@patch('simplezfs.zfs_cli.ZFSCli.is_zvol')
@pytest.mark.parametrize('identifier,name,parent,dstype,pool', [
    ('pool/test', 'test', 'pool', DatasetType.FILESET, 'pool'),
    ('pool/test@st', 'test@st', 'pool', DatasetType.SNAPSHOT, 'pool'),
    ('pool/test1/test@snap-12', 'test@snap-12', 'pool/test1', DatasetType.SNAPSHOT, 'pool'),
    ('tank/test#bm1', 'test#bm1', 'tank', DatasetType.BOOKMARK, 'tank'),
    ('tank/test1/test#bmark-12', 'test#bmark-12', 'tank/test1', DatasetType.BOOKMARK, 'tank'),
    ('pool/test2', 'test2', 'pool', DatasetType.VOLUME, 'pool'),
    ('pool/test2/test', 'test', 'pool/test2', DatasetType.VOLUME, 'pool'),
])
def test_parse_dataset_identifier_valid(self, is_zvol, identifier, name, parent, dstype, pool):
    '''
    Tests the happy path: each identifier parses into the expected fields.
    '''
    validate_dataset_path(identifier)
    # Report a zvol device node exactly for the volume cases.
    is_zvol.return_value = dstype == DatasetType.VOLUME
    result = ZFSCli.parse_dataset_identifier(identifier)
    assert isinstance(result, Dataset)
    assert (result.name, result.parent, result.pool) == (name, parent, pool)
    assert result.type == dstype
    assert result.full_path == identifier
@pytest.mark.parametrize('identifier', [' /asd', ' /asd', '\0/asd', 'mirrored/asd', 'raidz fun/asd'])
def test_parse_dataset_identifier_invalid(self, identifier):
    # Each malformed identifier (leading whitespace, NUL byte, reserved pool
    # names, embedded space) must be rejected with a ValidationError.
    with pytest.raises(ValidationError):
        ZFSCli.parse_dataset_identifier(identifier)
######################
@patch('os.path.exists')
def test_is_zvol_ok_exists(self, exists):

Loading…
Cancel
Save