4 changes: 2 additions & 2 deletions nodescraper/base/inbandcollectortask.py
@@ -24,7 +24,7 @@
#
###############################################################################
import logging
from typing import Generic, Optional
from typing import Generic, Optional, Union

from nodescraper.connection.inband import InBandConnection
from nodescraper.connection.inband.inband import BaseFileArtifact, CommandArtifact
@@ -49,7 +49,7 @@ def __init__(
connection: InBandConnection,
logger: Optional[logging.Logger] = None,
system_interaction_level: SystemInteractionLevel = SystemInteractionLevel.INTERACTIVE,
max_event_priority_level: EventPriority | str = EventPriority.CRITICAL,
max_event_priority_level: Union[EventPriority, str] = EventPriority.CRITICAL,
parent: Optional[str] = None,
task_result_hooks: Optional[list[TaskResultHook]] = None,
**kwargs,
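The annotation changes in this file (and throughout the PR) swap PEP 604 unions (`X | Y`) for `typing.Union`/`typing.Optional`, presumably so the signatures still import on Python 3.9, where `X | Y` between classes raises a `TypeError` at definition time. A minimal sketch of the portable spelling, using a stand-in enum rather than the real nodescraper types:

```python
from enum import Enum
from typing import Optional, Union


class EventPriority(Enum):  # stand-in for nodescraper.enums.EventPriority
    ERROR = 1
    CRITICAL = 2


def collect(
    # `EventPriority | str` here would fail at definition time on Python 3.9;
    # typing.Union expresses the same annotation on every supported version.
    max_event_priority_level: Union[EventPriority, str] = EventPriority.CRITICAL,
    parent: Optional[str] = None,  # Optional[str] is Union[str, None]
) -> None:
    ...
```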
10 changes: 6 additions & 4 deletions nodescraper/base/regexanalyzer.py
@@ -24,6 +24,7 @@
#
###############################################################################
import re
from typing import Union

from pydantic import BaseModel

@@ -36,7 +37,7 @@
class ErrorRegex(BaseModel):
regex: re.Pattern
message: str
event_category: str | EventCategory = EventCategory.UNKNOWN
event_category: Union[str, EventCategory] = EventCategory.UNKNOWN
event_priority: EventPriority = EventPriority.ERROR


@@ -54,14 +55,15 @@ class RegexAnalyzer(DataAnalyzer[TDataModel, TAnalyzeArg]):
"""Parent class for all regex based data analyzers."""

def _build_regex_event(
self, regex_obj: ErrorRegex, match: str | list[str], source: str
self, regex_obj: ErrorRegex, match: Union[str, list[str]], source: str
) -> RegexEvent:
"""Build a RegexEvent object from a regex match and source.

Args:
regex_obj (ErrorRegex): regex object containing the regex pattern, message, category, and priority
match (str | list[str]): matched content from the regex
source (str): descriptor for the content where the match was found
match (Union[str, list[str]]): matched content from the regex
source (str): descriptor for the content where the match was found

Returns:
RegexEvent: an instance of RegexEvent containing the match details
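For context on the `Union[str, list[str]]` parameter: a regex hit can surface as a single string (e.g. from `re.search`) or as a list of captured strings (e.g. from `re.findall`), which is presumably why `_build_regex_event` accepts both. An illustrative sketch with a hypothetical pattern and content:

```python
import re
from typing import Union

# Hypothetical pattern and content, for illustration only.
pattern = re.compile(r"error code (\d+)")
content = "error code 7 ... error code 9"

# A single match yields one string ...
single: Union[str, list[str]] = pattern.search(content).group(0)  # "error code 7"
# ... while findall yields a list of captured strings.
many: Union[str, list[str]] = pattern.findall(content)  # ["7", "9"]
```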
5 changes: 3 additions & 2 deletions nodescraper/cli/cli.py
@@ -264,17 +264,18 @@ def build_parser(
model_type_map = parser_builder.build_plugin_parser()
except Exception as e:
print(f"Exception building arg parsers for {plugin_name}: {str(e)}") # noqa: T201
continue
plugin_subparser_map[plugin_name] = (plugin_subparser, model_type_map)

return parser, plugin_subparser_map


def setup_logger(log_level: str = "INFO", log_path: str | None = None) -> logging.Logger:
def setup_logger(log_level: str = "INFO", log_path: Optional[str] = None) -> logging.Logger:
"""set up root logger when using the CLI

Args:
log_level (str): log level to use
log_path (str | None): optional path to filesystem log location
log_path (Optional[str]): optional path to filesystem log location

Returns:
logging.Logger: logger instance
11 changes: 5 additions & 6 deletions nodescraper/cli/dynamicparserbuilder.py
@@ -24,8 +24,7 @@
#
###############################################################################
import argparse
import types
from typing import Type
from typing import Optional, Type

from pydantic import BaseModel

@@ -59,7 +58,7 @@ def build_plugin_parser(self) -> dict:
}

# skip args where generic type has been set to None
if types.NoneType in type_class_map:
if type(None) in type_class_map:
continue

model_arg = self.get_model_arg(type_class_map)
@@ -75,14 +74,14 @@
return model_type_map

@classmethod
def get_model_arg(cls, type_class_map: dict) -> Type[BaseModel] | None:
def get_model_arg(cls, type_class_map: dict) -> Optional[Type[BaseModel]]:
"""Get the first type which is a pydantic model from a type class map

Args:
type_class_map (dict): mapping of type classes

Returns:
Type[BaseModel] | None: pydantic model type
Optional[Type[BaseModel]]: pydantic model type
"""
return next(
(
@@ -164,7 +163,7 @@ def build_model_arg_parser(self, model: type[BaseModel], required: bool) -> list
type_class.type_class: type_class for type_class in attr_data.type_classes
}

if types.NoneType in type_class_map and len(attr_data.type_classes) == 1:
if type(None) in type_class_map and len(attr_data.type_classes) == 1:
continue

self.add_argument(type_class_map, attr.replace("_", "-"), required)
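The `types.NoneType` → `type(None)` swap in this file (and in configbuilder.py further down) serves the same compatibility goal: `types.NoneType` only exists on Python 3.10+, while `type(None)` is the same object on every version. A minimal sketch, assuming a map keyed by type objects in the spirit of `type_class_map`:

```python
from typing import Optional, get_args

# Keys are the member types of Optional[int]: int and NoneType.
type_class_map = {cls: cls for cls in get_args(Optional[int])}

assert type(None) in type_class_map  # portable across Python versions
# `import types; types.NoneType` would raise AttributeError before Python 3.10.
```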
6 changes: 3 additions & 3 deletions nodescraper/cli/helper.py
@@ -267,7 +267,7 @@ def parse_gen_plugin_config(
sys.exit(1)


def log_system_info(log_path: str | None, system_info: SystemInfo, logger: logging.Logger):
def log_system_info(log_path: Optional[str], system_info: SystemInfo, logger: logging.Logger):
"""dump system info object to json log

Args:
@@ -480,12 +480,12 @@ def dump_to_csv(all_rows: list, filename: str, fieldnames: list[str], logger: lo
logger.info("Data written to csv file: %s", filename)


def generate_summary(search_path: str, output_path: str | None, logger: logging.Logger):
def generate_summary(search_path: str, output_path: Optional[str], logger: logging.Logger):
"""Concatenate csv files into 1 summary csv file

Args:
search_path (str): Path for previous runs
output_path (str | None): Path for new summary csv file
output_path (Optional[str]): Path for new summary csv file
logger (logging.Logger): instance of logger
"""

9 changes: 4 additions & 5 deletions nodescraper/cli/inputargtypes.py
@@ -25,22 +25,21 @@
###############################################################################
import argparse
import json
import types
from typing import Generic, Type
from typing import Generic, Optional, Type

from pydantic import ValidationError

from nodescraper.generictypes import TModelType


def log_path_arg(log_path: str) -> str | None:
def log_path_arg(log_path: str) -> Optional[str]:
"""Type function for a log path arg, allows 'none' to be specified to disable logging

Args:
log_path (str): log path string

Returns:
str | None: log path or None
Optional[str]: log path or None
"""
if log_path.lower() == "none":
return None
@@ -84,7 +83,7 @@ def dict_arg(str_input: str) -> dict:
class ModelArgHandler(Generic[TModelType]):
"""Class to handle loading json files into pydantic models"""

def __init__(self, model: Type[TModelType]) -> types.NoneType:
def __init__(self, model: Type[TModelType]) -> None:
self.model = model

def process_file_arg(self, file_path: str) -> TModelType:
7 changes: 3 additions & 4 deletions nodescraper/configbuilder.py
@@ -25,8 +25,7 @@
###############################################################################
import enum
import logging
import types
from typing import Any, Optional, Type
from typing import Any, Optional, Type, Union

from pydantic import BaseModel

@@ -80,7 +79,7 @@ def _update_config(cls, config_key, type_data: TypeData, config: dict):
type_class_map = {
type_class.type_class: type_class for type_class in type_data.type_classes
}
if types.NoneType in type_class_map:
if type(None) in type_class_map:
return

model_arg = next(
@@ -102,7 +101,7 @@ def _update_config(cls, config_key, type_data: TypeData, config: dict):
config[config_key] = cls._process_value(type_data.default)

@classmethod
def _process_value(cls, value: Any) -> dict | str | int | float | list | None:
def _process_value(cls, value: Any) -> Optional[Union[dict, str, int, float, list]]:
if isinstance(value, enum.Enum):
return value.name

11 changes: 6 additions & 5 deletions nodescraper/connection/inband/inbandmanager.py
@@ -26,6 +26,7 @@
from __future__ import annotations

from logging import Logger
from typing import Optional, Union

from nodescraper.enums import (
EventCategory,
@@ -50,11 +51,11 @@ class InBandConnectionManager(ConnectionManager[InBandConnection, SSHConnectionP
def __init__(
self,
system_info: SystemInfo,
logger: Logger | None = None,
max_event_priority_level: EventPriority | str = EventPriority.CRITICAL,
parent: str | None = None,
task_result_hooks: list[TaskResultHook] | None = None,
connection_args: SSHConnectionParams | None = None,
logger: Optional[Logger] = None,
max_event_priority_level: Union[EventPriority, str] = EventPriority.CRITICAL,
parent: Optional[str] = None,
task_result_hooks: Optional[list[TaskResultHook]] = None,
connection_args: Optional[SSHConnectionParams] = None,
**kwargs,
):
super().__init__(
6 changes: 3 additions & 3 deletions nodescraper/connection/inband/inbandremote.py
@@ -25,7 +25,7 @@
###############################################################################
import os
import socket
from typing import Type
from typing import Type, Union

import paramiko
from paramiko.ssh_exception import (
@@ -99,14 +99,14 @@ def connect_ssh(self):
def read_file(
self,
filename: str,
encoding: str | None = "utf-8",
encoding: Union[str, None] = "utf-8",
strip: bool = True,
) -> BaseFileArtifact:
"""Read a remote file into a BaseFileArtifact.

Args:
filename (str): Path to file on remote host
encoding (str | None, optional): If None, file is read as binary. If str, decode using that encoding. Defaults to "utf-8".
encoding (Union[str, None], optional): If None, file is read as binary. If str, decode using that encoding. Defaults to "utf-8".
strip (bool): Strip whitespace for text files. Ignored for binary.

Returns:
3 changes: 2 additions & 1 deletion nodescraper/connection/inband/sshparams.py
@@ -34,9 +34,10 @@ class SSHConnectionParams(BaseModel):
"""Class which holds info for an SSH connection"""

model_config = ConfigDict(arbitrary_types_allowed=True)

hostname: Union[IPvAnyAddress, str]
username: str
password: Optional[SecretStr] = None
pkey: Optional[PKey] = None
key_filename: Optional[str] = None
port: Annotated[int, Field(strict=True, gt=0, lt=65536)] = 22
port: Annotated[int, Field(strict=True, gt=0, le=65535)] = 22
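The port constraint change is behaviour-preserving: with `strict=True` only integers are accepted, and `gt=0, lt=65536` admits exactly the same values as `gt=0, le=65535`; the new bound simply names the highest valid port explicitly. A quick sketch (the model name is a hypothetical stand-in, only the field mirrors the diff):

```python
from typing import Annotated

from pydantic import BaseModel, Field, ValidationError


class PortParams(BaseModel):  # hypothetical stand-in for SSHConnectionParams
    port: Annotated[int, Field(strict=True, gt=0, le=65535)] = 22


print(PortParams(port=65535).port)  # accepted: highest valid TCP port

try:
    PortParams(port=65536)
except ValidationError:
    print("65536 rejected")  # rejected under both le=65535 and lt=65536
```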
10 changes: 5 additions & 5 deletions nodescraper/interfaces/connectionmanager.py
@@ -29,7 +29,7 @@
import logging
import types
from functools import wraps
from typing import Callable, Generic, Optional, TypeVar
from typing import Callable, Generic, Optional, TypeVar, Union

from pydantic import BaseModel

@@ -89,10 +89,10 @@ def __init__(
self,
system_info: SystemInfo,
logger: Optional[logging.Logger] = None,
max_event_priority_level: EventPriority | str = EventPriority.CRITICAL,
max_event_priority_level: Union[EventPriority, str] = EventPriority.CRITICAL,
parent: Optional[str] = None,
task_result_hooks: list[TaskResultHook] | types.NoneType = None,
connection_args: Optional[TConnectArg | dict] = None,
task_result_hooks: Optional[list[TaskResultHook]] = None,
connection_args: Optional[Union[TConnectArg, dict]] = None,
**kwargs,
):
super().__init__(
@@ -113,7 +113,7 @@ def __init__(
connection_args = connection_arg_model(**connection_args)

self.connection_args = connection_args
self.connection: TConnection | None = None
self.connection: Optional[TConnection] = None

def __init_subclass__(cls, **kwargs) -> None:
super().__init_subclass__(**kwargs)
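A detail behind the rewritten parameters in this hunk: `typing.Optional` is shorthand for `Union[X, None]` and accepts exactly one subscript, which is why `task_result_hooks` and `connection_args` each end up as a single-parameter `Optional[...]`. A minimal check of both equivalences:

```python
from typing import Optional, Union

# Optional[X] is Union[X, None]; a two-argument form such as Optional[X, None]
# raises a TypeError when the module is imported.
assert Optional[list[str]] == Union[list[str], None]

# Unions of several alternatives nest and flatten as expected, e.g. the shape of
# Optional[Union[TConnectArg, dict]] shown here with stand-in types:
assert Optional[Union[dict, str]] == Union[dict, str, None]
```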
4 changes: 2 additions & 2 deletions nodescraper/interfaces/dataanalyzertask.py
@@ -28,7 +28,7 @@
import abc
import inspect
from functools import wraps
from typing import Any, Callable, Generic, Optional, Type
from typing import Any, Callable, Generic, Optional, Type, Union

from pydantic import BaseModel, ValidationError

@@ -46,7 +46,7 @@ def analyze_decorator(func: Callable[..., TaskResult]) -> Callable[..., TaskResu
def wrapper(
analyzer: "DataAnalyzer",
data: DataModel,
args: Optional[TAnalyzeArg | dict] = None,
args: Optional[Union[TAnalyzeArg, dict]] = None,
) -> TaskResult:
analyzer.logger.info("Running data analyzer: %s", analyzer.__class__.__name__)
analyzer.result = analyzer._init_result()
16 changes: 9 additions & 7 deletions nodescraper/interfaces/datacollectortask.py
@@ -27,7 +27,7 @@
import inspect
import logging
from functools import wraps
from typing import Callable, ClassVar, Generic, Optional, Type
from typing import Callable, ClassVar, Generic, Optional, Type, Union

from pydantic import BaseModel, ValidationError

@@ -48,12 +48,12 @@


def collect_decorator(
func: Callable[..., tuple[TaskResult, TDataModel | None]],
) -> Callable[..., tuple[TaskResult, TDataModel | None]]:
func: Callable[..., tuple[TaskResult, Optional[TDataModel]]],
) -> Callable[..., tuple[TaskResult, Optional[TDataModel]]]:
@wraps(func)
def wrapper(
collector: "DataCollector", args: Optional[TCollectArg] = None
) -> tuple[TaskResult, TDataModel | None]:
) -> tuple[TaskResult, Optional[TDataModel]]:
collector.logger.info("Running data collector: %s", collector.__class__.__name__)
collector.result = collector._init_result()
try:
@@ -122,8 +122,10 @@ def __init__(
system_info: SystemInfo,
connection: TConnection,
logger: Optional[logging.Logger] = None,
system_interaction_level: SystemInteractionLevel | str = SystemInteractionLevel.INTERACTIVE,
max_event_priority_level: EventPriority | str = EventPriority.CRITICAL,
system_interaction_level: Union[
SystemInteractionLevel, str
] = SystemInteractionLevel.INTERACTIVE,
max_event_priority_level: Union[EventPriority, str] = EventPriority.CRITICAL,
parent: Optional[str] = None,
task_result_hooks: Optional[list[TaskResultHook]] = None,
**kwargs,
@@ -175,7 +177,7 @@ def __init_subclass__(cls, **kwargs) -> None:
@abc.abstractmethod
def collect_data(
self, args: Optional[TCollectArg] = None
) -> tuple[TaskResult, TDataModel | None]:
) -> tuple[TaskResult, Optional[TDataModel]]:
"""Collect data from a target system

Returns:
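The decorator updates in this file keep the `tuple[TaskResult, Optional[TDataModel]]` shape consistent across the wrapped function, the wrapper, and the abstract `collect_data`. A stripped-down sketch of that pattern, with plain placeholder types standing in for the real generics and task result classes:

```python
from functools import wraps
from typing import Callable, Optional


def collect_decorator(
    func: Callable[..., tuple[str, Optional[dict]]],
) -> Callable[..., tuple[str, Optional[dict]]]:
    @wraps(func)
    def wrapper(*args, **kwargs) -> tuple[str, Optional[dict]]:
        print("running data collector")  # placeholder for the real logging/result setup
        return func(*args, **kwargs)

    return wrapper


@collect_decorator
def collect_data(args: Optional[dict] = None) -> tuple[str, Optional[dict]]:
    return "OK", {"collected": True}


result, data = collect_data()  # ("OK", {"collected": True})
```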