diff --git a/dbc2val/dbcfeeder.py b/dbc2val/dbcfeeder.py index 966dba7d..ba7a9dbb 100755 --- a/dbc2val/dbcfeeder.py +++ b/dbc2val/dbcfeeder.py @@ -25,6 +25,7 @@ import argparse import configparser import enum +import errno import logging import os import queue @@ -34,8 +35,7 @@ import asyncio from signal import SIGINT, SIGTERM, signal -from typing import Any -from typing import Dict +from typing import Any, Dict from dbcfeederlib import canclient from dbcfeederlib import canplayer @@ -50,6 +50,21 @@ log = logging.getLogger("dbcfeeder") +CONFIG_SECTION_CAN = "can" +CONFIG_SECTION_ELMCAN = "elmcan" +CONFIG_SECTION_GENERAL = "general" + +CONFIG_OPTION_CAN_DUMP_FILE = "candumpfile" +CONFIG_OPTION_DBC_DEFAULT_FILE = "dbc_default_file" +CONFIG_OPTION_IP = "ip" +CONFIG_OPTION_J1939 = "j1939" +CONFIG_OPTION_MAPPING = "mapping" +CONFIG_OPTION_PORT = "port" +CONFIG_OPTION_ROOT_CA_PATH = "root_ca_path" +CONFIG_OPTION_TLS_ENABLED = "tls" +CONFIG_OPTION_TLS_SERVER_NAME = "tls_server_name" +CONFIG_OPTION_TOKEN = "token" + class ServerType(str, enum.Enum): """Enum class to indicate type of server dbcfeeder is connecting to""" @@ -131,16 +146,16 @@ def __init__(self, client_wrapper: clientwrapper.ClientWrapper, def start( self, canport, - dbcfile, + dbc_file_names, mappingfile, dbc_default_file, - candumpfile=None, + candumpfile, use_j1939=False, use_strict_parsing=False ): # Read DBC file - self._dbc_parser = dbcparser.DBCParser(dbcfile, use_strict_parsing) + self._dbc_parser = dbcparser.DBCParser(dbc_file_names, use_strict_parsing, use_j1939) log.info("Using mapping: {}".format(mappingfile)) self._mapper = dbc2vssmapper.Mapper(mappingfile, self._dbc_parser, dbc_default_file) @@ -201,7 +216,7 @@ def start( log.error("Subscribing to VSS signals not supported by chosen client!") self.stop() else: - log.info(f"Starting transmit thread, using {canport}") + log.info("Starting transmit thread, using %s", canport) # For now creating another bus # Maybe support different buses for downstream/upstream in the future @@ -315,12 +330,16 @@ async def vss_update(self, updates): for update in updates: if update.entry.value is not None: # This shall currently never happen as we do not subscribe to this - log.warning(f"Current value for {update.entry.path} is now: " - f"{update.entry.value.value} of type {type(update.entry.value.value)}") + log.warning( + "Current value for %s is now: %s of type %s", + update.entry.path, update.entry.value.value, type(update.entry.value.value) + ) if update.entry.actuator_target is not None: - log.debug(f"Target value for {update.entry.path} is now: {update.entry.actuator_target} " - f"of type {type(update.entry.actuator_target.value)}") + log.debug( + "Target value for %s is now: %s of type %s", + update.entry.path, update.entry.actuator_target, type(update.entry.actuator_target.value) + ) new_dbc_ids = self._mapper.handle_update(update.entry.path, update.entry.actuator_target.value) dbc_ids.update(new_dbc_ids) @@ -330,7 +349,7 @@ async def vss_update(self, updates): can_ids.add(can_id) for can_id in can_ids: - log.debug(f"CAN id to be sent, this is {can_id}") + log.debug("CAN id to be sent, this is %#x", can_id) sig_dict = self._mapper.get_value_dict(can_id) message_data = self._dbc_parser.db.get_message_by_frame_id(can_id) data = message_data.encode(sig_dict) @@ -353,13 +372,13 @@ def _run_transmitter(self): asyncio.run(self._run_subscribe()) -def parse_config(filename): +def _parse_config(filename: str) -> configparser.ConfigParser: configfile = None if filename: if not 
os.path.exists(filename): - log.warning("Couldn't find config file {}".format(filename)) - raise Exception("Couldn't find config file {}".format(filename)) + log.warning("Couldn't find config file %s", filename) + raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), filename) configfile = filename else: config_candidates = [ @@ -372,35 +391,83 @@ def parse_config(filename): configfile = candidate break - log.info("Using config: {}".format(configfile)) - if configfile is None: - return {} - config = configparser.ConfigParser() - readed = config.read(configfile) - if log.level >= logging.DEBUG: - log.debug( - "# config.read({}):\n{}".format( - readed, - {section: dict(config[section]) for section in config.sections()}, - ) - ) + log.info("Reading configuration from file: %s", configfile) + if configfile: + readed = config.read(configfile) + if log.isEnabledFor(logging.DEBUG): + log.debug("using configuration (%s):\n%s", readed, config) return config -def main(argv): - """Main entrypoint for dbcfeeder""" +def _get_kuksa_val_client(command_line_parser: argparse.Namespace, + config: configparser.ConfigParser) -> clientwrapper.ClientWrapper: + + if command_line_parser.server_type: + server_type_name = command_line_parser.server_type + elif os.environ.get("SERVER_TYPE"): + server_type_name = os.environ.get("SERVER_TYPE") + else: + server_type_name = config.get(CONFIG_SECTION_GENERAL, "server_type", fallback=ServerType.KUKSA_VAL_SERVER.name) + + server_type = ServerType(server_type_name) + + # The wrappers contain default settings, so we only need to change settings + # if given by dbcfeeder configs/arguments/env-variables + if server_type is ServerType.KUKSA_VAL_SERVER: + client: clientwrapper.ClientWrapper = serverclientwrapper.ServerClientWrapper() + elif server_type is ServerType.KUKSA_DATABROKER: + client = databrokerclientwrapper.DatabrokerClientWrapper() + else: + raise ValueError(f"Unsupported server type: {server_type}") + + kuksa_ip = os.environ.get("KUKSA_ADDRESS") + if kuksa_ip is not None: + client.set_ip(kuksa_ip) + elif config.has_option(CONFIG_SECTION_GENERAL, CONFIG_OPTION_IP): + client.set_ip(config.get(CONFIG_SECTION_GENERAL, CONFIG_OPTION_IP)) + + kuksa_port = os.environ.get("KUKSA_PORT") + if kuksa_port is not None: + client.set_port(int(kuksa_port)) + elif config.has_option(CONFIG_SECTION_GENERAL, CONFIG_OPTION_PORT): + client.set_port(config.getint(CONFIG_SECTION_GENERAL, CONFIG_OPTION_PORT)) + + if config.has_option(CONFIG_SECTION_GENERAL, CONFIG_OPTION_TLS_ENABLED): + client.set_tls(config.getboolean(CONFIG_SECTION_GENERAL, CONFIG_OPTION_TLS_ENABLED, fallback=False)) + + if config.has_option(CONFIG_SECTION_GENERAL, CONFIG_OPTION_ROOT_CA_PATH): + path = config.get(CONFIG_SECTION_GENERAL, CONFIG_OPTION_ROOT_CA_PATH) + client.set_root_ca_path(path) + elif client.get_tls(): + # We do not want to rely on kuksa-client default + log.error("Root CA must be given when using TLS") + + if config.has_option(CONFIG_SECTION_GENERAL, CONFIG_OPTION_TLS_SERVER_NAME): + name = config.get(CONFIG_SECTION_GENERAL, CONFIG_OPTION_TLS_SERVER_NAME) + client.set_tls_server_name(name) + + if config.has_option(CONFIG_SECTION_GENERAL, CONFIG_OPTION_TOKEN): + token_path = config.get(CONFIG_SECTION_GENERAL, CONFIG_OPTION_TOKEN) + client.set_token_path(token_path) + else: + log.info("Path to token information not given") + + return client + + +def _get_command_line_args_parser() -> argparse.ArgumentParser: parser = argparse.ArgumentParser(description="dbcfeeder") - 
parser.add_argument("--config", metavar="FILE", help="Configuration file") + parser.add_argument("--config", metavar="FILE", help="The file to read configuration properties from") parser.add_argument( - "--dbcfile", metavar="FILE", help="DBC file used for parsing CAN traffic" + "--dbcfile", metavar="FILE", help="A (comma sparated) list of DBC files to read message definitions from." ) parser.add_argument( "--dumpfile", metavar="FILE", help="Replay recorded CAN traffic from dumpfile" ) - parser.add_argument("--canport", metavar="DEVICE", help="Read from this CAN device") - parser.add_argument("--use-j1939", action="store_true", help="Use J1939") + parser.add_argument("--canport", metavar="DEVICE", help="The name of the device representing the CAN bus") + parser.add_argument("--use-j1939", action="store_true", help="Use j1939 messages on the CAN bus") parser.add_argument( "--use-socketcan", @@ -410,18 +477,17 @@ def main(argv): parser.add_argument( "--mapping", metavar="FILE", - help="Mapping file used to map CAN signals to VSS datapoints", + help="The file to read definitions for mapping CAN signals to VSS datapoints from", ) parser.add_argument( "--dbc-default", metavar="FILE", - help="File containing default values for DBC signals. Needed for all CAN signals used if using val2dbc", + help="A file containing default values for DBC signals. Needed for all CAN signals used if using val2dbc", ) parser.add_argument( "--server-type", - help="Which type of server the feeder should connect to", - choices=[server_type.value for server_type in ServerType], - type=ServerType, + help="The type of KUKSA.val server to write/read VSS signal to/from", + choices=[server_type.name for server_type in ServerType] ) parser.add_argument( "--lax-dbc-parsing", @@ -447,103 +513,84 @@ def main(argv): parser.add_argument('--no-val2dbc', action='store_true', help="Do not monitor mapped signals in KUKSA.val") + return parser + + +def main(argv): + """Main entrypoint for dbcfeeder""" + parser = _get_command_line_args_parser() args = parser.parse_args() + config = _parse_config(args.config) - config = parse_config(args.config) + if args.dbc2val: + use_dbc2val = True + elif args.no_dbc2val: + use_dbc2val = False + elif os.environ.get("USE_DBC2VAL"): + use_dbc2val = True + elif os.environ.get("NO_USE_DBC2VAL"): + use_dbc2val = False + else: + # By default enabled + use_dbc2val = config.getboolean(CONFIG_SECTION_GENERAL, "dbc2val", fallback=True) + log.info("DBC2VAL mode is: %s", use_dbc2val) - if args.server_type: - server_type = args.server_type - elif os.environ.get("SERVER_TYPE"): - server_type = ServerType(os.environ.get("SERVER_TYPE")) - elif "server_type" in config["general"]: - server_type = ServerType(config["general"]["server_type"]) + if args.val2dbc: + use_val2dbc = True + elif args.no_val2dbc: + use_val2dbc = False + elif os.environ.get("USE_VAL2DBC"): + use_val2dbc = True + elif os.environ.get("NO_USE_VAL2DBC"): + use_val2dbc = False else: - server_type = ServerType.KUKSA_VAL_SERVER + # By default disabled + use_val2dbc = config.getboolean(CONFIG_SECTION_GENERAL, "val2dbc", fallback=False) + log.info("VAL2DBC mode is: %s", use_val2dbc) - if server_type not in [ServerType.KUKSA_VAL_SERVER, ServerType.KUKSA_DATABROKER]: - raise ValueError(f"Unsupported server type: {server_type}") + if not (use_dbc2val or use_val2dbc): + parser.error("Either DBC2VAL or VAL2DBC must be enabled") - # The wrappers contain default settings, so we only need to change settings - # if given by dbcfeeder 
configs/arguments/env-variables - if server_type is ServerType.KUKSA_VAL_SERVER: - client_wrapper = serverclientwrapper.ServerClientWrapper() - elif server_type is ServerType.KUKSA_DATABROKER: - client_wrapper = databrokerclientwrapper.DatabrokerClientWrapper() - - if os.environ.get("KUKSA_ADDRESS"): - client_wrapper.set_ip(os.environ.get("KUKSA_ADDRESS")) - elif "ip" in config["general"]: - client_wrapper.set_ip(config["general"]["ip"]) - - if os.environ.get("KUKSA_PORT"): - client_wrapper.set_port(os.environ.get("KUKSA_PORT")) - elif "port" in config["general"]: - client_wrapper.set_port(config["general"]["port"]) - - if "tls" in config["general"]: - client_wrapper.set_tls(config["general"].getboolean("tls")) - - if "root_ca_path" in config["general"]: - path = config['general']['root_ca_path'] - log.info(f"Given root CA path: {path}") - client_wrapper.set_root_ca_path(path) - elif client_wrapper.get_tls(): - # We do not want to rely on kuksa-client default - log.error("Root CA must be given when using TLS") + if args.dbcfile: + dbcfile = args.dbcfile + elif os.environ.get("DBC_FILE"): + dbcfile = os.environ.get("DBC_FILE") + else: + dbcfile = config.get(CONFIG_SECTION_CAN, "dbcfile", fallback=None) + + if not dbcfile: + parser.error("No DBC file(s) specified") + + if args.canport: + canport = args.canport + elif os.environ.get("CAN_PORT"): + canport = os.environ.get("CAN_PORT") + else: + canport = config.get(CONFIG_SECTION_CAN, CONFIG_OPTION_PORT, fallback=None) - if "tls_server_name" in config["general"]: - name = config['general']['tls_server_name'] - log.info(f"Given TLS server name: {name}") - client_wrapper.set_tls_server_name(name) + if not canport: + parser.error("No CAN port specified") - if "token" in config["general"]: - log.info(f"Given token information: {config['general']['token']}") - client_wrapper.set_token_path(config["general"]["token"]) + if args.dbc_default: + dbc_default = args.dbc_default + elif os.environ.get("DBC_DEFAULT_FILE"): + dbc_default = os.environ.get("DBC_DEFAULT_FILE") else: - log.info("Token information not given") + dbc_default = config.get(CONFIG_SECTION_CAN, CONFIG_OPTION_DBC_DEFAULT_FILE, fallback="dbc_default_values.json") if args.mapping: mappingfile = args.mapping elif os.environ.get("MAPPING_FILE"): mappingfile = os.environ.get("MAPPING_FILE") - elif "general" in config and "mapping" in config["general"]: - mappingfile = config["general"]["mapping"] else: - mappingfile = "mapping/vss_4.0/vss_dbc.json" - - if args.canport: - canport = args.canport - elif os.environ.get("CAN_PORT"): - canport = os.environ.get("CAN_PORT") - elif "can" in config and "port" in config["can"]: - canport = config["can"]["port"] - else: - parser.print_help() - print("ERROR:\nNo CAN port specified") - return -1 + mappingfile = config.get(CONFIG_SECTION_GENERAL, CONFIG_OPTION_MAPPING, fallback="mapping/vss_4.0/vss_dbc.json") if args.use_j1939: use_j1939 = True elif os.environ.get("USE_J1939"): use_j1939 = True - elif "can" in config: - use_j1939 = config["can"].getboolean("j1939", False) else: - use_j1939 = False - - if args.dbcfile: - dbcfile = args.dbcfile - elif os.environ.get("DBC_FILE"): - dbcfile = os.environ.get("DBC_FILE") - elif "can" in config and "dbcfile" in config["can"]: - dbcfile = config["can"]["dbcfile"] - else: - dbcfile = None - - if not dbcfile and not use_j1939: - parser.print_help() - print("\nERROR:\nNeither DBC file nor the use of J1939 specified") - return -1 + use_j1939 = config.getboolean(CONFIG_SECTION_CAN, CONFIG_OPTION_J1939, fallback=False) 
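Each option above is resolved with the same precedence: command-line argument first, then environment variable, then config-file entry, then a hard-coded fallback. A minimal sketch of that lookup pattern, assuming a hypothetical helper name _resolve_option that is not part of this patch:

    import configparser
    import os
    from typing import Optional

    def _resolve_option(arg_value: Optional[str], env_var: str,
                        config: configparser.ConfigParser,
                        section: str, option: str,
                        fallback: Optional[str] = None) -> Optional[str]:
        # a command-line argument wins over everything else
        if arg_value:
            return arg_value
        # next, an environment variable (if set and non-empty)
        env_value = os.environ.get(env_var)
        if env_value:
            return env_value
        # finally the config file, falling back to the given default
        return config.get(section, option, fallback=fallback)

    # e.g. canport = _resolve_option(args.canport, "CAN_PORT", config,
    #                                CONFIG_SECTION_CAN, CONFIG_OPTION_PORT)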
candumpfile = None if not args.use_socketcan: @@ -551,73 +598,29 @@ def main(argv): candumpfile = args.dumpfile elif os.environ.get("CANDUMP_FILE"): candumpfile = os.environ.get("CANDUMP_FILE") - elif "can" in config and "candumpfile" in config["can"]: - candumpfile = config["can"]["candumpfile"] + else: + candumpfile = config.get(CONFIG_SECTION_CAN, CONFIG_OPTION_CAN_DUMP_FILE, fallback=None) if args.val2dbc and candumpfile is not None: - log.error("Cannot use dumpfile and val2dbc at the same time!") - sys.exit(-1) - - client_wrapper.get_client_specific_configs() + parser.error("Cannot use dumpfile and val2dbc at the same time!") elmcan_config = [] if canport == "elmcan": if candumpfile is not None: - log.error("It is a contradiction specifying both elmcan and candumpfile!") - sys.exit(-1) - if "elmcan" not in config: - log.error("Cannot use elmcan without elmcan config!") - sys.exit(-1) - elmcan_config = config["elmcan"] - - if args.dbc_default: - dbc_default = args.dbc_default - elif os.environ.get("DBC_DEFAULT_FILE"): - dbc_default = os.environ.get("DBC_DEFAULT_FILE") - elif "can" in config and "dbc_default_file" in config["can"]: - dbc_default = config["can"]["dbc_default_file"] - else: - dbc_default = "dbc_default_values.json" - - if args.dbc2val: - use_dbc2val = True - elif args.no_dbc2val: - use_dbc2val = False - elif os.environ.get("USE_DBC2VAL"): - use_dbc2val = True - elif os.environ.get("NO_USE_DBC2VAL"): - use_dbc2val = False - elif "general" in config and "dbc2val" in config["general"]: - use_dbc2val = config["general"].getboolean("dbc2val", False) - else: - # By default enabled - log.info("Alt5") - use_dbc2val = True - log.info(f"DBC2VAL mode is: {use_dbc2val}") - - if args.val2dbc: - use_val2dbc = True - elif args.no_val2dbc: - use_val2dbc = False - elif os.environ.get("USE_VAL2DBC"): - use_val2dbc = True - elif os.environ.get("NO_USE_VAL2DBC"): - use_val2dbc = False - elif "general" in config and "val2dbc" in config["general"]: - use_val2dbc = config["general"].getboolean("val2dbc", True) - else: - # By default disabled - use_val2dbc = False - log.info(f"VAL2DBC mode is: {use_val2dbc}") + parser.error("It is a contradiction specifying both elmcan and candumpfile!") + if not config.has_section(CONFIG_SECTION_ELMCAN): + parser.error("Cannot use elmcan without configuration in [elmcan] section!") + elmcan_config = config[CONFIG_SECTION_ELMCAN] - feeder = Feeder(client_wrapper, elmcan_config, dbc2val=use_dbc2val, val2dbc=use_val2dbc) + kuksa_val_client = _get_kuksa_val_client(args, config) + feeder = Feeder(kuksa_val_client, elmcan_config, dbc2val=use_dbc2val, val2dbc=use_val2dbc) def signal_handler(signal_received, *_): - log.info(f"Received signal {signal_received}, stopping...") + log.info("Received signal %s, stopping...", signal_received) # If we get told to shutdown a second time. Just do it. 
if feeder.is_stopping(): - log.warning("Shutdown now!") + log.warning("Shutting down now!") sys.exit(-1) feeder.stop() @@ -628,7 +631,7 @@ def signal_handler(signal_received, *_): log.info("Starting CAN feeder") feeder.start( canport=canport, - dbcfile=dbcfile, + dbc_file_names=dbcfile.split(','), mappingfile=mappingfile, dbc_default_file=dbc_default, candumpfile=candumpfile, diff --git a/dbc2val/dbcfeederlib/clientwrapper.py b/dbc2val/dbcfeederlib/clientwrapper.py index 28ff8410..8c48ae23 100644 --- a/dbc2val/dbcfeederlib/clientwrapper.py +++ b/dbc2val/dbcfeederlib/clientwrapper.py @@ -38,6 +38,15 @@ def __init__(self, ip: str, port: int, token_path: str, tls: bool = True): self._registered = False self._root_ca_path: Optional[str] = None self._tls_server_name: Optional[str] = None + self._do_init() + + def _do_init(self): + """ + Perform any implementation specific additional initialization. + + Called at the end of __init__. + This default implementation does nothing. + """ def set_ip(self, ip: str): """ Set IP address to use """ @@ -63,19 +72,18 @@ def get_tls(self) -> bool: def set_root_ca_path(self, path: str): """ Set Path for Root CA (CA.pem) """ self._root_ca_path = path + log.info("Using root CA path: %s", self._root_ca_path) def set_tls_server_name(self, name: str): """ Set Path for Root CA (CA.pem) """ self._tls_server_name = name + log.info("Using TLS server name: %s", self._tls_server_name) def set_token_path(self, token_path: str): self._token_path = token_path + log.info("Using token from: %s", self._token_path) # Abstract methods to implement - @abstractmethod - def get_client_specific_configs(self): - pass - @abstractmethod def start(self): pass diff --git a/dbc2val/dbcfeederlib/databrokerclientwrapper.py b/dbc2val/dbcfeederlib/databrokerclientwrapper.py index 35836e9d..12e937e7 100644 --- a/dbc2val/dbcfeederlib/databrokerclientwrapper.py +++ b/dbc2val/dbcfeederlib/databrokerclientwrapper.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 ################################################################################# -# Copyright (c) 2022 Contributors to the Eclipse Foundation +# Copyright (c) 2022, 2023 Contributors to the Eclipse Foundation # # See the NOTICE file(s) distributed with this work for additional # information regarding copyright ownership. @@ -48,19 +48,19 @@ def __init__(self, ip: str = "127.0.0.1", port: int = 55555, token_path: str = "", tls: bool = False): """ - Init Databroker client wrapper, by default (for now) without TLS + Init Databroker client wrapper, by default with TLS """ + super().__init__(ip, port, token_path, tls) self._grpc_client = None self._name_to_type: dict[str, DataType] = {} self._rpc_kwargs: Dict[str, str] = {} self._connected = False self._exit_stack = contextlib.ExitStack() - super().__init__(ip, port, token_path, tls) self._token = "" - def get_client_specific_configs(self): + def _do_init(self): """ - Get client specific configs and env variables + Set up gRPC metadata for interaction for Databroker. 
""" if os.environ.get("VEHICLEDATABROKER_DAPR_APP_ID"): diff --git a/dbc2val/dbcfeederlib/dbcparser.py b/dbc2val/dbcfeederlib/dbcparser.py index c9405af0..45acbdc7 100644 --- a/dbc2val/dbcfeederlib/dbcparser.py +++ b/dbc2val/dbcfeederlib/dbcparser.py @@ -21,49 +21,63 @@ import logging import sys import os -from typing import Set, Optional, Dict, cast, Tuple + import cantools.database +from types import MappingProxyType +from typing import cast, Dict, Optional, List, Set, Tuple + + log = logging.getLogger(__name__) class DBCParser: - def __init__(self, dbcfile: str, use_strict_parsing: bool = True): + + _dbc_file_encodings = MappingProxyType({ + 'dbc': 'cp1252', + 'sym': 'cp1252' + }) + + def __init__(self, + dbc_file_names: List[str], + use_strict_parsing: bool = True, + expect_extended_frame_ids: bool = False): first = True - found_names = set() - for name in dbcfile.split(","): - filename = name.strip() - if filename in found_names: - log.warning("The DBC file {} has already been read, ignoring it!".format(filename)) + processed_files: Set[str] = set() + for filename in [name.strip() for name in dbc_file_names]: + if filename in processed_files: + log.warning("DBC file %s has already been read, ignoring it!", filename) continue - found_names.add(filename) + processed_files.add(filename) if first: - log.info("Reading DBC file {} as first file".format(filename)) - db = cantools.database.load_file(filename, strict=use_strict_parsing) + # by default, do not mask any bits of standard (11-bit) frame IDs + mask = 0b11111111111 + if expect_extended_frame_ids: + # mask 3 priority bits of extended (29-bit) frame IDs + mask = 0b00011111111111111111111111111 + log.info("Reading definitions from DBC file %s", filename) + database = cantools.database.load_file(filename, strict=use_strict_parsing, frame_id_mask=mask) # load_file can return multiple types of databases, make sure we have CAN database - if isinstance(db, cantools.database.can.database.Database): - self.db = cast(cantools.database.can.database.Database, db) + if isinstance(database, cantools.database.can.database.Database): + self._db = cast(cantools.database.can.database.Database, database) first = False else: - log.error("File is not a CAN database, likely a diagnostics database") + log.error("File %s is not a CAN database, likely a diagnostics database", filename) sys.exit(-1) else: - log.info("Adding definitions from {}".format(filename)) + log.info("Adding definitions from DBC file %s", filename) self._add_db_file(filename) # Init some dictionaries to speed up search - self.signal_to_canid: Dict[str, Optional[int]] = {} - self.canid_to_signals: Dict[int, Set[str]] = {} + self._signal_to_canid: Dict[str, Optional[int]] = {} + self._canid_to_signals: Dict[int, Set[str]] = {} def _determine_db_format_and_encoding(self, filename) -> Tuple[str, str]: db_format = os.path.splitext(filename)[1][1:].lower() try: - encoding = { - 'dbc': 'cp1252', - 'sym': 'cp1252' - }[db_format] + encoding = DBCParser._dbc_file_encodings[db_format] except KeyError: encoding = 'utf-8' @@ -72,47 +86,47 @@ def _determine_db_format_and_encoding(self, filename) -> Tuple[str, str]: def _add_db_file(self, filename: str): db_format, encoding = self._determine_db_format_and_encoding(filename) if db_format == "arxml": - self.db.add_arxml_file(filename, encoding) + self._db.add_arxml_file(filename, encoding) elif db_format == "dbc": - self.db.add_dbc_file(filename, encoding) + self._db.add_dbc_file(filename, encoding) elif db_format == "kcd": - 
self.db.add_kcd_file(filename, encoding) + self._db.add_kcd_file(filename, encoding) elif db_format == "sym": - self.db.add_sym_file(filename, encoding) + self._db.add_sym_file(filename, encoding) else: log.warning("Cannot read CAN message definitions from file using unsupported format: %s", db_format) def get_canid_for_signal(self, sig_to_find: str) -> Optional[int]: - if sig_to_find in self.signal_to_canid: - return self.signal_to_canid[sig_to_find] + if sig_to_find in self._signal_to_canid: + return self._signal_to_canid[sig_to_find] - for msg in self.db.messages: + for msg in self._db.messages: for signal in msg.signals: if signal.name == sig_to_find: frame_id = msg.frame_id - log.info( - "Found signal in DBC file {} in CAN frame id 0x{:02x}".format( - signal.name, frame_id - ) - ) - self.signal_to_canid[sig_to_find] = frame_id + log.debug("Found signal %s in CAN message with frame ID %#x", signal.name, frame_id) + self._signal_to_canid[sig_to_find] = frame_id return frame_id - log.warning("Signal {} not found in DBC file".format(sig_to_find)) - self.signal_to_canid[sig_to_find] = None + log.warning("Signal %s not found in CAN message database", sig_to_find) + self._signal_to_canid[sig_to_find] = None return None def get_signals_for_canid(self, canid: int) -> Set[str]: - if canid in self.canid_to_signals: - return self.canid_to_signals[canid] - - for msg in self.db.messages: - if canid == msg.frame_id: - names = set() - for signal in msg.signals: - names.add(signal.name) - self.canid_to_signals[canid] = names - return names - log.warning(f"CAN id {canid} not found in DBC file") - self.canid_to_signals[canid] = set() - return set() + if canid in self._canid_to_signals: + return self._canid_to_signals[canid] + + names: Set[str] = set() + message = self.get_message_for_canid(canid) + if message is not None: + for signal in message.signals: + names.add(signal.name) + self._canid_to_signals[canid] = names + return names + + def get_message_for_canid(self, canid: int) -> Optional[cantools.database.Message]: + try: + return self._db.get_message_by_frame_id(canid) + except Exception: + log.debug("No DBC mapping registered for CAN frame id %#x", canid) + return None diff --git a/dbc2val/dbcfeederlib/dbcreader.py b/dbc2val/dbcfeederlib/dbcreader.py index 4cb38f89..a7028db7 100644 --- a/dbc2val/dbcfeederlib/dbcreader.py +++ b/dbc2val/dbcfeederlib/dbcreader.py @@ -58,12 +58,12 @@ def start_listening(self, *args, **kwargs): rx_thread.start() def get_whitelist(self): - log.info("Generating CAN ID whitelist") + log.debug("Generating CAN ID whitelist") white_list = [] - for entry in self.mapper.get_dbc2val_entries(): - canid = self.dbc_parser.get_canid_for_signal(entry) + for signal_name in self.mapper.get_dbc2val_entries(): + canid = self.dbc_parser.get_canid_for_signal(signal_name) if canid is not None and canid not in white_list: - log.info(f"Adding {entry} to white list, canid is {canid}") + log.debug("Adding CAN frame id %d of message containing signal %s to white list", canid, signal_name) white_list.append(canid) return white_list @@ -71,26 +71,31 @@ def rx_worker(self): log.info("Starting Rx thread") while self.run: msg = self.canclient.recv(timeout=1) - log.debug("processing message from CAN bus") if msg and msg.get_arbitration_id() in self.canidwl: + log.debug("processing message with frame ID %#x from CAN bus", msg.get_arbitration_id()) try: - decode = self.dbc_parser.db.decode_message(msg.get_arbitration_id(), msg.get_data()) - log.debug("Decoded message: %s", str(decode)) + message_def = 
self.dbc_parser.get_message_for_canid(msg.get_arbitration_id()) + if message_def is not None: + decode = message_def.decode(data=msg.get_data()) + else: + # no message definition found for frame ID + continue except Exception: - log.warning( - "Error Decoding: ID:{}".format(msg.get_arbitration_id()), - exc_info=True, - ) + log.warning("Error Decoding frame with ID: %#x", msg.get_arbitration_id(), exc_info=True) continue + + if log.isEnabledFor(logging.DEBUG): + log.debug("Decoded message: %s", str(decode)) + rx_time = time.time() for k, v in decode.items(): vss_mappings = self.mapper.get_dbc2val_mappings(k) for signal in vss_mappings: if signal.time_condition_fulfilled(rx_time): - log.debug(f"Queueing {signal.vss_name}, triggered by {k}, raw value {v} ") + log.debug("Queueing %s, triggered by %s, raw value %s ", signal.vss_name, k, v) self.queue.put(dbc2vssmapper.VSSObservation(k, signal.vss_name, v, rx_time)) else: - log.debug(f"Ignoring {signal.vss_name}, triggered by {k}, raw value {v} ") + log.debug("Ignoring %s, triggered by %s, raw value %s ", signal.vss_name, k, v) log.info("Stopped Rx thread") def stop(self): diff --git a/dbc2val/dbcfeederlib/serverclientwrapper.py b/dbc2val/dbcfeederlib/serverclientwrapper.py index 1219a760..7ef6a72e 100644 --- a/dbc2val/dbcfeederlib/serverclientwrapper.py +++ b/dbc2val/dbcfeederlib/serverclientwrapper.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 ################################################################################# -# Copyright (c) 2022 Contributors to the Eclipse Foundation +# Copyright (c) 2022, 2023 Contributors to the Eclipse Foundation # # See the NOTICE file(s) distributed with this work for additional # information regarding copyright ownership. @@ -42,11 +42,8 @@ def __init__(self, ip: str = "localhost", port: int = 8090, self._client_config["protocol"] = "ws" self._kuksa = None - def get_client_specific_configs(self): - """ - Get client specific configs and env variables - """ - log.debug("No additional configs for KUKSA.val server") + def _do_init(self): + log.debug("No additional initialization necessary for KUKSA.val server") def start(self): """ diff --git a/dbc2val/test/test_dbc/test_dbc.py b/dbc2val/test/test_dbc/test_dbc.py index 12028b0c..a350bda5 100644 --- a/dbc2val/test/test_dbc/test_dbc.py +++ b/dbc2val/test/test_dbc/test_dbc.py @@ -18,8 +18,11 @@ # SPDX-License-Identifier: Apache-2.0 ######################################################################## -from dbcfeederlib import dbcparser import os +import pytest + +from dbcfeederlib import dbcparser +from dbcfeeder import ServerType # read config only once test_path = os.path.dirname(os.path.abspath(__file__)) @@ -28,7 +31,7 @@ def test_default_dbc(): - parser3 = dbcparser.DBCParser(def_dbc) + parser3 = dbcparser.DBCParser([def_dbc]) assert parser3.get_canid_for_signal('SteeringAngle129') == 297 assert parser3.get_canid_for_signal('DI_uiSpeed') == 599 @@ -38,7 +41,7 @@ def test_splitted_dbc(): This verifies that we can read a splitted DBC File. 
Difference compared to default is that 'SteeringAngle129' has been moved to test1_2.dbc """ - parser3 = dbcparser.DBCParser(test_path + "/test1_1.dbc," + test_path + "/test1_2.dbc") + parser3 = dbcparser.DBCParser([test_path + "/test1_1.dbc", test_path + "/test1_2.dbc"]) assert parser3.get_canid_for_signal('SteeringAngle129') == 297 assert parser3.get_canid_for_signal('DI_uiSpeed') == 599 @@ -47,6 +50,6 @@ def test_duplicated_dbc(): """ Load original DBC multiple times """ - parser3 = dbcparser.DBCParser(def_dbc + ',' + def_dbc) + parser3 = dbcparser.DBCParser([def_dbc, def_dbc]) assert parser3.get_canid_for_signal('SteeringAngle129') == 297 assert parser3.get_canid_for_signal('DI_uiSpeed') == 599 diff --git a/dbc2val/test/test_dbc/test_kcd.py b/dbc2val/test/test_dbc/test_kcd.py index 73594769..b82aa242 100644 --- a/dbc2val/test/test_dbc/test_kcd.py +++ b/dbc2val/test/test_dbc/test_kcd.py @@ -27,7 +27,7 @@ def test_single_kcd(): - parser = dbcparser.DBCParser(test_path + "/test1_1.kcd") + parser = dbcparser.DBCParser([test_path + "/test1_1.kcd"]) assert parser.get_canid_for_signal('DI_bmsRequestInterfaceVersion') == 0x16 @@ -35,7 +35,7 @@ def test_split_kcd(): """ This verifies that we can read multiple KCD files. """ - parser = dbcparser.DBCParser(test_path + "/test1_1.kcd," + test_path + "/test1_2.kcd") + parser = dbcparser.DBCParser([test_path + "/test1_1.kcd", test_path + "/test1_2.kcd"]) assert parser.get_canid_for_signal('DI_bmsRequestInterfaceVersion') == 0x16 assert parser.get_canid_for_signal('SteeringAngle129') == 0x129 @@ -44,7 +44,7 @@ def test_mixed_file_types(): """ This verifies that we can read files of different type. """ - parser = dbcparser.DBCParser(test_path + "/test1_1.kcd," + test_path + "/test1_2.dbc") + parser = dbcparser.DBCParser([test_path + "/test1_1.kcd", test_path + "/test1_2.dbc"]) assert parser.get_canid_for_signal('DI_bmsRequestInterfaceVersion') == 0x16 assert parser.get_canid_for_signal('SteeringAngle129') == 0x129 @@ -53,5 +53,5 @@ def test_duplicated_dbc(): """ Load original KCD multiple times """ - parser = dbcparser.DBCParser(test_path + "/test1_1.kcd," + test_path + "/test1_1.kcd") + parser = dbcparser.DBCParser([test_path + "/test1_1.kcd", test_path + "/test1_1.kcd"]) assert parser.get_canid_for_signal('DI_bmsRequestInterfaceVersion') == 0x16 diff --git a/dbc2val/test/test_example_mapping/test_example_mapping.py b/dbc2val/test/test_example_mapping/test_example_mapping.py index f653f5dd..89349788 100644 --- a/dbc2val/test/test_example_mapping/test_example_mapping.py +++ b/dbc2val/test/test_example_mapping/test_example_mapping.py @@ -25,7 +25,7 @@ # read config only once test_path = os.path.dirname(os.path.abspath(__file__)) mapping_path = test_path + "/../../mapping/vss_4.0/vss_dbc.json" -parser = dbcparser.DBCParser(test_path + "/../../Model3CAN.dbc") +parser = dbcparser.DBCParser([test_path + "/../../Model3CAN.dbc"]) mapper: dbc2vssmapper.Mapper = dbc2vssmapper.Mapper(mapping_path, parser) diff --git a/dbc2val/test/test_mapping_error/test_mapping_error.py b/dbc2val/test/test_mapping_error/test_mapping_error.py index deff444d..3ae0fbf7 100644 --- a/dbc2val/test/test_mapping_error/test_mapping_error.py +++ b/dbc2val/test/test_mapping_error/test_mapping_error.py @@ -29,7 +29,7 @@ def test_unknown_transform(caplog, capsys): test_path = os.path.dirname(os.path.abspath(__file__)) mapping_path = test_path + "/test_unknown_transform.json" - parser = dbcparser.DBCParser(test_path + "/../../Model3CAN.dbc") + parser = dbcparser.DBCParser([test_path + 
"/../../Model3CAN.dbc"]) with pytest.raises(SystemExit) as excinfo: dbc2vssmapper.Mapper(mapping_path, parser) diff --git a/dbc2val/test/test_mapping_transform/test_mapping_transform.py b/dbc2val/test/test_mapping_transform/test_mapping_transform.py index c44a8c43..0f7bb6ae 100644 --- a/dbc2val/test/test_mapping_transform/test_mapping_transform.py +++ b/dbc2val/test/test_mapping_transform/test_mapping_transform.py @@ -30,7 +30,7 @@ test_path = os.path.dirname(os.path.abspath(__file__)) mapping_path = test_path + "/test.json" -parser = dbcparser.DBCParser(test_path + "/../../Model3CAN.dbc") +parser = dbcparser.DBCParser([test_path + "/../../Model3CAN.dbc"]) mapper: dbc2vssmapper.Mapper = dbc2vssmapper.Mapper(mapping_path, parser)