Diffstat (limited to 'recipes-connectivity/kuksa-val/kuksa-dbc-feeder')
-rw-r--r--  recipes-connectivity/kuksa-val/kuksa-dbc-feeder/0001-dbc2val-add-installation-mechanism.patch    | 236
-rw-r--r--  recipes-connectivity/kuksa-val/kuksa-dbc-feeder/0002-dbc2val-usability-improvements.patch        | 211
-rw-r--r--  recipes-connectivity/kuksa-val/kuksa-dbc-feeder/0003-dbc2val-add-duplicate-filtering-option.patch |  61
-rw-r--r--  recipes-connectivity/kuksa-val/kuksa-dbc-feeder/config.ini                                        |  10
-rw-r--r--  recipes-connectivity/kuksa-val/kuksa-dbc-feeder/mapping.yml                                       |   2
5 files changed, 293 insertions(+), 227 deletions(-)
diff --git a/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/0001-dbc2val-add-installation-mechanism.patch b/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/0001-dbc2val-add-installation-mechanism.patch
index 1e84eeb7e..4221df5c8 100644
--- a/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/0001-dbc2val-add-installation-mechanism.patch
+++ b/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/0001-dbc2val-add-installation-mechanism.patch
@@ -1,93 +1,202 @@
-From 603b5a584bd2b348befc9bab8f1ea0a7974f9d77 Mon Sep 17 00:00:00 2001
+From 22062d99a89922e02b626e4701feaac8947af5fb Mon Sep 17 00:00:00 2001
From: Scott Murray <scott.murray@konsulko.com>
-Date: Wed, 11 May 2022 15:31:25 -0400
-Subject: [PATCH] dbc2val: add installation mechanism
+Date: Fri, 7 Oct 2022 15:54:48 -0400
+Subject: [PATCH 1/2] dbc2val: add installation mechanism
Add setup.py and setup.cfg to allow installing the dbc2val module
and dbcfeeder.py in a way suitable for packaging. Some of the
imports in the scripts have been tweaked to enable running against
an installed copy of dbc2val.
-Signed-off-by: Scott Murray <scott.murray@konsulko.com>
+Upstream-Status: pending
+Signed-off-by: Scott Murray <scott.murray@konsulko.com>
---
- kuksa_feeders/dbc2val/__init__.py | 0
- kuksa_feeders/dbc2val/dbc2vssmapper.py | 12 ++++-----
- kuksa_feeders/dbc2val/dbcfeeder.py | 6 ++---
- kuksa_feeders/setup.cfg | 31 ++++++++++++++++++++++
- kuksa_feeders/setup.py | 36 ++++++++++++++++++++++++++
- 5 files changed, 75 insertions(+), 10 deletions(-)
- create mode 100644 kuksa_feeders/dbc2val/__init__.py
- create mode 100644 kuksa_feeders/setup.cfg
- create mode 100644 kuksa_feeders/setup.py
+ dbc2val/__init__.py | 0
+ dbc2val/databroker.py | 4 +--
+ dbc2val/dbc2vssmapper.py | 12 +++----
+ dbc2val/dbcfeeder.py | 8 ++---
+ dbc2val/gen_proto/__init__.py | 0
+ dbc2val/gen_proto/sdv/__init__.py | 0
+ dbc2val/gen_proto/sdv/databroker/__init__.py | 0
+ .../gen_proto/sdv/databroker/v1/__init__.py | 0
+ .../gen_proto/sdv/databroker/v1/broker_pb2.py | 2 +-
+ .../sdv/databroker/v1/broker_pb2_grpc.py | 2 +-
+ .../sdv/databroker/v1/collector_pb2.py | 2 +-
+ .../sdv/databroker/v1/collector_pb2_grpc.py | 2 +-
+ setup.cfg | 31 ++++++++++++++++
+ setup.py | 36 +++++++++++++++++++
+ 14 files changed, 82 insertions(+), 17 deletions(-)
+ create mode 100644 dbc2val/__init__.py
+ create mode 100644 dbc2val/gen_proto/__init__.py
+ create mode 100644 dbc2val/gen_proto/sdv/__init__.py
+ create mode 100644 dbc2val/gen_proto/sdv/databroker/__init__.py
+ create mode 100644 dbc2val/gen_proto/sdv/databroker/v1/__init__.py
+ create mode 100644 setup.cfg
+ create mode 100644 setup.py
-diff --git a/kuksa_feeders/dbc2val/__init__.py b/kuksa_feeders/dbc2val/__init__.py
+diff --git a/dbc2val/__init__.py b/dbc2val/__init__.py
new file mode 100644
index 0000000..e69de29
-diff --git a/kuksa_feeders/dbc2val/dbc2vssmapper.py b/kuksa_feeders/dbc2val/dbc2vssmapper.py
-index a43d1bd..1718154 100644
---- a/kuksa_feeders/dbc2val/dbc2vssmapper.py
-+++ b/kuksa_feeders/dbc2val/dbc2vssmapper.py
-@@ -11,8 +11,8 @@
- ########################################################################
+diff --git a/dbc2val/databroker.py b/dbc2val/databroker.py
+index c1a941a..ba52980 100644
+--- a/dbc2val/databroker.py
++++ b/dbc2val/databroker.py
+@@ -16,13 +16,13 @@
+ import logging
+
+ import grpc
+-from gen_proto.sdv.databroker.v1 import (
++from dbc2val.gen_proto.sdv.databroker.v1 import (
+ broker_pb2,
+ broker_pb2_grpc,
+ collector_pb2,
+ collector_pb2_grpc,
+ )
+-from gen_proto.sdv.databroker.v1 import types_pb2 as types # for export
++from dbc2val.gen_proto.sdv.databroker.v1 import types_pb2 as types # for export
+
+ log = logging.getLogger(__name__)
+
+diff --git a/dbc2val/dbc2vssmapper.py b/dbc2val/dbc2vssmapper.py
+index 05b78f1..d76c068 100644
+--- a/dbc2val/dbc2vssmapper.py
++++ b/dbc2val/dbc2vssmapper.py
+@@ -20,8 +20,8 @@
+
import yaml
-import transforms.mapping
-import transforms.math
+from dbc2val.transforms import mapping
+from dbc2val.transforms import math
+ import logging
-
- class mapper:
-@@ -22,9 +22,9 @@ class mapper:
+ log = logging.getLogger(__name__)
+@@ -33,13 +33,13 @@ class mapper:
self.mapping = yaml.full_load(file)
- self.transforms={}
-- self.transforms['fullmapping']=transforms.mapping.mapping(discard_non_matching_items=True)
-- self.transforms['partialmapping']=transforms.mapping.mapping(discard_non_matching_items=False)
-- self.transforms['math']=transforms.math.math()
-+ self.transforms['fullmapping']=mapping.mapping(discard_non_matching_items=True)
-+ self.transforms['partialmapping']=mapping.mapping(discard_non_matching_items=False)
-+ self.transforms['math']=math.math()
-
-
+ self.transforms = {}
+- self.transforms["fullmapping"] = transforms.mapping.mapping(
++ self.transforms["fullmapping"] = mapping.mapping(
+ discard_non_matching_items=True
+ )
+- self.transforms["partialmapping"] = transforms.mapping.mapping(
++ self.transforms["partialmapping"] = mapping.mapping(
+ discard_non_matching_items=False
+ )
+- self.transforms["math"] = transforms.math.math()
++ self.transforms["math"] = math.math()
-@@ -62,4 +62,4 @@ class mapper:
+ for key in self.mapping.keys():
+ self.mapping[key]["lastupdate"] = 0.0
+@@ -93,4 +93,4 @@ class mapper:
def __getitem__(self, item):
return self.mapping[item]
-
\ No newline at end of file
+
-diff --git a/kuksa_feeders/dbc2val/dbcfeeder.py b/kuksa_feeders/dbc2val/dbcfeeder.py
-index 71939c6..56c316a 100755
---- a/kuksa_feeders/dbc2val/dbcfeeder.py
-+++ b/kuksa_feeders/dbc2val/dbcfeeder.py
-@@ -16,10 +16,7 @@ import configparser
- import queue
- import json
+diff --git a/dbc2val/dbcfeeder.py b/dbc2val/dbcfeeder.py
+index 5267369..7daaa2b 100755
+--- a/dbc2val/dbcfeeder.py
++++ b/dbc2val/dbcfeeder.py
+@@ -28,16 +28,13 @@ import sys
+ import time
+ from signal import SIGINT, SIGTERM, signal
+-import canplayer
-import dbc2vssmapper
-import dbcreader
+ import grpc
-import j1939reader
--import elm2canbridge
-+from dbc2val import dbc2vssmapper, dbcreader, j1939reader, elm2canbridge
-
- scriptDir= os.path.dirname(os.path.realpath(__file__))
- sys.path.append(os.path.join(scriptDir, "../../"))
-@@ -27,6 +24,7 @@ from kuksa_viss_client import KuksaClientThread
-
- print("kuksa.val DBC example feeder")
- config_candidates=['/config/dbc_feeder.ini', '/etc/dbc_feeder.ini', os.path.join(scriptDir, 'config/dbc_feeder.ini')]
-+configfile = None
- for candidate in config_candidates:
- if os.path.isfile(candidate):
- configfile=candidate
-diff --git a/kuksa_feeders/setup.cfg b/kuksa_feeders/setup.cfg
++from dbc2val import dbc2vssmapper, dbcreader, j1939reader, canplayer
+
+ # kuksa related
+ from kuksa_viss_client import KuksaClientThread
+ # databroker related
+-import databroker
++from dbc2val import databroker
+
+ # global variable for usecase, default databroker
+ USE_CASE = ""
+@@ -289,6 +286,7 @@ def parse_config(filename):
+ "/etc/dbc_feeder.ini",
+ "config/dbc_feeder.ini",
+ ]
++ configfile = None
+ for candidate in config_candidates:
+ if os.path.isfile(candidate):
+ configfile = candidate
+diff --git a/dbc2val/gen_proto/__init__.py b/dbc2val/gen_proto/__init__.py
+new file mode 100644
+index 0000000..e69de29
+diff --git a/dbc2val/gen_proto/sdv/__init__.py b/dbc2val/gen_proto/sdv/__init__.py
+new file mode 100644
+index 0000000..e69de29
+diff --git a/dbc2val/gen_proto/sdv/databroker/__init__.py b/dbc2val/gen_proto/sdv/databroker/__init__.py
+new file mode 100644
+index 0000000..e69de29
+diff --git a/dbc2val/gen_proto/sdv/databroker/v1/__init__.py b/dbc2val/gen_proto/sdv/databroker/v1/__init__.py
new file mode 100644
-index 0000000..4b69ccf
+index 0000000..e69de29
+diff --git a/dbc2val/gen_proto/sdv/databroker/v1/broker_pb2.py b/dbc2val/gen_proto/sdv/databroker/v1/broker_pb2.py
+index 98dc94a..6deacfb 100644
+--- a/dbc2val/gen_proto/sdv/databroker/v1/broker_pb2.py
++++ b/dbc2val/gen_proto/sdv/databroker/v1/broker_pb2.py
+@@ -12,7 +12,7 @@ from google.protobuf import symbol_database as _symbol_database
+ _sym_db = _symbol_database.Default()
+
+
+-from gen_proto.sdv.databroker.v1 import types_pb2 as sdv_dot_databroker_dot_v1_dot_types__pb2
++from dbc2val.gen_proto.sdv.databroker.v1 import types_pb2 as sdv_dot_databroker_dot_v1_dot_types__pb2
+
+
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1esdv/databroker/v1/broker.proto\x12\x11sdv.databroker.v1\x1a\x1dsdv/databroker/v1/types.proto\"*\n\x14GetDatapointsRequest\x12\x12\n\ndatapoints\x18\x01 \x03(\t\"\xb0\x01\n\x12GetDatapointsReply\x12I\n\ndatapoints\x18\x01 \x03(\x0b\x32\x35.sdv.databroker.v1.GetDatapointsReply.DatapointsEntry\x1aO\n\x0f\x44\x61tapointsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1c.sdv.databroker.v1.Datapoint:\x02\x38\x01\"!\n\x10SubscribeRequest\x12\r\n\x05query\x18\x02 \x01(\t\"\x9c\x01\n\x0eSubscribeReply\x12=\n\x06\x66ields\x18\x01 \x03(\x0b\x32-.sdv.databroker.v1.SubscribeReply.FieldsEntry\x1aK\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1c.sdv.databroker.v1.Datapoint:\x02\x38\x01\"#\n\x12GetMetadataRequest\x12\r\n\x05names\x18\x01 \x03(\t\"=\n\x10GetMetadataReply\x12)\n\x04list\x18\x01 \x03(\x0b\x32\x1b.sdv.databroker.v1.Metadata2\x9b\x02\n\x06\x42roker\x12_\n\rGetDatapoints\x12\'.sdv.databroker.v1.GetDatapointsRequest\x1a%.sdv.databroker.v1.GetDatapointsReply\x12U\n\tSubscribe\x12#.sdv.databroker.v1.SubscribeRequest\x1a!.sdv.databroker.v1.SubscribeReply0\x01\x12Y\n\x0bGetMetadata\x12%.sdv.databroker.v1.GetMetadataRequest\x1a#.sdv.databroker.v1.GetMetadataReplyb\x06proto3')
+diff --git a/dbc2val/gen_proto/sdv/databroker/v1/broker_pb2_grpc.py b/dbc2val/gen_proto/sdv/databroker/v1/broker_pb2_grpc.py
+index 5c87db1..bc6be8b 100644
+--- a/dbc2val/gen_proto/sdv/databroker/v1/broker_pb2_grpc.py
++++ b/dbc2val/gen_proto/sdv/databroker/v1/broker_pb2_grpc.py
+@@ -2,7 +2,7 @@
+ """Client and server classes corresponding to protobuf-defined services."""
+ import grpc
+
+-from gen_proto.sdv.databroker.v1 import broker_pb2 as sdv_dot_databroker_dot_v1_dot_broker__pb2
++from dbc2val.gen_proto.sdv.databroker.v1 import broker_pb2 as sdv_dot_databroker_dot_v1_dot_broker__pb2
+
+
+ class BrokerStub(object):
+diff --git a/dbc2val/gen_proto/sdv/databroker/v1/collector_pb2.py b/dbc2val/gen_proto/sdv/databroker/v1/collector_pb2.py
+index 25d69dd..ea15409 100644
+--- a/dbc2val/gen_proto/sdv/databroker/v1/collector_pb2.py
++++ b/dbc2val/gen_proto/sdv/databroker/v1/collector_pb2.py
+@@ -12,7 +12,7 @@ from google.protobuf import symbol_database as _symbol_database
+ _sym_db = _symbol_database.Default()
+
+
+-from gen_proto.sdv.databroker.v1 import types_pb2 as sdv_dot_databroker_dot_v1_dot_types__pb2
++from dbc2val.gen_proto.sdv.databroker.v1 import types_pb2 as sdv_dot_databroker_dot_v1_dot_types__pb2
+
+
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n!sdv/databroker/v1/collector.proto\x12\x11sdv.databroker.v1\x1a\x1dsdv/databroker/v1/types.proto\"\xba\x01\n\x17UpdateDatapointsRequest\x12N\n\ndatapoints\x18\x01 \x03(\x0b\x32:.sdv.databroker.v1.UpdateDatapointsRequest.DatapointsEntry\x1aO\n\x0f\x44\x61tapointsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1c.sdv.databroker.v1.Datapoint:\x02\x38\x01\"\xaf\x01\n\x15UpdateDatapointsReply\x12\x44\n\x06\x65rrors\x18\x01 \x03(\x0b\x32\x34.sdv.databroker.v1.UpdateDatapointsReply.ErrorsEntry\x1aP\n\x0b\x45rrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\x30\n\x05value\x18\x02 \x01(\x0e\x32!.sdv.databroker.v1.DatapointError:\x02\x38\x01\"\xba\x01\n\x17StreamDatapointsRequest\x12N\n\ndatapoints\x18\x01 \x03(\x0b\x32:.sdv.databroker.v1.StreamDatapointsRequest.DatapointsEntry\x1aO\n\x0f\x44\x61tapointsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1c.sdv.databroker.v1.Datapoint:\x02\x38\x01\"\xaf\x01\n\x15StreamDatapointsReply\x12\x44\n\x06\x65rrors\x18\x01 \x03(\x0b\x32\x34.sdv.databroker.v1.StreamDatapointsReply.ErrorsEntry\x1aP\n\x0b\x45rrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\x30\n\x05value\x18\x02 \x01(\x0e\x32!.sdv.databroker.v1.DatapointError:\x02\x38\x01\"R\n\x19RegisterDatapointsRequest\x12\x35\n\x04list\x18\x01 \x03(\x0b\x32\'.sdv.databroker.v1.RegistrationMetadata\"\x9d\x01\n\x14RegistrationMetadata\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\tdata_type\x18\x02 \x01(\x0e\x32\x1b.sdv.databroker.v1.DataType\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x32\n\x0b\x63hange_type\x18\x04 \x01(\x0e\x32\x1d.sdv.databroker.v1.ChangeType\"\x93\x01\n\x17RegisterDatapointsReply\x12H\n\x07results\x18\x01 \x03(\x0b\x32\x37.sdv.databroker.v1.RegisterDatapointsReply.ResultsEntry\x1a.\n\x0cResultsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x32\xd3\x02\n\tCollector\x12n\n\x12RegisterDatapoints\x12,.sdv.databroker.v1.RegisterDatapointsRequest\x1a*.sdv.databroker.v1.RegisterDatapointsReply\x12h\n\x10UpdateDatapoints\x12*.sdv.databroker.v1.UpdateDatapointsRequest\x1a(.sdv.databroker.v1.UpdateDatapointsReply\x12l\n\x10StreamDatapoints\x12*.sdv.databroker.v1.StreamDatapointsRequest\x1a(.sdv.databroker.v1.StreamDatapointsReply(\x01\x30\x01\x62\x06proto3')
+diff --git a/dbc2val/gen_proto/sdv/databroker/v1/collector_pb2_grpc.py b/dbc2val/gen_proto/sdv/databroker/v1/collector_pb2_grpc.py
+index 419bfde..2dee751 100644
+--- a/dbc2val/gen_proto/sdv/databroker/v1/collector_pb2_grpc.py
++++ b/dbc2val/gen_proto/sdv/databroker/v1/collector_pb2_grpc.py
+@@ -2,7 +2,7 @@
+ """Client and server classes corresponding to protobuf-defined services."""
+ import grpc
+
+-from gen_proto.sdv.databroker.v1 import collector_pb2 as sdv_dot_databroker_dot_v1_dot_collector__pb2
++from dbc2val.gen_proto.sdv.databroker.v1 import collector_pb2 as sdv_dot_databroker_dot_v1_dot_collector__pb2
+
+
+ class CollectorStub(object):
+diff --git a/setup.cfg b/setup.cfg
+new file mode 100644
+index 0000000..70aab60
--- /dev/null
-+++ b/kuksa_feeders/setup.cfg
++++ b/setup.cfg
@@ -0,0 +1,31 @@
+[metadata]
+name = dbc2val
@@ -105,11 +214,11 @@ index 0000000..4b69ccf
+ Development Status :: 3 - Alpha
+ Environment :: Console
+ Programming Language :: Python :: 3
-+ License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0)
++ License :: OSI Approved :: Apache Software License
+ Operating System :: OS Independent
+ Topic :: Software Development
+
-+license_file = ../LICENSE
++license_file = LICENSE.txt
+
+[options]
+python_requires = >=3.6
@@ -120,11 +229,11 @@ index 0000000..4b69ccf
+packages=find:
+include_package_data = True
+scripts=dbc2val/dbcfeeder.py
-diff --git a/kuksa_feeders/setup.py b/kuksa_feeders/setup.py
+diff --git a/setup.py b/setup.py
new file mode 100644
-index 0000000..ad08d17
+index 0000000..55951cc
--- /dev/null
-+++ b/kuksa_feeders/setup.py
++++ b/setup.py
@@ -0,0 +1,36 @@
+# To avoid shipping dbcfeeder.py in the module itself, use the
+# technique outlined at:
@@ -154,7 +263,7 @@ index 0000000..ad08d17
+ "template": "{tag}",
+ "dev_template": "{tag}-{ccount}",
+ "dirty_template": "{tag}-{ccount}-dirty",
-+ "starting_version": "0.1.11",
++ "starting_version": "0.1.0",
+ "version_callback": None,
+ "version_file": None,
+ "count_commits_from_version_file": False
@@ -162,3 +271,6 @@ index 0000000..ad08d17
+ setup_requires=['setuptools-git-versioning'],
+ cmdclass={'build_py': build_py},
+)
+--
+2.37.3
+
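The setup.py added by this patch is only partially visible here because the outer diff elides part of its body; in particular, the build_py override that its opening comment refers to is not shown. A minimal sketch of that general technique — keeping dbc2val/dbcfeeder.py out of the installed package while still shipping it via scripts= — with all names assumed rather than copied from the patch:

```python
# Hypothetical sketch, not the patch's literal setup.py.
from setuptools import setup, find_packages
from setuptools.command.build_py import build_py as _build_py

class build_py(_build_py):
    def find_package_modules(self, package, package_dir):
        # Filter dbcfeeder out of the package build; it is installed
        # separately through scripts= (see setup.cfg above).
        modules = super().find_package_modules(package, package_dir)
        return [(pkg, mod, path) for pkg, mod, path in modules
                if (pkg, mod) != ("dbc2val", "dbcfeeder")]

setup(
    packages=find_packages(),
    cmdclass={"build_py": build_py},
)
```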
diff --git a/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/0002-dbc2val-usability-improvements.patch b/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/0002-dbc2val-usability-improvements.patch
index 12e366aa8..be88fa0ee 100644
--- a/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/0002-dbc2val-usability-improvements.patch
+++ b/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/0002-dbc2val-usability-improvements.patch
@@ -1,120 +1,129 @@
-From fe10a3645e77cd8122d3d312d317bedcb88bc683 Mon Sep 17 00:00:00 2001
+From d0730c0d643738ac683d1b5c19b117893e3b2049 Mon Sep 17 00:00:00 2001
From: Scott Murray <scott.murray@konsulko.com>
-Date: Thu, 12 May 2022 17:39:56 +0200
-Subject: [PATCH] dbc2val: usability improvements
+Date: Tue, 11 Oct 2022 15:28:07 -0400
+Subject: [PATCH 2/2] dbc2val: usability improvements
Changes:
- Tweaked default configuration file search path to better match
Linux FHS and kuksa-val-server. First look for a config.ini in
/etc/kuksa-dbc-feeder, then /etc/dbc_feeder.ini.
-- Added a command-line option to specify configuration file, this
- should allow running two instances against different interfaces.
-- Added verbosity command-line option and made several messages
- verbose mode only to avoid log spamming.
-- Added '-u' option to python invocation to disable output buffering.
- The intent is to make logging immediate, otherwise errors may not
- get logged for some time (or at all).
- Add catching of exceptions around CAN device opening so that the
script can exit cleanly with an error message if the device is
not available.
+- Fixed shutdown behavior with some tweaks to actually stop the
+ reader and KUKSA.val client library threads. This makes the
+ script actually exit on SIGTERM as opposed to hanging.
Upstream-Status: pending
+
Signed-off-by: Scott Murray <scott.murray@konsulko.com>
---
- kuksa_feeders/dbc2val/dbcfeeder.py | 40 ++++++++++++++++++++++--------
- 1 file changed, 29 insertions(+), 11 deletions(-)
+ dbc2val/dbcfeeder.py | 21 +++++++++++++++------
+ dbc2val/dbcreader.py | 12 ++++++++----
+ 2 files changed, 23 insertions(+), 10 deletions(-)
-diff --git a/kuksa_feeders/dbc2val/dbcfeeder.py b/kuksa_feeders/dbc2val/dbcfeeder.py
-index 56c316a..d2d70b9 100755
---- a/kuksa_feeders/dbc2val/dbcfeeder.py
-+++ b/kuksa_feeders/dbc2val/dbcfeeder.py
-@@ -1,4 +1,4 @@
--#!/usr/bin/env python
-+#!/usr/bin/env -S python -u
-
- ########################################################################
- # Copyright (c) 2020 Robert Bosch GmbH
-@@ -15,15 +15,21 @@ import os, sys, signal
- import configparser
- import queue
- import json
-+import argparse
-
- from dbc2val import dbc2vssmapper, dbcreader, j1939reader, elm2canbridge
--
--scriptDir= os.path.dirname(os.path.realpath(__file__))
--sys.path.append(os.path.join(scriptDir, "../../"))
- from kuksa_viss_client import KuksaClientThread
-
--print("kuksa.val DBC example feeder")
--config_candidates=['/config/dbc_feeder.ini', '/etc/dbc_feeder.ini', os.path.join(scriptDir, 'config/dbc_feeder.ini')]
-+parser = argparse.ArgumentParser("kuksa.val DBC example feeder")
-+parser.add_argument("-c", "--config", dest="userconfig")
-+parser.add_argument("-v", "--verbose", action="store_true")
-+args = parser.parse_args()
-+
-+if args.verbose:
-+ print("kuksa.val DBC example feeder")
-+config_candidates=['/etc/kuksa-dbc-feeder/config.ini', '/etc/dbc_feeder.ini']
-+if args.userconfig is not None:
-+ config_candidates.insert(0, args.userconfig)
- configfile = None
- for candidate in config_candidates:
- if os.path.isfile(candidate):
-@@ -54,10 +60,12 @@ cancfg = config['can']
- canport = cancfg['port']
+diff --git a/dbc2val/dbcfeeder.py b/dbc2val/dbcfeeder.py
+index 7daaa2b..33c2212 100755
+--- a/dbc2val/dbcfeeder.py
++++ b/dbc2val/dbcfeeder.py
+@@ -93,6 +93,7 @@ class Feeder:
+ self._connected = False
+ self._registered = False
+ self._can_queue = queue.Queue()
++ self.kuksa = None
- if config["can"].getboolean("j1939", False):
-- print("Use j1939 reader")
-+ if args.verbose:
-+ print("Use j1939 reader")
- reader = j1939reader.J1939Reader(cancfg,canQueue,mapping)
- else:
-- print("Use dbc reader")
-+ if args.verbose:
-+ print("Use dbc reader")
- reader = dbcreader.DBCReader(cancfg, canQueue,mapping)
+ def start(
+ self,
+@@ -134,7 +135,11 @@ class Feeder:
+ else:
+ # use socketCAN
+ log.info("Using socket CAN device '%s'", canport)
+- self._reader.start_listening(bustype="socketcan", channel=canport)
++ try:
++ self._reader.start_listening(bustype="socketcan", channel=canport)
++ except:
++ log.error("Could not open {}, exiting".format(canport))
++ sys.exit(-1)
+
+ # databroker related
+ if USE_CASE=="databroker":
+@@ -151,6 +156,8 @@ class Feeder:
+ log.info("Shutting down...")
+ self._shutdown = True
+ # Tell others to stop
++ if USE_CASE=="kuksa" and self.kuksa is not None:
++ self.kuksa.stop()
+ if self._reader is not None:
+ self._reader.stop()
+ if self._player is not None:
+@@ -204,9 +211,10 @@ class Feeder:
+ # kuksa related
+ if USE_CASE=="kuksa":
+ global kuksaconfig
+- kuksa = KuksaClientThread(kuksaconfig)
+- kuksa.start()
+- kuksa.authorize()
++ self.kuksa = KuksaClientThread(kuksaconfig)
++ self.kuksa.start()
++ if "token" in kuksaconfig:
++ self.kuksa.authorize(kuksaconfig["token"])
+
+ while self._shutdown is False:
+ # databroker related
+@@ -255,7 +263,7 @@ class Feeder:
+ self._provider.update_datapoint(target, value)
+ # kuksa related
+ elif USE_CASE=="kuksa":
+- resp=json.loads(kuksa.setValue(target, str(value)))
++ resp=json.loads(self.kuksa.setValue(target, str(value)))
+ if "error" in resp:
+ if "message" in resp["error"]:
+ log.error("Error setting {}: {}".format(target, resp["error"]["message"]))
+@@ -282,8 +290,9 @@ def parse_config(filename):
+ configfile = filename
+ else:
+ config_candidates = [
+- "/config/dbc_feeder.ini",
++ "/etc/kuksa-dbc-feeder/config.ini",
+ "/etc/dbc_feeder.ini",
++ "/config/dbc_feeder.ini",
+ "config/dbc_feeder.ini",
+ ]
+ configfile = None
+diff --git a/dbc2val/dbcreader.py b/dbc2val/dbcreader.py
+index 2500832..b537a4d 100644
+--- a/dbc2val/dbcreader.py
++++ b/dbc2val/dbcreader.py
+@@ -54,8 +54,8 @@ class DBCReader:
+ Bitrate in bit/s.
+ """
+ self.bus = can.interface.Bus(*args, **kwargs)
+- rxThread = threading.Thread(target=self.rxWorker)
+- rxThread.start()
++ self.rxThread = threading.Thread(target=self.rxWorker)
++ self.rxThread.start()
- if canport == 'elmcan':
-@@ -65,10 +73,18 @@ if canport == 'elmcan':
- print("section {} missing from configuration, exiting".format(canport))
- sys.exit(-1)
+ def get_whitelist(self):
+ log.info("Collecting signals, generating CAN ID whitelist")
+@@ -83,7 +83,10 @@ class DBCReader:
+ def rxWorker(self):
+ log.info("Starting Rx thread")
+ while self.run:
+- msg = self.bus.recv(timeout=1)
++ try:
++ msg = self.bus.recv(timeout=1)
++ except Exception:
++ break
+ if msg and msg.arbitration_id in self.canidwl:
+ try:
+ decode = self.db.decode_message(msg.arbitration_id, msg.data)
+@@ -105,4 +108,5 @@ class DBCReader:
-- print("Using elmcan. Trying to set up elm2can bridge")
-+ if args.verbose:
-+ print("Using elmcan. Trying to set up elm2can bridge")
- elmbr=elm2canbridge.elm2canbridge(canport, config[canport], reader.canidwl)
-
--reader.start_listening()
-+try:
-+ reader.start_listening()
-+except:
-+ print("Could not open {}, exiting".format(canport))
-+ kuksa.stop()
-+ reader.stop()
-+ sys.exit(-1)
-+
- running = True
-
- def terminationSignalreceived(signalNumber, frame):
-@@ -77,6 +93,7 @@ def terminationSignalreceived(signalNumber, frame):
- kuksa.stop()
- reader.stop()
- print("Received termination signal. Shutting down")
-+ sys.exit(0)
-
- signal.signal(signal.SIGINT, terminationSignalreceived)
- signal.signal(signal.SIGQUIT, terminationSignalreceived)
-@@ -88,7 +105,8 @@ while running:
- for target in mapping[signal]['targets']:
- tv=mapping.transform(signal,target,value)
- if tv is not None: #none indicates the transform decided to not set the value
-- print("Update VSS path {} to {} based on signal {}".format(target, tv, signal))
-+ if args.verbose:
-+ print("Update VSS path {} to {} based on signal {}".format(target, tv, signal))
- resp=json.loads(kuksa.setValue(target, str(tv)))
- if "error" in resp:
- if "message" in resp["error"]:
+ def stop(self):
+ self.run = False
+-
++ self.bus.shutdown()
++ self.rxThread.join()
--
-2.35.1
+2.37.3
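Pulling the scattered hunks of this patch together, the shutdown fix amounts to a small pattern: stop the rx loop, shut the bus down so a blocked recv() returns, then join the thread. A condensed, hypothetical sketch of that pattern (names simplified, not the patch's literal code), using python-can:

```python
# Hypothetical sketch of the shutdown pattern applied by the 0002 patch.
import threading
import can

class Reader:
    def __init__(self, channel="can0"):
        self.run = True
        self.bus = can.interface.Bus(bustype="socketcan", channel=channel)
        self.rx_thread = threading.Thread(target=self.rx_worker)
        self.rx_thread.start()

    def rx_worker(self):
        while self.run:
            try:
                msg = self.bus.recv(timeout=1)   # returns None on timeout
            except Exception:
                break                            # bus already shut down
            # ... decode msg and queue it for the feeder ...

    def stop(self):
        self.run = False
        self.bus.shutdown()    # make a blocked recv() fail/return
        self.rx_thread.join()  # wait for the worker thread to exit
```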
diff --git a/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/0003-dbc2val-add-duplicate-filtering-option.patch b/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/0003-dbc2val-add-duplicate-filtering-option.patch
deleted file mode 100644
index 1709ac1e4..000000000
--- a/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/0003-dbc2val-add-duplicate-filtering-option.patch
+++ /dev/null
@@ -1,61 +0,0 @@
-From a22d972bc497ab46d99c1d118bd40b9471fef3a7 Mon Sep 17 00:00:00 2001
-From: Scott Murray <scott.murray@konsulko.com>
-Date: Mon, 13 Jun 2022 12:54:54 -0400
-Subject: [PATCH] dbc2val: add duplicate filtering option
-
-To avoid generating a lot of duplicated signal events from the
-CAN messages generated by LIN polling, add a per-target
-"filter-duplicates" option that can be used for signals where
-only changes should be pushed toi the VIS server. This is
-required with the current performance of the DBC feeder to avoid
-ending up with an increasing backlog of signal commands to the
-server. This will be investigated with upstream.
-
-Upstream-Status: pending
-Signed-off-by: Scott Murray <scott.murray@konsulko.com>
----
- kuksa_feeders/dbc2val/dbc2vssmapper.py | 27 +++++++++++++++++++-------
- 1 file changed, 20 insertions(+), 7 deletions(-)
-
-diff --git a/kuksa_feeders/dbc2val/dbc2vssmapper.py b/kuksa_feeders/dbc2val/dbc2vssmapper.py
-index 1718154..2feb572 100644
---- a/kuksa_feeders/dbc2val/dbc2vssmapper.py
-+++ b/kuksa_feeders/dbc2val/dbc2vssmapper.py
-@@ -48,14 +48,27 @@ class mapper:
- # Check whether there are transforms defined to map DBC signal "signal" to
- # VSS path "target". Returns the (potentially) transformed values
- def transform(self,signal, target, value):
-- if "transform" not in self.mapping[signal]["targets"][target].keys(): #no transform defined, return as is
-- return value
-- for transform in self.mapping[signal]["targets"][target]["transform"]:
-- if transform in self.transforms.keys(): #found a known transform and apply
-- value=self.transforms[transform].transform(self.mapping[signal]["targets"][target]["transform"][transform],value)
-+ result = value
-+ if "transform" in self.mapping[signal]["targets"][target].keys():
-+ for transform in self.mapping[signal]["targets"][target]["transform"]:
-+ if transform in self.transforms.keys(): #found a known transform and apply
-+ result = self.transforms[transform].transform(self.mapping[signal]["targets"][target]["transform"][transform],value)
-+ else:
-+ print(f"Warning: Unknown transform {transform} for {signal}->{target}")
-+ # else no transform defined, return as is
-+
-+ if (("filter-duplicates" in self.mapping[signal]["targets"][target]) and
-+ self.mapping[signal]["targets"][target]["filter-duplicates"] == "true"):
-+ if "last" in self.mapping[signal]["targets"][target]:
-+ if self.mapping[signal]["targets"][target]["last"] == result:
-+ # ignore duplicate value
-+ result = None
-+ else:
-+ self.mapping[signal]["targets"][target]["last"] = result
- else:
-- print(f"Warning: Unknown transform {transform} for {signal}->{target}")
-- return value
-+ self.mapping[signal]["targets"][target]["last"] = result
-+
-+ return result
-
- def __contains__(self,key):
- return key in self.mapping.keys()
---
-2.35.1
-
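The removed patch above implemented per-target duplicate filtering by remembering the last value pushed for each signal/target pair. A minimal stand-alone sketch of that idea (hypothetical names, not the deleted code itself):

```python
# Suppress unchanged values so LIN polling does not flood the VIS server.
last_sent = {}

def filter_duplicates(signal, target, value):
    key = (signal, target)
    if last_sent.get(key) == value:
        return None        # unchanged value: caller skips the update
    last_sent[key] = value
    return value
```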
diff --git a/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/config.ini b/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/config.ini
index 25c9fd273..7da8381f0 100644
--- a/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/config.ini
+++ b/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/config.ini
@@ -1,10 +1,16 @@
+[general]
+# use case:
+# switch between databroker and kuksa
+# default kuksa
+usecase = kuksa
+# VSS mapping file
+mapping=/etc/kuksa-dbc-feeder/mapping.yml
+
[kuksa_val]
# kuksa VSS server address
server=wss://localhost:8090
# JWT security token file
token=/etc/kuksa-dbc-feeder/dbc_feeder.json.token
-# VSS mapping file
-mapping=/etc/kuksa-dbc-feeder/mapping.yml
[can]
# CAN port
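For reference, a hypothetical snippet showing how a consumer could read the new [general] options with Python's configparser; the option names come from the config.ini above, but this parsing code is not part of the diff:

```python
import configparser

config = configparser.ConfigParser()
config.read("/etc/kuksa-dbc-feeder/config.ini")

# "usecase" selects between the databroker and kuksa paths; "mapping"
# points at the VSS mapping file.
use_case = config.get("general", "usecase", fallback="kuksa")
mapping_file = config.get("general", "mapping",
                          fallback="/etc/kuksa-dbc-feeder/mapping.yml")
```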
diff --git a/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/mapping.yml b/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/mapping.yml
index ee970a32d..b1c1fece7 100644
--- a/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/mapping.yml
+++ b/recipes-connectivity/kuksa-val/kuksa-dbc-feeder/mapping.yml
@@ -7,7 +7,7 @@ PT_VehicleAvgSpeed:
PT_EngineSpeed:
minupdatedelay: 100
targets:
- Vehicle.Powertrain.CombustionEngine.Engine.Speed:
+ Vehicle.Powertrain.CombustionEngine.Speed:
transform:
math: "floor(x+0.5)"