Commit

ODE-1250 Change BundleStream to StreamId
Schwartz-Matthew-bah committed May 21, 2019
1 parent ceb6222 commit 648a0d7
Showing 3 changed files with 35 additions and 35 deletions.
18 changes: 9 additions & 9 deletions qa/test-harness/README.md
@@ -80,15 +80,15 @@ PerformBundleIdCheck = True

All other sections define test cases. You may provide as few or as many as you wish.

| Property | Required | Description |
|--------------|----------|--------------------------------------------------------------------------------------------------------|
| DataFile | Yes | Log file or REST JSON file containing data to be uploaded to the ODE |
| UploadUrl | Yes | REST endpoint to which data files are sent |
| UploadFormat | Yes | FILE or BODY to specify if the data should be sent as an attachment or in the request body |
| KafkaTopics | Yes | List of topics, separated by commas, to which the test harness should expect messages out from the ODE |
| BundleStream | Yes | Used in conjunction with BundleID validation for grouping |
| OutputFile | No | (Optional) Log file path to which detailed test results will be logged |
| ConfigFile | No | (Optional) Path to ODE validator library custom configuration file |
| Property | Required | Description |
| ------------ | ----------- | ------------------------------------------------------------------------------------------------------ |
| DataFile | Yes | Log file or REST JSON file containing data to be uploaded to the ODE |
| UploadUrl | Yes | REST endpoint to which data files are sent |
| UploadFormat | Yes | FILE or BODY to specify if the data should be sent as an attachment or in the request body |
| KafkaTopics | Yes | List of topics, separated by commas, to which the test harness should expect messages out from the ODE |
| StreamId     | Conditional | (Required if PerformBundleIdCheck = True) Stream identifier used to group test cases for BundleID validation |
| OutputFile | No | (Optional) Log file path to which detailed test results will be logged |
| ConfigFile | No | (Optional) Path to ODE validator library custom configuration file |
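
For reference, a test-case section wires these properties together in plain INI form, and the harness reads them with mapping-style lookups (`config['DataFile']`, `config.get('StreamId')`, as in `testharness.py` below). The following is a minimal, self-contained sketch assuming Python's `configparser`; the section name and values are illustrative and not taken from the repository.

```python
import configparser

# Illustrative test-case section using the renamed StreamId key.
# Values are made up for this sketch, not copied from the repository.
SAMPLE = """
[example_test]
DataFile = ../../data/bsmTx.gz
UploadUrl = https://localhost:8080/upload/bsmlog
UploadFormat = FILE
KafkaTopics = topic.OdeBsmJson,topic.OdeTimJson
StreamId = 0
OutputFile = example_test.log
"""

config = configparser.ConfigParser()
config.read_string(SAMPLE)
section = config["example_test"]

# Required keys raise KeyError if missing; optional ones fall back to None.
data_file = section["DataFile"]
upload_url = section["UploadUrl"]
upload_format = section["UploadFormat"]            # FILE or BODY
kafka_topics = [t.strip() for t in section["KafkaTopics"].split(",")]
stream_id = section.get("StreamId")                # needed only when PerformBundleIdCheck = True
output_file = section.get("OutputFile")            # optional
config_file = section.get("ConfigFile")            # optional

print(stream_id, kafka_topics, output_file)
```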

## Release History

40 changes: 20 additions & 20 deletions qa/test-harness/full-test-sample.ini
@@ -7,87 +7,87 @@ UploadUrl = https://%(DOCKER_HOST_IP)s:8080/upload/bsmlog
UploadFormat = FILE
KafkaTopics = topic.OdeBsmJson
OutputFile = bsmLogDuringEvent.log
BundleStream = 0
StreamId = 0

[test2_bsmTx]
DataFile = ../../data/bsmTx.gz
UploadUrl = https://%(DOCKER_HOST_IP)s:8080/upload/bsmlog
UploadFormat = FILE
KafkaTopics = topic.OdeBsmJson
OutputFile = bsmTx.log
BundleStream = 0
StreamId = 0

[test3_dnMsg]
DataFile = ../../data/dnMsg.gz
UploadUrl = https://%(DOCKER_HOST_IP)s:8080/upload/bsmlog
UploadFormat = FILE
KafkaTopics = topic.OdeTimJson
OutputFile = dnMsg.log
BundleStream = 0
StreamId = 0

[test4_driverAlert]
DataFile = ../../data/driverAlert.gz
UploadUrl = https://%(DOCKER_HOST_IP)s:8080/upload/bsmlog
UploadFormat = FILE
KafkaTopics = topic.OdeDriverAlertJson
OutputFile = driverAlert.log
BundleStream = 0
StreamId = 0

[test5_rxMsg_BSM_and_TIM]
DataFile = ../../data/rxMsg_BSM_and_TIM.gz
UploadUrl = https://%(DOCKER_HOST_IP)s:8080/upload/bsmlog
UploadFormat = FILE
KafkaTopics = topic.OdeBsmJson,topic.OdeTimJson
OutputFile = rxMsg_BSM_and_TIM.log
BundleStream = 0
StreamId = 0

[test6_rxMsg_TIM_GeneratedBy_RSU]
DataFile = ../../data/rxMsg_TIM_GeneratedBy_RSU.gz
UploadUrl = https://%(DOCKER_HOST_IP)s:8080/upload/bsmlog
UploadFormat = FILE
KafkaTopics = topic.OdeTimJson
OutputFile = rxMsg_TIM_GeneratedBy_RSU.log
BundleStream = 0
StreamId = 0

[test7_rxMsg_TIM_GeneratedBy_TMC_VIA_SAT]
DataFile = ../../data/rxMsg_TIM_GeneratedBy_TMC_VIA_SAT.gz
UploadUrl = https://%(DOCKER_HOST_IP)s:8080/upload/bsmlog
UploadFormat = FILE
KafkaTopics = topic.OdeTimJson
OutputFile = rxMsg_TIM_GeneratedBy_TMC_VIA_SAT.log
BundleStream = 0
StreamId = 0

[test7_rxMsg_TIM_GeneratedBy_TMC_VIA_SNMP]
DataFile = ../../data/rxMsg_TIM_GeneratedBy_TMC_VIA_SNMP.gz
UploadUrl = https://%(DOCKER_HOST_IP)s:8080/upload/bsmlog
UploadFormat = FILE
KafkaTopics = topic.OdeTimJson
OutputFile = rxMsg_TIM_GeneratedBy_TMC_VIA_SNMP.log
BundleStream = 0
StreamId = 0

[timtest1]
DataFile = ../../data/TIM_Message_Testing_Files/tim_geometry_multi_rsu_no_sdw.json
UploadUrl = https://%(DOCKER_HOST_IP)s:8080/tim
UploadFormat = BODY
KafkaTopics = topic.J2735TimBroadcastJson
OutputFile = tim_geometry_multi_rsu_no_sdw.log
BundleStream = 1
StreamId = 1

[timtest2]
DataFile = ../../data/TIM_Message_Testing_Files/tim_geometry_multi_rsu_sdw.json
UploadUrl = https://%(DOCKER_HOST_IP)s:8080/tim
UploadFormat = BODY
KafkaTopics = topic.J2735TimBroadcastJson
OutputFile = tim_geometry_multi_rsu_sdw.log
BundleStream = 1
StreamId = 1

[timtest3]
DataFile = ../../data/TIM_Message_Testing_Files/tim_path_LL_single_rsu_no_sdw.json
UploadUrl = https://%(DOCKER_HOST_IP)s:8080/tim
UploadFormat = BODY
KafkaTopics = topic.J2735TimBroadcastJson
OutputFile = tim_path_LL_single_rsu_no_sdw.log
BundleStream = 1
StreamId = 1

[timtest4]
DataFile = ../../data/TIM_Message_Testing_Files/tim_path_multiRSU_hexCRC.json
@@ -96,68 +96,68 @@ UploadFormat = BODY
Upload = FILE
KafkaTopics = topic.J2735TimBroadcastJson
OutputFile = tim_path_multiRSU_hexCRC.log
BundleStream = 1
StreamId = 1

[timtest5]
DataFile = ../../data/TIM_Message_Testing_Files/tim_path_singleRSU.json
UploadUrl = https://%(DOCKER_HOST_IP)s:8080/tim
UploadFormat = BODY
KafkaTopics = topic.J2735TimBroadcastJson
OutputFile = tim_path_singleRSU.log
BundleStream = 1
StreamId = 1

[timtest6]
DataFile = ../../data/TIM_Message_Testing_Files/tim_path_xy_node_any_multi_rsu_no_sdw.json
UploadUrl = https://%(DOCKER_HOST_IP)s:8080/tim
UploadFormat = BODY
KafkaTopics = topic.J2735TimBroadcastJson
OutputFile = tim_path_xy_node_any_multi_rsu_no_sdw.log
BundleStream = 1
StreamId = 1

[timtest7]
DataFile = ../../data/TIM_Message_Testing_Files/tim_path_xy_node_any_single_rsu_no_sdw.json
UploadUrl = https://%(DOCKER_HOST_IP)s:8080/tim
UploadFormat = BODY
KafkaTopics = topic.J2735TimBroadcastJson
OutputFile = tim_path_xy_node_any_single_rsu_no_sdw.log
BundleStream = 1
StreamId = 1

[timtest8]
DataFile = ../../data/TIM_Message_Testing_Files/tim_path_xy_node_latlon_multi_rsu_no_sdw.json
UploadUrl = https://%(DOCKER_HOST_IP)s:8080/tim
UploadFormat = BODY
KafkaTopics = topic.J2735TimBroadcastJson
OutputFile = tim_path_xy_node_latlon_multi_rsu_no_sdw.log
BundleStream = 1
StreamId = 1

[timtest9]
DataFile = ../../data/TIM_Message_Testing_Files/tim_path_xy_node_latlon_no_rsu_sdw.json
UploadUrl = https://%(DOCKER_HOST_IP)s:8080/tim
UploadFormat = BODY
KafkaTopics = topic.J2735TimBroadcastJson
OutputFile = tim_path_xy_node_latlon_no_rsu_sdw.log
BundleStream = 1
StreamId = 1

[timtest10]
DataFile = ../../data/TIM_Message_Testing_Files/tim_region_multiRSU.json
UploadUrl = https://%(DOCKER_HOST_IP)s:8080/tim
UploadFormat = BODY
KafkaTopics = topic.J2735TimBroadcastJson
OutputFile = tim_region_multiRSU.log
BundleStream = 1
StreamId = 1

[timtest11]
DataFile = ../../data/TIM_Message_Testing_Files/tim_region_singleRSU.json
UploadUrl = https://%(DOCKER_HOST_IP)s:8080/tim
UploadFormat = BODY
KafkaTopics = topic.J2735TimBroadcastJson
OutputFile = tim_region_singleRSU.log
BundleStream = 1
StreamId = 1

[timtest12]
DataFile = ../../data/TIM_Message_Testing_Files/tim_region_singleRSU_hexCRC.json
UploadUrl = https://%(DOCKER_HOST_IP)s:8080/tim
UploadFormat = BODY
KafkaTopics = topic.J2735TimBroadcastJson
OutputFile = tim_region_singleRSU_hexCRC.log
BundleStream = 1
StreamId = 1
12 changes: 6 additions & 6 deletions qa/test-harness/testharness.py
@@ -26,7 +26,7 @@ def __init__(self, config):
self.validator = TestCase(config.get('ConfigFile'))
self.output_file_path = config.get('OutputFile')
self.upload_format = config.get('UploadFormat')
self.bundleStream = config.get('BundleStream')
self.stream_id = config.get('StreamId')
try:
self.upload_url = config['UploadUrl']
self.data_file_path = config['DataFile']
@@ -141,13 +141,13 @@ def run(self):
for iteration in self.test_harness_iterations:
iteration.run()
if self.perform_bundle_id_check:
if iteration.bundleStream not in bundle_streams:
bundle_streams[iteration.bundleStream] = []
bundle_streams[iteration.bundleStream].append(iteration.bundle_id)
if iteration.stream_id not in bundle_streams:
bundle_streams[iteration.stream_id] = []
bundle_streams[iteration.stream_id].append(iteration.bundle_id)

if self.perform_bundle_id_check:
for stream_id in bundle_streams:
print("[INFO] Performing bundleId validation on stream %d" % int(stream_id))
print("[INFO] Performing bundleId validation on StreamId %d" % int(stream_id))
old_id = bundle_streams[stream_id][0]
bundle_ids_error = False
for cur_id in bundle_streams[stream_id][1:]:
@@ -156,4 +156,4 @@ def run(self):
bundle_ids_error = True
old_id = cur_id
if not bundle_ids_error:
print("[SUCCESS] BundleID validation passed for BundleStream %d." % int(stream_id))
print("[SUCCESS] BundleID validation passed for StreamId %d." % int(stream_id))
