Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[TOOL-91] Add support for MDF #1975

Draft
wants to merge 1 commit into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3,433 changes: 3,433 additions & 0 deletions application/apps/indexer/Cargo.lock

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions application/apps/indexer/indexer_cli/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -36,3 +36,5 @@ uuid = "1.3"
someip-messages = { git = "https://github.com/esrlabs/someip" }
# someip-payload = { path = "../../../../../someip-payload"}
someip-payload = { git = "https://github.com/esrlabs/someip-payload" }
mdf = { path = "../../../../../mdf"}
# mdf = { git = "https://github.com/esrlabs/mdf" }
111 changes: 108 additions & 3 deletions application/apps/indexer/indexer_cli/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,24 +31,35 @@ use dlt_core::{
parse::DltParseError,
statistics::{collect_dlt_stats, count_dlt_messages as count_dlt_messages_old},
};
use mdf::{
meta::MdfMeta,
parse::MdfParser,
read::{MdfReader, filter::MdfBusFilter},
stat::MdfScanner,
};
use env_logger::Env;
use futures::{pin_mut, stream::StreamExt};
use indexer_base::{config::*, error_reporter::*, progress::IndexingResults};
use indicatif::{ProgressBar, ProgressStyle};
use parsers::{
dlt::{attachment::FileExtractor, DltParser, DltRangeParser},
someip::SomeipParser,
mdf::MdfLogMessageParser,
text::StringTokenizer,
LogMessage, MessageStreamItem, ParseYield,
};
use processor::{export::export_raw, grabber::GrabError, text_source::TextFileSource};
use sources::{
binary::{pcap::ng::PcapngByteSource, raw::BinaryByteSource},
binary::{
pcap::ng::PcapngByteSource,
raw::BinaryByteSource,
mdf::MdfRecordByteSource
},
producer::MessageProducer,
};
use std::{
fs::File,
io::BufReader,
fs::{File, OpenOptions},
io::{BufReader, BufWriter, Write},
path::{Path, PathBuf},
};
use structopt::StructOpt;
Expand Down Expand Up @@ -247,6 +258,12 @@ enum Chip {
#[structopt(short, long, name = "FILE", help = "the model file (FIBEX))")]
model: Option<PathBuf>,
},
#[structopt(about = "events from mdf files")]
#[allow(dead_code)]
MdfEvents {
#[structopt(help = "the mdf file to parse")]
input: PathBuf,
},
#[structopt(about = "dlt statistics")]
DltStats {
#[structopt(help = "the DLT file to parse")]
Expand Down Expand Up @@ -448,6 +465,9 @@ pub async fn main() -> Result<()> {
output: _,
model: _,
} => println!("NYI someip from pcap not available on cli"),
Chip::MdfEvents {
input,
} => handle_mdf_events_subcommand(&input).await,
Chip::DltStats {
input,
legacy,
Expand Down Expand Up @@ -1460,6 +1480,91 @@ pub async fn main() -> Result<()> {
println!("res = {res:?}");
}

/// Parses the given MDF file, prints per-bus event statistics, and writes
/// the events of every bus in every data-group to a separate text file
/// placed next to the input file.
///
/// # Arguments
///
/// * `input` - Path of the MDF file to parse.
///
/// # Panics
///
/// Panics (via `expect`) on any I/O or parse failure — acceptable for this
/// diagnostic CLI subcommand.
async fn handle_mdf_events_subcommand(
    input: &Path
) {
    println!("handle_mdf_events_subcommand");
    let overall = Instant::now();

    // Parse the MDF block structure from the file.
    println!("read: {}", input.display());
    let time = Instant::now();
    let file = File::open(input).expect("open");
    let mut source = BufReader::new(file);
    let mut parser = MdfParser::new(&mut source);
    let mdf = parser.parse().expect("parse");
    println!("..read took: {:?}", time.elapsed());

    let time = Instant::now();
    mdf.validate().expect("validate");
    println!("..validate took: {:?}", time.elapsed());

    let time = Instant::now();
    let meta = MdfMeta::new(&mdf);
    println!("..analyze took: {:?}", time.elapsed());

    // One full scan to collect per-group, per-bus event statistics.
    let time = Instant::now();
    let mut reader = MdfReader::new(&mut source, &meta);
    let scanner = MdfScanner::new(&mut reader, &meta);
    let stat = scanner.scan();
    println!("events:\n{}", stat);
    println!("..scan took: {:?}", time.elapsed());

    // Re-read the file once per bus of each data-group and dump its events.
    for (index, buses) in stat.groups.iter().enumerate() {
        for (bus_stat, _) in buses.collect() {
            let bus = &bus_stat.bus;
            let time = Instant::now();
            let mut count: usize = 0;
            // Output file: "<input-file-name>_<bus>-<group-index>.txt".
            let path = input.parent().unwrap().join(format!("{}_{:?}-{}.txt",
                input.file_name().unwrap().to_str().unwrap(),
                bus, index));

            println!("write: {}", path.display());
            let mut writer = BufWriter::new(
                OpenOptions::new()
                    .create(true)
                    .truncate(true)
                    .write(true)
                    // Borrow the path instead of cloning it; we still need
                    // it below for the size report.
                    .open(&path)
                    .expect("create")
            );

            let mut reader = MdfReader::new(&mut source, &meta);
            reader.filter(MdfBusFilter { buses: vec![bus.clone()] });
            let source = MdfRecordByteSource::new(&mut reader, index);
            let parser = MdfLogMessageParser::new(&meta.group(index).expect("meta"));
            let mut producer = MessageProducer::new(parser, source, None);
            let stream = producer.as_stream();
            pin_mut!(stream);

            // Drain the stream, writing one line per parsed event; stop on
            // `Done` or when the stream is exhausted.
            while let Some(msg) = stream.next().await {
                match msg {
                    (_, MessageStreamItem::Item(ParseYield::Message(message))) => {
                        writeln!(writer, "{}", message).expect("write");
                        count += 1;
                    },
                    (_, MessageStreamItem::Done) => break,
                    _ => continue,
                }
            }

            writer.flush().expect("flush");
            println!("events: {}", count);
            println!("output: {} MB",
                // `Path::metadata` avoids requiring `std::fs` to be in
                // scope as a module (only `fs::{File, OpenOptions}` are
                // imported above).
                path.metadata().expect("fs").len() as f32 / (1024 * 1024) as f32
            );
            println!("..write took: {:?}", time.elapsed());
        }
    }

    println!("overall: {:?}", overall.elapsed());
}

async fn handle_dlt_stats_subcommand(
file_path: &Path,
count: bool,
Expand Down
3 changes: 3 additions & 0 deletions application/apps/indexer/parsers/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ humantime = "2.1"
lazy_static = "1.4"
log = "0.4.17"
memchr = "2.4"
nom = "7.1"
serde = { version = "1.0", features = ["derive"] }
thiserror = "1.0"
tokio-util = "0.7"
Expand All @@ -22,6 +23,8 @@ rand = "0.8.5"
someip-messages = { git = "https://github.com/esrlabs/someip" }
# someip-payload = { path = "../../../../../someip-payload" }
someip-payload = { git = "https://github.com/esrlabs/someip-payload" }
mdf = { path = "../../../../../mdf"}
# mdf = { git = "https://github.com/esrlabs/mdf" }

[dev-dependencies]
stringreader = "0.1.1"
Expand Down
21 changes: 21 additions & 0 deletions application/apps/indexer/parsers/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
pub mod dlt;
pub mod someip;
pub mod mdf;
pub mod text;
use serde::Serialize;
use std::{fmt::Display, io::Write};
Expand All @@ -17,6 +18,26 @@ pub enum Error {
Eof,
}

impl nom::error::ParseError<&[u8]> for Error {
fn from_error_kind(input: &[u8], kind: nom::error::ErrorKind) -> Self {
Error::Parse(format!(
"Nom error: {:?} ({} bytes left)",
kind,
input.len()
))
}

fn append(_: &[u8], _: nom::error::ErrorKind, other: Self) -> Self {
other
}
}

impl From<nom::Err<Error>> for Error {
    /// Wraps any nom error variant (Incomplete/Error/Failure) as a parse error.
    fn from(err: nom::Err<Error>) -> Self {
        let rendered = format!("{:?}", err);
        Self::Parse(rendered)
    }
}

#[derive(Debug)]
pub enum ParseYield<T> {
Message(T),
Expand Down
166 changes: 166 additions & 0 deletions application/apps/indexer/parsers/src/mdf.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,166 @@
use crate::{Error, LogMessage, ParseYield, Parser};
use std::{fmt, fmt::Display, io::Write};
use serde::Serialize;
use nom::{
bytes::streaming::take,
combinator::map,
number::streaming::be_u64,
sequence::tuple,
};
use mdf::{
event::MdfEventBuilder,
meta::MdfDataGroupMeta,
read::record::MdfRecord,
err::MdfError,
};

/// A parser for MDF log messages.
///
/// Wraps an `MdfEventBuilder` that turns deserialized MDF records into
/// events for a single data-group.
pub struct MdfLogMessageParser<'a> {
    // Builds events from consumed records; borrows the group meta-info
    // for the lifetime of the parser.
    builder: MdfEventBuilder<'a>
}

impl<'a> MdfLogMessageParser<'a> {
    /// Creates a new MDF log message parser.
    ///
    /// # Arguments
    ///
    /// * `group` - The meta-info of the data-group within the MDF file.
    pub fn new(group: &'a MdfDataGroupMeta) -> Self {
        MdfLogMessageParser {
            builder: MdfEventBuilder::new(group)
        }
    }
}

// NOTE(review): these manual `unsafe impl`s assert that `MdfEventBuilder<'a>`
// (and the `MdfDataGroupMeta` it borrows) can safely be moved to / shared
// between threads. No other parser in this crate needs this, which suggests
// `MdfEventBuilder` contains a non-Send/non-Sync type — confirm the actual
// invariant in the `mdf` crate and either replace this with a `// SAFETY:`
// justification or remove the impls. TODO confirm before merge.
unsafe impl<'a> Send for MdfLogMessageParser<'a> {}
unsafe impl<'a> Sync for MdfLogMessageParser<'a> {}
Comment on lines +35 to +36
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why is unsafe needed here? we do not need unsafe for any other parsers


impl<'a> Parser<MdfLogMessage> for MdfLogMessageParser<'a> {
fn parse<'b>(
&mut self,
input: &'b [u8],
_timestamp: Option<u64>,
) -> Result<(&'b [u8], Option<ParseYield<MdfLogMessage>>), Error> {
let (rest, record) = MdfRecordSerializer::from_bytes(input)?;
match self.builder.consume_record(record) {
Ok(event) => {
Ok((
rest,
Some(ParseYield::from(MdfLogMessage::from(
format!("{}", event),
event.payload.to_vec(),
))),
))
}
Err(MdfError::NED) => {
Ok((rest, None))
}
Err(MdfError::UNK(_)) => {
Ok((rest, None))
}
Err(error) => {
Err(Error::Parse(format!("{}", error)))
}
}
}
}

/// Represents a MDF log message.
#[derive(Debug, Serialize)]
pub struct MdfLogMessage {
    // Human-readable rendering of the event (used by `Display`).
    description: String,
    // Raw payload bytes of the event (used by `to_writer`).
    bytes: Vec<u8>,
}

impl MdfLogMessage {
    /// Creates a new log message for the given values.
    pub fn from(description: String, bytes: Vec<u8>) -> Self {
        Self { description, bytes }
    }
}

impl LogMessage for MdfLogMessage {
    /// Writes the raw payload bytes and returns how many were written.
    fn to_writer<W: Write>(&self, writer: &mut W) -> Result<usize, std::io::Error> {
        let written = self.bytes.len();
        writer.write_all(&self.bytes)?;
        Ok(written)
    }
}

impl Display for MdfLogMessage {
    /// Renders the message as its textual description.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description)
    }
}

/// MDF record serializer.
pub struct MdfRecordSerializer {}

impl MdfRecordSerializer {
/// Serializes the given record to bytes.
pub fn to_bytes(mut record: MdfRecord) -> Vec<u8> {
let mut buffer: Vec<u8> = vec![];

buffer.append(&mut record.record_id.to_be_bytes().to_vec());
buffer.append(&mut (record.record_index as u64).to_be_bytes().to_vec());
buffer.append(&mut (record.data.len() as u64).to_be_bytes().to_vec());
buffer.append(&mut record.data);

buffer
}

/// Deserializes a record from the given bytes.
pub fn from_bytes(input: &[u8]) -> Result<(&[u8], MdfRecord), Error> {
let (input, (id, index, length)) = tuple((be_u64, be_u64, be_u64))(input)?;

let result = Ok(map(
take(length as usize),
|data: &[u8]| MdfRecord {
record_id: id,
record_index: index as usize,
data: data.to_vec()
},
)(input)?);

result
}
}

#[cfg(test)]
mod test {
    use super::*;

    /// Round-trips a record through `to_bytes`/`from_bytes` and checks
    /// the exact wire layout in between.
    #[test]
    fn test_mdf_record_serializer() {
        let payload = vec![0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18];
        let record = MdfRecord {
            record_id: 1,
            record_index: 2,
            data: payload.clone(),
        };

        let bytes = MdfRecordSerializer::to_bytes(record);
        let expected: Vec<u8> = vec![
            // id
            0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
            // index
            0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
            // len
            0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08,
            // data
            0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18,
        ];
        assert_eq!(bytes, expected);

        let (rest, parsed) = MdfRecordSerializer::from_bytes(&bytes).expect("record");

        assert!(rest.is_empty());
        assert_eq!(
            MdfRecord {
                record_id: 1,
                record_index: 2,
                data: payload,
            },
            parsed
        );
    }
}
2 changes: 2 additions & 0 deletions application/apps/indexer/sources/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,8 @@ uuid = { version = "1.3", features = ["serde", "v4"] }
regex = "1.7"
lazy_static = "1.4"
shellexpand = "3.0.0"
mdf = { path = "../../../../../mdf"}
# mdf = { git = "https://github.com/esrlabs/mdf" }

[dev-dependencies]
env_logger = "0.10"
Loading
Loading