Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
155 changes: 155 additions & 0 deletions tools/trap/cli/logAnalyser/logPreprocessor.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,155 @@
#!/usr/bin/env python
###########################################################################
#
# Copyright 2026 Samsung Electronics All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
###########################################################################
# File : log_preprocessor.py
# Description: Contains preprocessing functions for log files including
# format validation and binary discovery

from __future__ import print_function
import os
import sys
import logAnalyser.logUtils as logUtils
from array import *

# Global variables
# Section-marker strings shared with logUtils, re-exported here so the
# preprocessing functions below can reference them by short name.
assertion_details = logUtils.assertion_details
BIN_ADDR_FXN = logUtils.BIN_ADDR_FXN


partition_string = logUtils.partition_string


# Function to format logs and delete the timestamp (supported formats-|xxxxxxxxx| and [xxxxxxxxx]) if it consists of timestamp at the start of each log line
def format_log_file(log_file):
    """Format *log_file* in place and validate the assertion log format.

    Removes a leading timestamp of the form |xxxxxxxxx| or [xxxxxxxxx]
    (plus one following space) from every line, then scans the result for
    assertion logs written in a format TRAP does not support; on finding
    one, prints guidance and exits the process with status 1.
    """
    current_line = ""

    # Pass 1: strip the timestamp prefix from each line, if present.
    with open(log_file, "r") as f:
        data = f.readlines()
    with open(log_file, "w") as f:
        for line in data:
            delete_idx = 0
            # A timestamp is present only if the line starts with '|' or '['
            if line and (line[0] == '|' or line[0] == '['):
                for idx in range(1, len(line)):
                    if '|' == line[idx] or ']' == line[idx]:
                        delete_idx = idx + 1
                        break
                # Skip one space after the timestamp. Guard the index: the
                # closing delimiter may be the last character of a line
                # without a trailing newline (delete_idx == len(line)).
                if delete_idx < len(line) and line[delete_idx] == ' ':
                    delete_idx = delete_idx + 1
                line = line[delete_idx:]
            f.write(line)

    # Pass 2: after the formatting above, detect assertion logs written in
    # an unsupported format.
    with open(log_file, "r") as f:
        data = iter(f.readlines())
    for line in data:
        if partition_string in line:
            # Separator block: remember the section header that follows,
            # then skip the closing separator line. next() with a default
            # avoids StopIteration on a truncated log.
            line = next(data, "")
            current_line = line
            line = next(data, "")
            continue
        if 'Assertion failed at file:' in line and current_line == assertion_details:
            word = line.split()
            if word[1] != 'Assertion':
                # Unsupported prefix before 'Assertion': show the user the
                # supported format, which starts at the 'A' of 'Assertion'.
                delete_idx = 0
                for idx in range(0, len(line)):
                    if 'A' == line[idx]:
                        delete_idx = idx
                        break
                correctFormatString = line[delete_idx:]
                print("\n\t- Below log format is not supported in TRAP")
                print('\t\t-{0}\t- Instead, supported log format in TRAP is as follows:'.format(line))
                print("\t\t-{0} {1}\n\tKindly modify the log file as per accepted format.\n".format(word[word.index('Assertion')-1], correctFormatString))
                sys.exit(1)

# Function to get the number of application binaries, names, text address and sizes
def find_number_of_binaries(lparser):
    """Discover the application binaries referenced by the log.

    Counts the loaded application binaries and records each binary's name,
    text start address and text end address on *lparser* (fields
    g_app_idx, app_name, g_stext_app, g_etext_app). If the log contains no
    loading information at all, falls back to scanning lparser.bin_path
    for "*_dbg" debug files and uses placeholder address ranges.
    """

    current_line = ""
    # Parse the contents based on tokens in log file.
    with open(lparser.log_file) as searchfile:
        for line in searchfile:
            if partition_string in line:
                # Separator line: the following line is the section header;
                # remember it, then skip the closing separator line.
                line = next(searchfile)
                current_line = line
                line = next(searchfile)
                continue
            # Get the number of applications loaded: every line inside the
            # "Loading location information" section describes one binary.
            if BIN_ADDR_FXN == current_line:
                lparser.g_app_idx = lparser.g_app_idx + 1

    app_idx = 0
    # NOTE(review): array('i') holds signed 32-bit values; a text address
    # at or above 0x80000000 would raise OverflowError on assignment below
    # - confirm the target's address map fits a signed 32-bit range.
    lparser.g_stext_app = array('i', range(0, lparser.g_app_idx))
    lparser.g_etext_app = array('i', range(0, lparser.g_app_idx))
    with open(lparser.log_file) as searchfile:
        for line in searchfile:
            if partition_string in line:
                line = next(searchfile)
                current_line = line
                line = next(searchfile)
                continue
            # Read the app text address and size
            if BIN_ADDR_FXN == current_line:
                word = line.split(':')
                t = word[2].split(',')  # word[2] is the App Start Text address
                w = word[1].split(' ')
                # w[1] denotes string '[<app_name>]'
                start_idx = int(w[1].find('[')) + 1
                end_idx = int(w[1].find(']'))
                lparser.app_name.append(w[1][start_idx:end_idx])
                lparser.g_stext_app[app_idx] = int(t[0], 16)
                lparser.g_etext_app[app_idx] = lparser.g_stext_app[app_idx] + int(word[3], 10)  # word[3] is text_size
                app_idx = app_idx + 1

    # Fallback: no loading information was found in the log. Derive binary
    # names from "<name>_dbg" debug files in the binary directory.
    if app_idx == 0:
        app_count = 0
        app_names = []

        debug_files = [
            f for f in os.listdir(lparser.bin_path)
            if f.endswith("_dbg") and os.path.isfile(os.path.join(lparser.bin_path, f))
        ]

        for f in debug_files:
            app_names.append(f.split("_")[0])
            lparser.app_name.append(app_names[app_count])
            app_count = app_count + 1

        if app_count > 0:
            # Addresses are unknown in this fallback; flag read_all_elf so
            # later stages consult every ELF for symbols.
            lparser.read_all_elf = True
            lparser.g_app_idx = app_count
            lparser.g_stext_app = array('i', range(0, lparser.g_app_idx))
            lparser.g_etext_app = array('i', range(0, lparser.g_app_idx))

            for app_idx in range(app_count):
                #setting start and end address with minimum and maximum possible values for now when address is not available
                lparser.g_stext_app[app_idx] = int("0x00000000", 16)
                lparser.g_etext_app[app_idx] = int("0x77777777", 16)
        else:
            print("\nNo debug files found for common and app binaries\n")

def preprocessLogFile(lparser):
    """Prepare the crash log for analysis.

    Normalizes the log file in place (per-line timestamp removal and
    format validation), then discovers the application binaries the log
    references and records them on *lparser*.
    """
    # Strip timestamps from every line; exits the process on an
    # unsupported assertion log format.
    format_log_file(lparser.log_file)

    # Populate binary count, names and text address ranges on lparser.
    find_number_of_binaries(lparser)

136 changes: 136 additions & 0 deletions tools/trap/cli/logAnalyser/logUtils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,136 @@
#!/usr/bin/env python
###########################################################################
#
# Copyright 2026 Samsung Electronics All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
###########################################################################
# File : utils.py
# Description: Contains utility functions for log parsing including
# formatting, crash type detection, address validation, and state mapping

from __future__ import print_function

# strings used for log analysis
# Section headers printed by the target right after a separator line.
# Comparisons elsewhere rely on the exact text, including the trailing
# newline kept by file iteration - do not strip or edit these.
assertion_details = "Assertion details\n"
stack_details = "Asserted task's stack details\n"
register_dump = "Asserted task's register dump\n"
BIN_ADDR_FXN = "Loading location information\n"
tcb_info = "Asserted task's TCB info"

# Separator line printed between sections of the crash dump.
partition_string = "==========================================================="

# used in heapNode
closing_log_line = "##########################################################################################################################################"



def format_output(res, string):
    """Print the symbol address, function name and file parsed from *res*.

    *res* is a newline-separated triple (address, function, file);
    *string* is a qualifier appended to each label.
    """
    fields = res.split('\n')
    # Labels are printed in the same fixed order as the fields appear.
    for pos, label in enumerate(('symbol addr', 'function name', 'file')):
        print('\t- {0} {1} : {2}'.format(label, string, fields[pos]))

def print_crash_type(parser, string):
    """Print the crash type inferred from the log line *string*.

    Sets parser.crash_type_assert when the crash is a code assertion.
    Afterwards prints either the crash point (assertion crashes) or the
    raw crash log line.
    """
    # Fault markers, checked in the same precedence order as before.
    fault_labels = (
        ('up_memfault', 'memory fault'),
        ('up_busfault', 'bus fault'),
        ('up_usagefault', 'usage fault'),
        ('up_hardfault', 'hard fault'),
        ('dataabort', 'data abort'),
        ('prefetchabort', 'prefetch abort'),
        ('undefinedinsn', 'undefined instruction abort'),
    )
    for marker, label in fault_labels:
        if marker in string:
            print('\n2. Crash type : {0}'.format(label))
            break
    else:
        if 'Assertion failed' in string:
            parser.crash_type_assert = True
            print('\n2. Crash type : code assertion by code ASSERT or PANIC')
        else:
            print('\n2. Crash type : etc')
    if (parser.crash_type_assert == True):
        # For assertions, the crash point is the text after "...: ".
        print('\n3. Crash point\n\t-', string.split(': ', 1)[1])
    else:
        print(' Crash log\n\t-', string)

def is_app_text_address(parser, address):
    """Return the 1-based app index whose text section contains *address*.

    *address* is a hexadecimal string (e.g. "0x60a4", as produced by
    hex()). Returns False when the address lies in none of the
    application text ranges.
    """
    # Convert once and compare numerically. The previous implementation
    # compared hex *strings* lexicographically, which is wrong whenever
    # the strings differ in length (e.g. '0x9' > '0x10000000' is True).
    addr = int(address, 16)
    # Check the application text address range
    for idx in range(parser.g_app_idx):
        if parser.g_stext_app[idx] <= addr < parser.g_etext_app[idx]:
            return (idx + 1)
    # Explicit False: the old post-loop check (idx == g_app_idx) could
    # never be true for g_app_idx > 0, so the function silently returned
    # None on a miss.
    return False


# Function to check if address lies in the kernel text address range
def is_kernel_text_address(parser, address):
    """Return True if *address* lies in the kernel text address range.

    *address* is a hexadecimal string (e.g. as produced by hex()). Both
    the RAM and the flash kernel text regions are checked.
    """
    # Numeric comparison. The previous implementation compared hex
    # *strings* lexicographically, which is wrong whenever the strings
    # differ in length (e.g. '0x9' > '0x10000000' is True).
    addr = int(address, 16)
    in_ram = parser.g_stext_ram <= addr < parser.g_etext_ram
    in_flash = parser.g_stext_flash <= addr < parser.g_etext_flash
    return in_ram or in_flash


def convert_stateno_statemsg(parser):
    """Populate parser.task_state with a state-number -> description map.

    Reads the build configuration file at parser.config_path to learn
    which scheduler features are enabled, then fills parser.task_state
    keyed by the stringified state number.
    """
    # Feature flags discovered from the configuration file.
    # NOTE(review): these flags are currently never consulted below - the
    # state table is built unconditionally. If the target build disables
    # signals, message queues or paging, the numbering here may not match
    # the firmware's task-state enum; confirm against the RTOS headers.
    config_smp = False
    config_disable_signals = False
    config_disable_mqueue = False
    config_paging = False

    # Read configuration file
    with open(parser.config_path) as configfile:
        for line in configfile:
            if "CONFIG_SMP=y" in line:
                config_smp = True
            elif "CONFIG_DISABLE_SIGNALS=y" in line:
                config_disable_signals = True
            elif "CONFIG_DISABLE_MQUEUE=y" in line:
                config_disable_mqueue = True
            elif "CONFIG_PAGING=y" in line:
                config_paging = True

    # Task-state descriptions, indexed by their state number.
    state_messages = (
        " Invalid",
        " Pending preemption unlock",
        " Wait to scheduling (Ready)",
        " Assigned to CPU (Ready)",
        " Running",
        " Inactive",
        " Wait Semaphore",
        " Wait FIN",
        " Wait Signal",
        " Wait MQ Receive (MQ Empty)",
        " Wait MQ Send (MQ Full)",
        " Wait Page Fill",
    )
    for state_no, message in enumerate(state_messages):
        parser.task_state[str(state_no)] = message


Loading