-
Notifications
You must be signed in to change notification settings - Fork 248
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Explanation: This enhances the capability of the sssctl analyze tool. The new command `sssctl analyze error list` will print any errors found in sssd_domain.log. Resolves: #6679
- Loading branch information
Showing
3 changed files
with
378 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,69 @@ | ||
import re | ||
import argparse | ||
import logging | ||
import html | ||
import urllib.parse | ||
|
||
|
||
from sssd import util | ||
from sssd.source_files import Files | ||
from sssd.source_journald import Journald | ||
from sssd.parser import SubparsersAction | ||
from sssd.parser import Option | ||
from sssd.modules import request | ||
from sssd import sss_analyze | ||
|
||
logger = logging.getLogger() | ||
|
||
|
||
class ErrorAnalyzer:
    """
    An error analyzer module: scans SSSD backend (domain) logs for
    known error signatures and points the user at the SSSD
    troubleshooting documentation.
    """
    # Parser for this module's subcommands; set in setup_args().
    module_parser = None
    print_opts = [
        Option('--verbose', 'Verbose output', bool, '-v'),
    ]

    def setup_args(self, parser_grp, cli):
        """
        Setup module parser, subcommands, and options

        Args:
            parser_grp (argparse.Action): Parser group to nest
               module and subcommands under
            cli: CLI helper used to register subcommands

        Returns:
            The configured module parser
        """
        svc = util.Utils()
        desc = "Analyze error check module"
        self.module_parser = parser_grp.add_parser('error',
                                                   description=desc,
                                                   help='Error checker')

        subparser = self.module_parser.add_subparsers(title=None,
                                                      dest='subparser',
                                                      action=SubparsersAction,
                                                      metavar='COMMANDS')

        subcmd_grp = subparser.add_parser_group('Operation Modes')
        cli.add_subcommand(subcmd_grp, 'list', 'Print sssd error msg',
                           self.print_error, self.print_opts)

        # Running the bare 'error' command prints the module help.
        self.module_parser.set_defaults(func=svc.print_module_help)

        return self.module_parser

    def print_error(self, args):
        """
        Print backend (domain) log lines matching known error patterns.

        Args:
            args (Namespace): argparse parsed arguments
        """
        utl = util.Utils()
        source = utl.load(args)
        source.set_component(source.Component.BE, False)
        # Known error signatures; all raw strings so any regex
        # metacharacters added later stay literal by intent.
        patterns = [r'sdap_async_sys_connect request failed',
                    r'terminated by own WATCHDOG',
                    r'ldap_sasl_interactive_bind_s failed',
                    r'krb5_auth_timeout',
                    r'SSSD is offline',
                    r'Backend is offline',
                    r'tsig verify failure',
                    r'ldap_install_tls failed',
                    r's2n exop request failed']
        for line in utl.matched_line(source, patterns):
            print('\n', line, "For details refer https://sssd.io/troubleshooting/errors.html")
        return
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,306 @@ | ||
import re | ||
import logging | ||
|
||
from sssd.source_files import Files | ||
from sssd.source_journald import Journald | ||
from sssd.parser import SubparsersAction | ||
from sssd.parser import Option | ||
|
||
logger = logging.getLogger() | ||
|
||
|
||
class Utils:
    """
    A request analyzer module, handles request tracking logic
    and analysis. Parses input generated from a source Reader.
    """

    def __init__(self):
        # Lines collected (instead of printed) when the --merge option is
        # used; sorted by timestamp and printed at the end of
        # track_request(). NOTE(review): previously this attribute was
        # never initialized, so the first consume_line(..., consume=True)
        # call raised AttributeError.
        self.consumed_logs = []

    def print_module_help(self, args):
        """
        Print the module parser help output

        Args:
            args (Namespace): argparse parsed arguments
        """
        self.module_parser.print_help()

    def setup_args(self, parser_grp, cli):
        """
        Setup module parser, subcommands, and options

        Args:
            parser_grp (argparse.Action): Parser group to nest
               module and subcommands under
            cli: CLI helper used to register subcommands

        Returns:
            The configured module parser
        """
        # NOTE(review): self.list_opts and self.show_opts are not defined
        # anywhere in this class -- presumably provided elsewhere; verify.
        desc = "Analyze request tracking module"
        self.module_parser = parser_grp.add_parser('request',
                                                   description=desc,
                                                   help='Request tracking')

        subparser = self.module_parser.add_subparsers(title=None,
                                                      dest='subparser',
                                                      action=SubparsersAction,
                                                      metavar='COMMANDS')

        subcmd_grp = subparser.add_parser_group('Operation Modes')
        cli.add_subcommand(subcmd_grp, 'list', 'List recent requests',
                           self.list_requests, self.list_opts)
        cli.add_subcommand(subcmd_grp, 'show', 'Track individual request ID',
                           self.track_request, self.show_opts)

        self.module_parser.set_defaults(func=self.print_module_help)

        return self.module_parser

    def load(self, args):
        """
        Load the appropriate source reader.

        Args:
            args (Namespace): argparse parsed arguments

        Returns:
            Instantiated source object
        """
        if args.source == "journald":
            source = Journald()
        else:
            source = Files(args.logdir)
        return source

    def matched_line(self, source, patterns):
        """
        Yield lines which match any number of patterns (OR) in
        provided patterns list.

        Args:
            source (Reader): source Reader object
            patterns (list of str): regex pattern strings

        Yields:
            lines matching the provided pattern(s)
        """
        # Compile each pattern once instead of once per line.
        compiled = [re.compile(pattern) for pattern in patterns]
        for line in source:
            # exclude backtrace dump lines
            if line.startswith(' * '):
                continue
            # Yield a matching line exactly once, even when it matches
            # several of the provided patterns (the previous version
            # yielded it once per matching pattern).
            if any(regex.search(line) for regex in compiled):
                yield line

    def get_linked_ids(self, source, pattern, regex):
        """
        Retrieve list of associated REQ_TRACE ids. Filter
        only source lines by pattern, then parse out the
        linked id with the provided regex.

        Args:
            source (Reader): source Reader object
            pattern (list of str): regex pattern(s) used for finding
               linked ids
            regex (str): regular expression used to extract linked id

        Returns:
            List of linked ids discovered
        """
        linked_ids = []
        # Compile the extraction regex once, outside the loop.
        id_re = re.compile(regex)
        for line in self.matched_line(source, pattern):
            match = id_re.search(line)
            if match:
                linked_ids.append(match.group(0))
        return linked_ids

    def consume_line(self, line, source, consume):
        """
        Print or consume a line, if merge cli option is provided then consume
        boolean is set to True

        Args:
            line (str): line to process
            source (Reader): source Reader object
            consume (bool): If True, line is added to consumed_logs
               list, otherwise print line

        Returns:
            True if line was processed, otherwise False
        """
        found_results = True
        if consume:
            # Strip only a trailing newline; the previous
            # line.rstrip(line[-1]) removed every trailing copy of the
            # final character, mangling lines without a newline.
            self.consumed_logs.append(line.rstrip('\n'))
        else:
            # files source includes newline
            if isinstance(source, Files):
                print(line, end='')
            else:
                print(line)
        return found_results

    def print_formatted_verbose(self, source):
        """
        Parse log file and print formatted verbose list_requests output

        Args:
            source (Reader): source Reader object
        """
        cid_re = re.compile('CID#[0-9]*')
        data = {}
        # collect cid log lines from single run through of parsing the log
        # into dictionary (cid, ts) -> logline_output
        for line in source:
            if "CID#" not in line:
                continue

            # parse CID and ts from line, key is a tuple of (cid,ts)
            fields = line.split("[")
            # timestamp to the minute, cut off seconds, ms
            ts = fields[0][:17]
            result = cid_re.search(fields[3])
            cid = result.group(0)

            # if mapping exists, append line to output. Otherwise create
            # a new mapping
            if (cid, ts) in data:
                data[(cid, ts)] += line
            else:
                data[(cid, ts)] = line

        # pretty print the data
        for lines in data.values():
            cr_done = []
            id_done = []
            for cidline in lines.splitlines():
                plugin = ""
                name = ""
                req_id = ""

                # Extract CR number
                fields = cidline.split("[")
                cr_field = fields[3][7:]
                cr = cr_field.split(":")[0][4:]
                # Client connected, top-level info line
                if re.search(r'\[cmd', cidline):
                    self.print_formatted(cidline)
                # CR Plugin name
                if re.search("cache_req_send", cidline):
                    plugin = cidline.split('\'')[1]
                    id_done.clear()
                # CR Input name
                elif re.search("cache_req_process_input", cidline):
                    name = cidline.rsplit('[')[-1]
                # CR Input id
                elif re.search("cache_req_search_send", cidline):
                    req_id = cidline.rsplit()[-1]

                if plugin:
                    print(" - " + plugin)
                if name:
                    # Avoid duplicate output with the same CR #
                    if cr not in cr_done:
                        print(" - " + name[:-1])
                        cr_done.append(cr)
                if (req_id and ("UID" in cidline or "GID" in cidline)):
                    if req_id not in id_done and bool(re.search(r'\d', req_id)):
                        print(" - " + req_id)
                        id_done.append(req_id)

    def print_formatted(self, line):
        """
        Parse line and print formatted list_requests output

        Args:
            line (str): line to parse

        Returns:
            Client ID (str) from printed line, 0 otherwise
        """
        # exclude backtrace logs
        if line.startswith(' * '):
            return 0
        if "refreshed" in line:
            return 0
        # timestamp is everything up to the first ')', minus the '('
        ts = line.split(")")[0]
        ts = ts[1:]
        fields = line.split("[")
        cid = fields[3][4:-9]
        cmd = fields[4][4:-1]
        uid = fields[5][4:-1]
        # Some log formats carry an extra field before the uid; fall back
        # to the following field when the parsed value is not numeric.
        if not uid.isnumeric():
            uid = fields[6][4:-1]
        print(f'{ts}: [uid {uid}] CID #{cid}: {cmd}')
        return cid

    def list_requests(self, args):
        """
        List component (default: NSS) responder requests

        Args:
            args (Namespace): populated argparse namespace
        """
        source = self.load(args)
        component = source.Component.NSS
        resp = "nss"
        # Log messages matching the following regex patterns contain
        # the useful info we need to produce list output
        patterns = [r'\[cmd']
        if args.pam:
            component = source.Component.PAM
            resp = "pam"

        logger.info(f"******** Listing {resp} client requests ********")
        source.set_component(component, False)
        if args.verbose:
            self.print_formatted_verbose(source)
        else:
            for line in self.matched_line(source, patterns):
                # journald lines carry no trailing newline, print as-is
                if isinstance(source, Journald):
                    print(line)
                else:
                    self.print_formatted(line)

    def track_request(self, args):
        """
        Print Logs pertaining to individual SSSD client request

        Args:
            args (Namespace): populated argparse namespace
        """
        source = self.load(args)
        cid = args.cid
        resp_results = False
        be_results = False
        component = source.Component.NSS
        resp = "nss"
        pattern = [rf"\[CID#{cid}\]"]

        if args.pam:
            component = source.Component.PAM
            resp = "pam"

        logger.info(f"******** Checking {resp} responder for Client ID"
                    f" {cid} *******")
        source.set_component(component, args.child)
        for match in self.matched_line(source, pattern):
            resp_results = self.consume_line(match, source, args.merge)

        logger.info(f"********* Checking Backend for Client ID {cid} ********")
        pattern = [rf'REQ_TRACE.*\[sssd.{resp} CID #{cid}\]']
        source.set_component(source.Component.BE, args.child)

        be_id_regex = r'\[RID#[0-9]+\]'
        be_ids = self.get_linked_ids(source, pattern, be_id_regex)

        # Escape the leading bracket so each linked id matches literally;
        # a plain loop/comprehension assignment replaces the previous
        # side-effecting list comprehension.
        pattern = ['\\' + linked_id for linked_id in be_ids]

        for match in self.matched_line(source, pattern):
            be_results = self.consume_line(match, source, args.merge)

        if args.merge:
            # sort by date/timestamp
            sorted_list = sorted(self.consumed_logs,
                                 key=lambda s: s.split(')')[0])
            for entry in sorted_list:
                print(entry)
        if not resp_results and not be_results:
            # Logger.warn() is deprecated in favor of Logger.warning()
            logger.warning(f"ID {cid} not found in logs!")