COMPONENT: sssctl
Removing the unused modules
Roy214 committed May 29, 2023
1 parent 650e8d0 commit 21e5bfc
Showing 6 changed files with 123 additions and 45 deletions.
2 changes: 2 additions & 0 deletions src/tools/analyzer/Makefile.am
@@ -13,10 +13,12 @@ dist_pkgpython_DATA = \
source_reader.py \
parser.py \
sss_analyze.py \
util.py \
$(NULL)

modulesdir = $(pkgpythondir)/modules
dist_modules_DATA = \
modules/__init__.py \
modules/request.py \
modules/error.py \
$(NULL)
62 changes: 62 additions & 0 deletions src/tools/analyzer/modules/error.py
@@ -0,0 +1,62 @@
from sssd import util
from sssd.parser import SubparsersAction
from sssd import sss_analyze

class ErrorAnalyzer:
"""
    An error analyzer module; lists errors reported by sssd_be
"""
module_parser = None
print_opts = []

def print_module_help(self, args):
"""
Print the module parser help output
Args:
args (Namespace): argparse parsed arguments
"""
self.module_parser.print_help()

def setup_args(self, parser_grp, cli):
"""
Setup module parser, subcommands, and options
Args:
parser_grp (argparse.Action): Parser group to nest
module and subcommands under
"""
desc = "Analyze error check module"
self.module_parser = parser_grp.add_parser('error',
description=desc,
help='Error checker')

subparser = self.module_parser.add_subparsers(title=None,
dest='subparser',
action=SubparsersAction,
metavar='COMMANDS')

subcmd_grp = subparser.add_parser_group('Operation Modes')
cli.add_subcommand(subcmd_grp, 'list', 'Print error messages found in backend',
self.print_error, self.print_opts)

self.module_parser.set_defaults(func=self.print_module_help)

return self.module_parser

def print_error(self, args):
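        """
        Print error messages found in the backend (sssd_be) logs and
        point at the troubleshooting documentation when any are found
        Args:
            args (Namespace): argparse parsed arguments
        """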
err = 0
utl = util.Utils()
source = utl.load(args)
component = source.Component.BE
source.set_component(component, False)
resp = "BE"
        patterns = [r'sdap_async_sys_connect request failed',
                    r'terminated by own WATCHDOG',
                    r'ldap_sasl_interactive_bind_s failed',
                    r'Communication with KDC timed out',
                    r'SSSD is offline',
                    r'Backend is offline',
                    r'tsig verify failure',
                    r'ldap_install_tls failed',
                    r's2n exop request failed']
for line in utl.matched_line(source, patterns):
            err += 1
print(line)
if err > 0:
print("For possible solutions please refer to https://sssd.io/troubleshooting/errors.html")
return
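For a quick sense of what print_error does, here is a minimal, self-contained sketch of the same OR-style pattern matching over an in-memory sample. The sample log lines below are illustrative only; the real code iterates a Files or Journald reader obtained through util.Utils.

import re

patterns = [r'SSSD is offline', r'Backend is offline',
            r'terminated by own WATCHDOG']
sample = [
    "(2023-05-29 10:00:01): [be[example.com]] Backend is offline",
    "(2023-05-29 10:00:02): [be[example.com]] Backend is online again",
]

err = 0
for line in sample:
    # OR semantics: a line counts if any pattern matches it
    if any(re.search(p, line) for p in patterns):
        err += 1
        print(line)
if err > 0:
    print("For possible solutions please refer to "
          "https://sssd.io/troubleshooting/errors.html")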
54 changes: 10 additions & 44 deletions src/tools/analyzer/modules/request.py
@@ -1,6 +1,6 @@
import re
import logging

from sssd import util
from sssd.parser import SubparsersAction
from sssd.parser import Option

@@ -38,7 +38,6 @@ def print_module_help(self, args):
def setup_args(self, parser_grp, cli):
"""
Setup module parser, subcommands, and options
Args:
parser_grp (argparse.Action): Parser group to nest
module and subcommands under
@@ -63,42 +62,6 @@ def setup_args(self, parser_grp, cli):

return self.module_parser

def load(self, args):
"""
Load the appropriate source reader.
Args:
args (Namespace): argparse parsed arguments
Returns:
Instantiated source object
"""
if args.source == "journald":
from sssd.source_journald import Journald
source = Journald()
else:
from sssd.source_files import Files
source = Files(args.logdir)
return source

def matched_line(self, source, patterns):
"""
Yield lines which match any number of patterns (OR) in
provided patterns list.
Args:
source (Reader): source Reader object
Yields:
lines matching the provided pattern(s)
"""
for line in source:
for pattern in patterns:
re_obj = re.compile(pattern)
if re_obj.search(line):
if line.startswith(' * '):
continue
yield line

def get_linked_ids(self, source, pattern, regex):
"""
Retrieve list of associated REQ_TRACE ids. Filter
@@ -114,8 +77,9 @@ def get_linked_ids(self, source, pattern, regex):
Returns:
List of linked ids discovered
"""
utl = util.Utils()
linked_ids = []
for match in self.matched_line(source, pattern):
for match in utl.matched_line(source, pattern):
id_re = re.compile(regex)
match = id_re.search(match)
if match:
@@ -250,7 +214,8 @@ def list_requests(self, args):
Args:
args (Namespace): populated argparse namespace
"""
source = self.load(args)
utl = util.Utils()
source = utl.load(args)
component = source.Component.NSS
resp = "nss"
# Log messages matching the following regex patterns contain
@@ -266,7 +231,7 @@ def list_requests(self, args):
if args.verbose:
self.print_formatted_verbose(source)
else:
for line in self.matched_line(source, patterns):
for line in utl.matched_line(source, patterns):
if type(source).__name__ == 'Journald':
print(line)
else:
@@ -279,7 +244,8 @@ def track_request(self, args):
Args:
args (Namespace): populated argparse namespace
"""
source = self.load(args)
utl = util.Utils()
source = utl.load(args)
cid = args.cid
resp_results = False
be_results = False
@@ -294,7 +260,7 @@ def track_request(self, args):
logger.info(f"******** Checking {resp} responder for Client ID"
f" {cid} *******")
source.set_component(component, args.child)
for match in self.matched_line(source, pattern):
for match in utl.matched_line(source, pattern):
resp_results = self.consume_line(match, source, args.merge)

logger.info(f"********* Checking Backend for Client ID {cid} ********")
@@ -307,7 +273,7 @@ def track_request(self, args):
pattern.clear()
[pattern.append(f'\\{id}') for id in be_ids]

for match in self.matched_line(source, pattern):
for match in utl.matched_line(source, pattern):
be_results = self.consume_line(match, source, args.merge)

if args.merge:
2 changes: 1 addition & 1 deletion src/tools/analyzer/sss_analyze
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3

from sssd import sss_analyze

3 changes: 3 additions & 0 deletions src/tools/analyzer/sss_analyze.py
@@ -1,6 +1,7 @@
import argparse

from sssd.modules import request
from sssd.modules import error
from sssd.parser import SubparsersAction


@@ -55,9 +56,11 @@ def load_modules(self, parser, parser_grp):
"""
        # Load the 'request' and 'error' analyzer modules
req = request.RequestAnalyzer()
err = error.ErrorAnalyzer()
cli = Analyzer()

req.setup_args(parser_grp, cli)
err.setup_args(parser_grp, cli)

def setup_args(self):
"""
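Taken together with the request module, the contract load_modules relies on is small: each analyzer module exposes setup_args(parser_grp, cli), registers its own subparser and subcommands, and returns the module parser. A sketch of a hypothetical future module following the same pattern; the 'example' name, ExampleAnalyzer class, and list_things callback are illustrative and not part of this commit.

from sssd.parser import SubparsersAction


class ExampleAnalyzer:
    """
    Hypothetical analyzer module showing the registration pattern
    """
    module_parser = None

    def setup_args(self, parser_grp, cli):
        # Register this module's own subparser under the shared parser group
        self.module_parser = parser_grp.add_parser('example',
                                                   description='Example module',
                                                   help='Example checker')
        subparser = self.module_parser.add_subparsers(title=None,
                                                      dest='subparser',
                                                      action=SubparsersAction,
                                                      metavar='COMMANDS')
        subcmd_grp = subparser.add_parser_group('Operation Modes')
        cli.add_subcommand(subcmd_grp, 'list', 'List something interesting',
                           self.list_things, [])
        return self.module_parser

    def list_things(self, args):
        print("illustrative output")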
45 changes: 45 additions & 0 deletions src/tools/analyzer/util.py
@@ -0,0 +1,45 @@
import re
import logging

from sssd.source_files import Files
from sssd.source_journald import Journald
from sssd.parser import SubparsersAction

logger = logging.getLogger()


class Utils:

def load(self, args):
"""
Load the appropriate source reader.
Args:
args (Namespace): argparse parsed arguments
Returns:
Instantiated source object
"""
if args.source == "journald":
source = Journald()
else:
source = Files(args.logdir)
return source

def matched_line(self, source, patterns):
"""
Yield lines which match any number of patterns (OR) in
provided patterns list.
Args:
source (Reader): source Reader object
Yields:
lines matching the provided pattern(s)
"""
for line in source:
for pattern in patterns:
re_obj = re.compile(pattern)
if re_obj.search(line):
if line.startswith(' * '):
continue
yield line
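A minimal usage sketch of the new shared helper, mirroring how print_error consumes it. The SimpleNamespace stand-in for the argparse Namespace and the /var/log/sssd log directory are assumptions for illustration; the real args object is built by sss_analyze.

from types import SimpleNamespace

from sssd import util

utl = util.Utils()
# Stand-in for the argparse Namespace built by sss_analyze; only the fields
# load() actually reads are filled in, and the logdir value is assumed.
args = SimpleNamespace(source="files", logdir="/var/log/sssd")
source = utl.load(args)
# Select the backend logs before iterating, as print_error does
source.set_component(source.Component.BE, False)
for line in utl.matched_line(source, [r'Backend is offline']):
    print(line)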
