Merge pull request #1134 from NagiosEnterprises/dev-v3.0.2
NCPA 3.0.2 Release
Sebastian Wolf authored Mar 20, 2024
2 parents 6d2dd51 + 787f724 commit 6eb3ff0
Showing 20 changed files with 231 additions and 68 deletions.
22 changes: 22 additions & 0 deletions CHANGES.rst
@@ -1,6 +1,28 @@
Changelog
+++++++++

3.0.2 - 03/20/2024
==================
**Updates**

- Added a filter to log output to remove tokens from the WSGI Server's log output. (Blake Bahner)
- Added busy_time to the disk/physical endpoint on posix systems to provide the percentage of time the disk is busy. (Blake Bahner)
- Updated the bundled Python version to 3.11.8 and OpenSSL version to 3.0.13 to resolve CVEs. (Blake Bahner)
- Updated the bundled zLib version and link so the build won't break when zLib is updated. (Blake Bahner)

**Bug Fixes**

- Fixed an issue where plugins with unrecognized file extensions would not be executed. (Blake Bahner)
- Fixed an issue where NCPA would fail to restart after rebooting the host server. (Sebastian Wolf, Blake Bahner)
- Fixed an issue where NCPA would crash if the passive log file was not present. (Ivan-Roger)
- Fixed an issue where plugins would fail to execute if the user's group had permission, but the user did not. (graham-collinson)
- Fixed an issue where NCPA would crash if ssl_ciphers was set for the listener. (Ivan-Roger)
- Fixed a documentation issue where the pid file name was not updated to reflect the NCPA 3 changes. (Blake Bahner)
- Fixed an issue where NCPA would crash if a plugin had no output. (Blake Bahner)
- Fixed an issue where Windows logs with a different date format would fail to parse. (gittethis)
- Fixed an issue where certain RHEL systems would fail to start NCPA on reboot. (Blake Bahner)
- Fixed an issue where Mac builds would fail due to a change in a dependency library. (Blake Bahner)

3.0.1 - 12/13/2023
==================
**Updates**
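As a quick illustration of two of the 3.0.2 updates above (the new busy_time metric on the disk/physical endpoint and the token filter on the WSGI log output), a request for the metric might look like the sketch below. The host, port, token, and disk name are placeholders, and the exact endpoint path is assumed from the changelog wording rather than taken from the commit; note that the token travels in the query string, which is exactly what the new log filter masks:

    import requests

    NCPA_HOST = "https://ncpa.example.com:5693"   # placeholder host; 5693 is the usual NCPA listener port
    TOKEN = "mytoken"                             # placeholder community token

    # NCPA typically serves its API over a self-signed certificate, hence verify=False here.
    resp = requests.get(
        f"{NCPA_HOST}/api/disk/physical/sda/busy_time",
        params={"token": TOKEN},
        verify=False,
        timeout=10,
    )
    print(resp.json())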
2 changes: 1 addition & 1 deletion README.rst
@@ -12,7 +12,7 @@ Downloads
Current versions:

+---------+-------------+-------------------------------------------------------+
| Current | **3.0.1** | `Downloads <https://www.nagios.org/ncpa/#downloads>`_ |
| Current | **3.0.2** | `Downloads <https://www.nagios.org/ncpa/#downloads>`_ |
+---------+-------------+-------------------------------------------------------+

Note: If your nagios repo is outdated when installing NCPA, you must first update your GPG key using the following:
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
3.0.1
3.0.2
7 changes: 4 additions & 3 deletions agent/listener/pluginnodes.py
@@ -1,7 +1,7 @@
import os
import time
import logging
from configparser import ConfigParser
from configparser import NoOptionError
import subprocess
import shlex
import re
@@ -66,9 +66,10 @@ def get_plugin_instructions(self, config):
if extension.strip() == "":
return "$plugin_name $plugin_args"
return config.get("plugin directives", extension)
except ConfigParser.NoOptionError:
except NoOptionError:
return "$plugin_name $plugin_args"
else:
except Exception as e:
logging.error("Error processing plugin instructions: %r\nAttempting to run: %r", e, self.name)
return "$plugin_name $plugin_args"

def kill_proc(self, p, t):
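A minimal sketch of the lookup that get_plugin_instructions() performs after this change: the plugin's file extension is looked up in the [plugin directives] config section, and a missing directive now falls back to running the plugin directly instead of raising. The sample directives below are illustrative, not taken from the commit:

    from configparser import ConfigParser, NoOptionError

    config = ConfigParser()
    config.read_string("""
    [plugin directives]
    .py = python $plugin_name $plugin_args
    .ps1 = powershell -File $plugin_name $plugin_args
    """)

    def instructions_for(extension):
        if extension.strip() == "":
            return "$plugin_name $plugin_args"
        try:
            return config.get("plugin directives", extension)
        except NoOptionError:
            # Unrecognized extension: run the plugin directly (the 3.0.2 fix).
            return "$plugin_name $plugin_args"

    print(instructions_for(".py"))     # python $plugin_name $plugin_args
    print(instructions_for(".weird"))  # $plugin_name $plugin_args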
20 changes: 19 additions & 1 deletion agent/listener/psapi.py
@@ -48,6 +48,23 @@ def make_disk_nodes(disk_name):
"write_bytes",
method=lambda: (ps.disk_io_counters(perdisk=True)[disk_name].write_bytes, "B"),
)
if __SYSTEM__ == "posix" and platform.system() != "Darwin":
busy_time = RunnableNode(
"busy_time",
method=lambda: (ps.disk_io_counters(perdisk=True)[disk_name].busy_time, "ms"),
)
return ParentNode(
disk_name,
children=[
read_time,
read_bytes,
write_count,
write_time,
write_bytes,
read_count,
busy_time,
],
)
return ParentNode(
disk_name,
children=[
@@ -84,6 +101,7 @@ def make_mountpoint_nodes(partition_name):
safe_mountpoint = re.sub(r"[\\/]+", "|", mountpoint)


node_children = []
# Unix specific inode counter ~ sorry Windows! :'(
if __SYSTEM__ != "nt":
try:
@@ -114,7 +132,7 @@ def make_mountpoint_nodes(partition_name):
total,
maxpath,
opts,
inodes_used_percent
inodes_used_percent,
]
except OSError as ex:
# Log this error as debug only, normally means could not count inodes because
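For context on the busy_time child node added above: psutil only reports busy_time from disk_io_counters() on some platforms (Linux and FreeBSD, per its documentation), which is why the node is gated to posix systems that are not Darwin. A defensive way to inspect the underlying counters:

    import psutil

    for disk_name, counters in psutil.disk_io_counters(perdisk=True).items():
        # busy_time is reported in milliseconds where available, None otherwise
        busy = getattr(counters, "busy_time", None)
        print(disk_name, counters.read_bytes, counters.write_bytes, busy)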
12 changes: 10 additions & 2 deletions agent/listener/server.py
@@ -29,6 +29,7 @@
__STARTED__ = datetime.datetime.now()
__INTERNAL__ = False


# The following if statement is a workaround that is allowing us to run this
# in debug mode, rather than a hard coded location.

@@ -177,7 +178,6 @@ def secure_compare(item1, item2):
# Authentication Wrappers
# ------------------------------


@listener.before_request
def before_request():
# allowed is set to False by default
@@ -187,7 +187,15 @@ def before_request():

# For logging some debug info for actual page requests
if isinstance(request.view_args, dict) and ('filename' not in request.view_args):
logging.info("before_request() - request.url: %s", request.url)
logurl = request.url
parts = logurl.split('token=')
new_parts = [parts[0]]
for part in parts[1:]:
sub_parts = part.split('&', 1)
sub_parts[0] = '********'
new_parts.append('&'.join(sub_parts))
logurl = 'token='.join(new_parts)
logging.info("before_request() - request.url: %s", logurl)
logging.debug(" before_request() - request.path: %s", request.path)
logging.debug(" before_request() - request.url_rule: %s", request.url_rule)
logging.debug(" before_request() - request.view_args: %s", request.view_args)
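The token scrubbing added to before_request() above splits and rejoins the URL around every token= occurrence. A minimal sketch of the same idea as a standalone helper, using a single re.sub instead of manual splitting (roughly equivalent for ordinary query strings):

    import re

    def scrub_token(url):
        """Replace the value of any token= query parameter with asterisks."""
        return re.sub(r"(token=)[^&]*", r"\1********", url)

    print(scrub_token("https://host:5693/api/cpu/percent?token=s3cret&units=G"))
    # -> https://host:5693/api/cpu/percent?token=********&units=G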
4 changes: 2 additions & 2 deletions agent/listener/static/help/configuration.html
@@ -122,8 +122,8 @@ <h3>[general]</h3>
<tr>
<td><i class="fa fa-asterisk"></i></td>
<th>pidfile</th>
<td>var/run/ncpa_listener.pid</td>
<td>The name and location of where to place the <em>NCPA Listener</em> PID file. <em>Linux and Mac OS X only.</em></td>
<td>var/run/ncpa.pid</td>
<td>The name and location of where to place the <em>NCPA</em> PID file. <em>Linux and Mac OS X only.</em></td>
</tr>
<tr>
<td></td>
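For reference, the corresponding ncpa.cfg entry documented above might look like the excerpt below (path shown relative to the NCPA install directory, as in the documentation table; the comment is illustrative):

    [general]
    # NCPA 3 uses a single ncpa.pid file; the old ncpa_listener.pid name no longer applies.
    pidfile = var/run/ncpa.pid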
48 changes: 40 additions & 8 deletions agent/listener/windowslogs.py
@@ -1,4 +1,4 @@
# !/usr/bin/env python
#!/usr/bin/env python

"""A plugin checking event logs on Windows is expected to take the following inputs:
@@ -62,8 +62,10 @@
class WindowsLogsNode(listener.nodes.LazyNode):

global stdLogs
global date_format1, date_format2
stdLogs = ['Application','System','Security','Setup','Forwarded Events']

date_format1 = '%Y-%m-%d %H:%M:%S.%f'
date_format2 = '%Y-%m-%d %H:%M:%S'
def walk(self, *args, **kwargs):
logtypes = get_logtypes(kwargs)
filters = get_filter_dict(kwargs)
@@ -317,7 +319,15 @@ def get_datetime_from_date_input(date_input):
logging.error('Date input was invalid, Given: %r, %r', date_input, exc)
t_delta = datetime.timedelta(days=1)
return t_delta
def check_date_format(date_string, date_format):
#formats_to_check = ["%Y-%m-%d %H:%M:%S.%f", "%Y-%m-%d %H:%M:%S"]
try:
datetime.datetime.strptime(date_string, date_format)
return True
except ValueError:
pass

return False

def datetime_from_event_date(evt_date):
"""
@@ -328,7 +338,11 @@ def datetime_from_event_date(evt_date):
doesn't take care of this, but alas, here we are.
"""
date_string = str(evt_date)
time_generated = datetime.datetime.strptime(date_string, '%Y-%m-%d %H:%M:%S')

if check_date_format(date_string,date_format1):
time_generated = datetime.datetime.strptime(date_string, date_format1)
else:
time_generated = datetime.datetime.strptime(date_string, date_format2)
return time_generated


@@ -507,8 +521,16 @@ def normalize_xml_event(row, name):
safe_log['severity'] = str(EVENT_TYPE_NEW.get(int(row['EventType']), 'UNKNOWN'))
safe_log['application'] = row['SourceName']
safe_log['utc_time_generated'] = row['TimeCreated SystemTime']

rDate=datetime.datetime.strptime(str(row['TimeCreated SystemTime']),'%m/%d/%y %H:%M:%S')
s1 = str(row['TimeCreated SystemTime'])
date_part, offset_part = s1.rsplit('+', 1)
rDate=datetime.datetime.strptime(str(date_part),date_format1)

hours_offset = int(offset_part[:2])
minutes_offset = int(offset_part[3:])
timezone_offset = datetime.timedelta(hours=hours_offset, minutes=minutes_offset)
if offset_part[0] == '-':
timezone_offset = -timezone_offset
rDate -=timezone_offset
timeDiffSec=(datetime.datetime.utcnow() - datetime.datetime.now()).total_seconds()
timeLocal = str(rDate+datetime.timedelta(seconds=-timeDiffSec))
safe_log['time_generated'] = timeLocal
@@ -569,9 +591,19 @@ def get_event_logs(server, name, filters):
]
if time_created_variant == win32evtlog.EvtVarTypeNull:
raise StopIteration
temp_date=datetime.datetime.strptime(str(time_created_value),'%m/%d/%y %H:%M:%S')
time_from_event=temp_date.strftime('%m/%d/%y %H:%M:%S')
time_generated = datetime_from_event_date(time_from_event)
s1 = str(time_created_value)
date_part, offset_part = s1.rsplit('+', 1)
temp_date=datetime.datetime.strptime(str(date_part),date_format1)

# Parse and adjust the timezone offset
hours_offset = int(offset_part[:2])
minutes_offset = int(offset_part[3:])
timezone_offset = datetime.timedelta(hours=hours_offset, minutes=minutes_offset)
if offset_part[0] == '-':
timezone_offset = -timezone_offset
temp_date -=timezone_offset
time_from_event=temp_date.strftime(date_format1)
time_generated = datetime_from_event_date(time_from_event)
if time_generated < logged_after:
raise StopIteration
else:
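The timestamp handling added in normalize_xml_event() and get_event_logs() above strips the UTC offset from the event's TimeCreated SystemTime value and applies it by hand. As a point of comparison, a sketch that lets strptime parse the offset directly via %z (which accepts colon-separated offsets since Python 3.7, so it is available with the bundled 3.11); the timestamp value is illustrative, not taken from the commit:

    import datetime

    raw = "2024-03-20 18:45:12.345678+00:00"   # illustrative event timestamp

    parsed = datetime.datetime.strptime(raw, "%Y-%m-%d %H:%M:%S.%f%z")
    utc_naive = parsed.astimezone(datetime.timezone.utc).replace(tzinfo=None)
    print(utc_naive)   # 2024-03-20 18:45:12.345678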
