Release 3.33.0 #2640

Merged · 10 commits · Nov 14, 2024
12 changes: 10 additions & 2 deletions .github/workflows/ci-lite.yaml
@@ -86,7 +86,7 @@ jobs:
type=ref,event=tag
- name: matrix
id: matrix
uses: splunk/addonfactory-test-matrix-action@v2.1.8
uses: splunk/addonfactory-test-matrix-action@v2.1.9

security-fossa-scan:
continue-on-error: true
@@ -161,14 +161,22 @@ jobs:
- meta
- build_action
steps:
# To use .trivyignore file, you must check out the repository
- name: Checkout
uses: actions/checkout@v4
with:
submodules: false
persist-credentials: false
- name: Run docker vulnerability scanner
uses: aquasecurity/trivy-action@master
with:
image-ref: ${{ needs.meta.outputs.container_base }}
format: 'table'
exit-code: '1'
severity: 'CRITICAL,HIGH,MEDIUM,LOW'

trivyignores: '.trivyignore'
scanners: "vuln"

test-container:
runs-on: ubuntu-latest
needs:
12 changes: 10 additions & 2 deletions .github/workflows/ci-main.yaml
@@ -86,7 +86,7 @@ jobs:
type=ref,event=tag
- name: matrix
id: matrix
uses: splunk/addonfactory-test-matrix-action@v2.1.8
uses: splunk/addonfactory-test-matrix-action@v2.1.9

security-fossa-scan:
continue-on-error: true
@@ -161,14 +161,22 @@ jobs:
- meta
- build_action
steps:
# To use .trivyignore file, you must check out the repository
- name: Checkout
uses: actions/checkout@v4
with:
submodules: false
persist-credentials: false
- name: Run docker vulnerability scanner
uses: aquasecurity/trivy-action@master
with:
image-ref: ${{ needs.meta.outputs.container_base }}
format: 'table'
exit-code: '1'
severity: 'CRITICAL,HIGH,MEDIUM,LOW'

trivyignores: '.trivyignore'
scanners: "vuln"

test-container:
runs-on: ubuntu-latest
needs:
2 changes: 2 additions & 0 deletions .trivyignore
@@ -0,0 +1,2 @@
# This has been safeguarded directly in the code
CVE-2024-35515
21 changes: 12 additions & 9 deletions dashboard/dashboard.xml
@@ -18,7 +18,7 @@
</search>
<search id="baseEventsSearch">
<query>
index=* sc4s_container=$sc4s_instance$
| tstats count where index=* sc4s_container=$sc4s_instance$ by index _time
</query>
<earliest>$time_range.earliest$</earliest>
<latest>$time_range.latest$</latest>
@@ -43,8 +43,8 @@
<input type="time" token="time_range">
<label>Timer</label>
<default>
<earliest>rt-15m</earliest>
<latest>rt</latest>
<earliest>-15m</earliest>
<latest>now</latest>
</default>
</input>
<html>
@@ -220,6 +220,7 @@
<option name="numberPrecision">0</option>
<option name="rangeColors">["0x53a051", "0x0877a6", "0xf8be34", "0xf1813f", "0xdc4e41"]</option>
<option name="rangeValues">[0,30,70,100]</option>
<option name="refresh.display">progressbar</option>
<option name="showSparkline">1</option>
<option name="showTrendIndicator">1</option>
<option name="trellis.enabled">0</option>
@@ -310,14 +311,15 @@
<single>
<title>Total volume of actual syslog traffic delivered by this SC4S instance to Splunk</title>
<search base="baseEventsSearch">
<query>| stats count</query>
<query>| stats sum(count)</query>
</search>
<option name="colorBy">value</option>
<option name="colorMode">none</option>
<option name="drilldown">none</option>
<option name="numberPrecision">0</option>
<option name="rangeColors">["0x53a051", "0x0877a6", "0xf8be34", "0xf1813f", "0xdc4e41"]</option>
<option name="rangeValues">[0,30,70,100]</option>
<option name="refresh.display">progressbar</option>
<option name="showSparkline">1</option>
<option name="showTrendIndicator">1</option>
<option name="trellis.enabled">0</option>
@@ -336,7 +338,7 @@
<title>Distributions of events by index</title>
<chart>
<search base="baseEventsSearch">
<query>| stats count by index</query>
<query>| stats sum(count) as count by index</query>
</search>
<option name="charting.axisLabelsX.majorLabelStyle.overflowMode">ellipsisNone</option>
<option name="charting.axisLabelsX.majorLabelStyle.rotation">0</option>
@@ -366,6 +368,7 @@
<option name="charting.legend.mode">standard</option>
<option name="charting.legend.placement">right</option>
<option name="charting.lineWidth">2</option>
<option name="refresh.display">progressbar</option>
<option name="trellis.enabled">0</option>
<option name="trellis.scales.shared">1</option>
<option name="trellis.size">medium</option>
@@ -375,7 +378,7 @@
<title>Trends of events by index</title>
<table>
<search base="baseEventsSearch">
<query>| chart sparkline(count) AS "Indexes Trend" count AS Total BY index</query>
<query>| stats sparkline(sum(count)) as "Indexes Trend" sum(count) as Total by index</query>
</search>
<option name="count">20</option>
<option name="dataOverlayMode">none</option>
@@ -393,7 +396,7 @@
<chart>
<search>
<query>
index=* sc4s_container=$sc4s_instance$ | eval tags=split(sc4s_tags,"|") | mvexpand tags | search tags=".app.*" | timechart count by tags
| tstats count where index=* sc4s_container=$sc4s_instance$ by sc4s_tags _time | eval tags=split(sc4s_tags,"|") | mvexpand tags | search tags=".app.*" | timechart sum(count) by tags
</query>
<earliest>$time_range.earliest$</earliest>
<latest>$time_range.latest$</latest>
@@ -439,7 +442,7 @@
<table>
<search>
<query>
index=* sc4s_container=$sc4s_instance$ | eval tags=split(sc4s_tags,"|") | mvexpand tags | chart count by tags
| tstats count where index=* sc4s_container=$sc4s_instance$ by sc4s_tags _time | eval tags=split(sc4s_tags,"|") | mvexpand tags | stats sum(count) as eventCount by tags | sort - eventCount
</query>
<earliest>$time_range.earliest$</earliest>
<latest>$time_range.latest$</latest>
@@ -449,4 +452,4 @@
</table>
</panel>
</row>
</form>
</form>
1 change: 0 additions & 1 deletion docs/sources/vendor/Cisco/cisco_asa.md
@@ -11,7 +11,6 @@
| Ref | Link |
|----------------|---------------------------------------------------------------------------------------------------------|
| Splunk Add-on for ASA (No longer supports FWSM and PIX) | <https://splunkbase.splunk.com/app/1620/> |
| Cisco eStreamer for Splunk | <https://splunkbase.splunk.com/app/1629/> |
| Product Manual | <https://www.cisco.com/c/en/us/support/docs/security/pix-500-series-security-appliances/63884-config-asa-00.html> |

## Sourcetypes
2 changes: 1 addition & 1 deletion docs/sources/vendor/Dell/avamar.md
@@ -22,4 +22,4 @@

| key | sourcetype | index | notes |
|----------------|----------------|----------------|----------------|
| dell_avamar_cms| dell:avamar:msc| netops | none |
| dell_avamar_msc| dell:avamar:msc| netops | none |
2 changes: 1 addition & 1 deletion package/Dockerfile
@@ -28,7 +28,7 @@ RUN apk add -U --upgrade --no-cache \
less \
net-tools \
netcat-openbsd \
openssl \
"openssl>=3.3.2-r1" \
procps \
py3-pip \
python3 \
2 changes: 1 addition & 1 deletion package/Dockerfile.lite
@@ -28,7 +28,7 @@ RUN apk add -U --upgrade --no-cache \
less \
net-tools \
netcat-openbsd \
openssl \
"openssl>=3.3.2-r1" \
procps \
py3-pip \
python3 \
@@ -57,7 +57,8 @@ block parser app-postfilter-cisco_ise() {
application app-postfilter-cisco_ise[sc4s-finalfilter] {
filter {
program('CISE_' type(string) flags(prefix))
and "${.values.num}" != 1;
and "${.values.num}" != 1
and not program('CISE_Alarm');
};
parser { app-postfilter-cisco_ise(); };
};
29 changes: 25 additions & 4 deletions package/etc/conf.d/conflib/syslog/app-syslog-cisco_ise.conf
@@ -18,6 +18,27 @@ parser ise_event_time {
block parser app-syslog-cisco_ise() {

channel {

if {
parser {
regexp-parser(
template("${MESSAGE}")
patterns("^(?<real_day>\\d{2}) (?<real_hour>\\d{2}:\\d{2}:\\d{2}) (?<real_host>[^ ]+) (?<real_program>[^ ]+) (?<rest_of_message>.*)")
prefix(".parsed.")
);

date-parser-nofilter(
format('%b %d %H:%M:%S')
template("${PROGRAM} ${.parsed.real_day} ${.parsed.real_hour}")
);
};
rewrite {
set("${.parsed.real_host}" value("HOST"));
set("${.parsed.real_program}" value("PROGRAM"));
set("${.parsed.rest_of_message}" value("MESSAGE"));
};
};

parser {
csv-parser(
columns(serial, num, seq, message)
@@ -44,13 +65,13 @@ block parser app-syslog-cisco_ise() {
product('ise')
);
};


};
};
};

application app-syslog-cisco_ise[sc4s-syslog-pgm] {
filter {
program('CISE_' type(string) flags(prefix));
program('CISE_' type(string) flags(prefix))
or message('CISE_' type(string) flags(substring));
};
parser { app-syslog-cisco_ise(); };
};
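
As a side note on the new regexp-parser above: the same pattern can be exercised outside syslog-ng with Python's re module, which is a quick way to check what the added capture groups pull out of a forwarded ISE header. Only the pattern itself comes from the diff; the sample line below is hypothetical, and Python spells named groups (?P<name>...) where syslog-ng uses (?<name>...).

import re

# Python translation of the regexp-parser pattern added above.
ise_header = re.compile(
    r"^(?P<real_day>\d{2}) (?P<real_hour>\d{2}:\d{2}:\d{2}) "
    r"(?P<real_host>[^ ]+) (?P<real_program>[^ ]+) (?P<rest_of_message>.*)"
)

# Hypothetical ${MESSAGE} remainder; the date-parser in the diff combines
# ${PROGRAM} (holding the month abbreviation at that point) with real_day/real_hour.
sample = "14 10:15:42 ise-psn-01 CISE_Failed_Attempts 0000012345 1 0 2024-11-14 10:15:42 ..."
fields = ise_header.match(sample).groupdict()
print(fields["real_host"], fields["real_program"])  # ise-psn-01 CISE_Failed_Attempts
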
14 changes: 9 additions & 5 deletions package/etc/pylib/parser_source_cache.py
@@ -2,7 +2,6 @@
import traceback
import socket
import struct
from sqlitedict import SqliteDict

import time

@@ -17,7 +16,6 @@ class LogParser:
class LogDestination:
pass


def ip2int(addr):
ip4_to_int = lambda addr: struct.unpack("!I", socket.inet_aton(addr))[0]

@@ -53,8 +51,10 @@ def int_to_ip6(num):

class psc_parse(LogParser):
def init(self, options):
from sqlite_utils import RestrictedSqliteDict

self.logger = syslogng.Logger()
self.db = SqliteDict(f"{hostdict}.sqlite")
self.db = RestrictedSqliteDict(f"{hostdict}.sqlite")
return True

def deinit(self):
@@ -80,9 +80,11 @@ def parse(self, log_message):

class psc_dest(LogDestination):
def init(self, options):
from sqlite_utils import RestrictedSqliteDict

self.logger = syslogng.Logger()
try:
self.db = SqliteDict(f"{hostdict}.sqlite", autocommit=True)
self.db = RestrictedSqliteDict(f"{hostdict}.sqlite", autocommit=True)
except Exception:
exc_type, exc_value, exc_traceback = sys.exc_info()
lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
@@ -123,7 +125,9 @@ def flush(self):


if __name__ == "__main__":
db = SqliteDict(f"{hostdict}.sqlite", autocommit=True)
from sqlite_utils import RestrictedSqliteDict

db = RestrictedSqliteDict(f"{hostdict}.sqlite", autocommit=True)
db[0] = "seed"
db.commit()
db.close()
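
For context, the values this parser caches are keyed by integer-encoded IP addresses (see the ip2int helper in the diff context above). Below is a standalone sketch of that IPv4 round trip, handy when inspecting the cache outside syslog-ng; the int2ip name is illustrative and not part of the PR.

import socket
import struct

def ip2int(addr: str) -> int:
    # Network-byte-order unsigned int, as in parser_source_cache.py.
    return struct.unpack("!I", socket.inet_aton(addr))[0]

def int2ip(num: int) -> str:
    # Reverse conversion, shown here only for illustration.
    return socket.inet_ntoa(struct.pack("!I", num))

assert int2ip(ip2int("192.0.2.10")) == "192.0.2.10"
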
10 changes: 6 additions & 4 deletions package/etc/pylib/parser_vps_cache.py
@@ -2,7 +2,6 @@
import traceback
import socket
import struct
from sqlitedict import SqliteDict

import time

@@ -17,14 +16,15 @@ class LogParser:
class LogDestination:
pass


hostdict = str("/var/lib/syslog-ng/vps")


class vpsc_parse(LogParser):
def init(self, options):
from sqlite_utils import RestrictedSqliteDict

self.logger = syslogng.Logger()
self.db = SqliteDict(f"{hostdict}.sqlite")
self.db = RestrictedSqliteDict(f"{hostdict}.sqlite")
return True

def deinit(self):
@@ -50,9 +50,11 @@ def parse(self, log_message):

class vpsc_dest(LogDestination):
def init(self, options):
from sqlite_utils import RestrictedSqliteDict

self.logger = syslogng.Logger()
try:
self.db = SqliteDict(f"{hostdict}.sqlite", autocommit=True)
self.db = RestrictedSqliteDict(f"{hostdict}.sqlite", autocommit=True)
except Exception:
exc_type, exc_value, exc_traceback = sys.exc_info()
lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
28 changes: 28 additions & 0 deletions package/etc/pylib/sqlite_utils.py
@@ -0,0 +1,28 @@
import io
import pickle
from base64 import b64decode
from sqlitedict import SqliteDict


class RestrictedUnpickler(pickle.Unpickler):
def find_class(self, module, name):
"""Override pickle.Unpickler.find_class() to prevent deserialization of class instances."""
raise pickle.UnpicklingError("Class deserialization is disabled")


def restricted_loads(s):
"""Helper function analogous to pickle.loads()."""
return RestrictedUnpickler(io.BytesIO(s)).load()

def restricted_decode(obj):
"""Overwrite sqlitedict.decode() to prevent code injection."""
return restricted_loads(bytes(obj))

def restricted_decode_key(key):
"""Overwrite sqlitedict.decode_key() to prevent code injection."""
return restricted_loads(b64decode(key.encode("ascii")))


class RestrictedSqliteDict(SqliteDict):
def __init__(self, *args, **kwargs):
super(RestrictedSqliteDict, self).__init__(*args, decode=restricted_decode, decode_key=restricted_decode_key, **kwargs)
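
A minimal usage sketch of the new wrapper, assuming sqlitedict is installed and sqlite_utils.py is importable (the file path and keys below are made up): simple keys and values still round-trip through the default pickle encoder, while any stored pickle that references a class is refused at read time by RestrictedUnpickler.find_class.

import pickle
from sqlite_utils import RestrictedSqliteDict

db = RestrictedSqliteDict("/tmp/example.sqlite", autocommit=True)  # hypothetical path
db["10.0.0.1"] = "seen"    # plain types never reach find_class, so they decode fine
print(db["10.0.0.1"])      # -> seen

class Thing:
    pass

db["obj"] = Thing()        # writes still pickle anything...
try:
    db["obj"]              # ...but reads refuse to rebuild class instances
except pickle.UnpicklingError as err:
    print(err)             # Class deserialization is disabled
db.close()
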
@@ -57,7 +57,8 @@ block parser app-postfilter-cisco_ise() {
application app-postfilter-cisco_ise[sc4s-finalfilter] {
filter {
program('CISE_' type(string) flags(prefix))
and "${.values.num}" != 1;
and "${.values.num}" != 1
and not program('CISE_Alarm');
};
parser { app-postfilter-cisco_ise(); };
};