-
Notifications
You must be signed in to change notification settings - Fork 353
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #3114 from splunk/update_data_sources
Add data sources dependabot
- Loading branch information
Showing
3 changed files
with
118 additions
and
27 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,56 @@ | ||
name: Splunk TA Update

on:
  workflow_dispatch:  # Allow manual runs from the Actions tab
  schedule:
    - cron: '55 06 * * *'  # Runs daily at 06:55 UTC (comment previously said "midnight")

jobs:
  data-source-validation-and-update:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: 'develop'
          # PAT with write access so the PR branch can be pushed.
          token: ${{ secrets.DATA_SOURCES_DEPENDABOT }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'
          architecture: 'x64'

      - name: Install contentctl
        run: |
          pip install "contentctl>=4.0.0"

      - name: Run ContentCTL data source TA validation
        id: validate
        run: |
          pwd
          contentctl validate --data-source-TA-validation 2>&1 | tee data_source_validation.log
        # A failing validation is the expected trigger for an update PR,
        # so don't fail the job here.
        continue-on-error: true

      - name: Print validation log
        run: |
          cat data_source_validation.log

      - name: Update data sources if validation fails
        run: |
          python .github/workflows/update_data_sources_ta.py
          git status

      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v6
        with:
          token: ${{ secrets.DATA_SOURCES_DEPENDABOT }}
          commit-message: Updated TAs
          branch: auto-ta-update-${{ github.run_number }}
          base: develop
          title: Automated Splunk TA Update ${{ github.run_number }}
          body: "This PR contains updates to Splunk TAs made by GitHub Actions workflow."
          # NOTE(review): the v6 input is `add-paths`; the previous `paths`
          # key is not recognized by this action and was silently ignored.
          # The update script writes to `data_sources/` at the repo root —
          # confirm this glob matches the actual checkout layout.
          add-paths: |
            data_sources/**
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,62 @@ | ||
import os | ||
import yaml | ||
from collections import OrderedDict | ||
|
||
# YAML loader that returns mappings as OrderedDict, preserving the key
# order of the source document on round-trips.
class OrderedLoader(yaml.SafeLoader):
    """SafeLoader variant whose mapping nodes construct OrderedDicts."""


def construct_mapping(loader, node):
    # Resolve YAML merge keys ('<<') first, exactly as SafeLoader would.
    loader.flatten_mapping(node)
    pairs = loader.construct_pairs(node)
    return OrderedDict(pairs)


OrderedLoader.add_constructor(
    yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, construct_mapping)
|
||
# YAML dumper that serializes OrderedDict in insertion order instead of
# SafeDumper's default key-sorted output.
class OrderedDumper(yaml.SafeDumper):
    """SafeDumper variant that emits OrderedDict keys in insertion order."""


def dict_representer(dumper, mapping):
    items = mapping.items()
    return dumper.represent_dict(items)


OrderedDumper.add_representer(OrderedDict, dict_representer)
|
||
# Define the paths
log_file_path = 'data_source_validation.log'
data_sources_dir = 'data_sources'

# Read the contentctl validation log produced by the CI step.
with open(log_file_path, 'r') as log_file:
    log_lines = log_file.readlines()

# Collect EVERY version mismatch from the log — the previous version kept
# only the last one seen, silently dropping updates when several TAs were
# out of date, and raised NameError when no mismatch existed at all.
# Log format assumption (confirm against contentctl output): the mismatch
# line carries the TA name as the second single-quoted token, and the
# following line is "<label>: <latest_version>".
mismatches = {}
for i, line in enumerate(log_lines):
    if 'Version mismatch' in line:
        ta_name = line.split("'")[3].strip()
        latest_version = log_lines[i + 1].split(':')[1].strip()
        mismatches[ta_name] = latest_version
        print(f"Found version mismatch for TA: {ta_name}, updating to version: {latest_version}")

if not mismatches:
    print("No version mismatches found; nothing to update.")
else:
    # Rewrite every data-source YAML file whose supported_TA entries match
    # a mismatched TA, bumping them to the reported latest version.
    for filename in os.listdir(data_sources_dir):
        if not filename.endswith('.yml'):
            continue
        file_path = os.path.join(data_sources_dir, filename)
        with open(file_path, 'r') as yml_file:
            data = yaml.load(yml_file, Loader=OrderedLoader)

        # Empty YAML files load as None — skip them instead of crashing.
        if not data:
            continue

        updated = False
        for ta in data.get('supported_TA', []):
            target = mismatches.get(ta.get('name'))
            if target is not None and ta.get('version') != target:
                ta['version'] = target
                updated = True

        # Only rewrite files that actually changed, preserving key order
        # via OrderedDumper.
        if updated:
            with open(file_path, 'w') as yml_file:
                yaml.dump(data, yml_file, Dumper=OrderedDumper)

print("Version updates completed.")