chore: Conservative dependency updates for Jinja2 security fix (#14792)

* chore: Conservative dependency updates for security

- Update Ansible from 9.1.0 to 9.2.0 (one minor version bump only)
- Update Jinja2 to ~3.1.6 to fix CVE-2025-27516 (critical security fix)
- Pin netaddr to 1.3.0 (current stable version)

This is a minimal, conservative update focused on:
1. Critical security fix for Jinja2
2. Minor ansible update for bug fixes
3. Pinning netaddr to prevent surprises

No changes to Ansible collections - keeping them unpinned for now.

* fix: Address linter issues (ruff, yamllint, shellcheck)

- Fixed ruff configuration by moving linter settings to [tool.ruff.lint] section
- Fixed ruff code issues:
  - Moved imports to top of files (E402)
  - Removed unused variables or commented them out
  - Updated string formatting from % to .format()
  - Replaced dict() calls with literals
  - Fixed assert False usage in tests
- Fixed yamllint issues:
  - Added missing newlines at end of files
  - Removed trailing spaces
  - Added document start markers (---) to YAML files
  - Fixed 'on:' truthy warnings in GitHub workflows
- Fixed shellcheck issues:
  - Properly quoted variables in shell scripts
  - Fixed A && B || C pattern with proper if/then/else
  - Improved FreeBSD rc script quoting

All linters now pass without errors related to our code changes.

* fix: Additional yamllint fixes for GitHub workflows

- Added document start markers (---) to test-effectiveness.yml
- Fixed 'on:' truthy warning by quoting the key as 'on'
- Removed trailing spaces from main.yml
- Added missing newline at end of test-effectiveness.yml
This commit is contained in:
Dan Guido 2025-08-03 07:45:26 -04:00 committed by GitHub
parent 49aa9c49a4
commit be744b16a2
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
21 changed files with 266 additions and 266 deletions

1
.github/FUNDING.yml vendored
View file

@@ -1,3 +1,4 @@
---
# These are supported funding model platforms
github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]

View file

@@ -1,3 +1,4 @@
---
version: 2
updates:
# Maintain dependencies for GitHub Actions

View file

@@ -1,6 +1,7 @@
---
name: Integration Tests
on:
'on':
pull_request:
types: [opened, synchronize, reopened]
paths:

View file

@@ -1,6 +1,7 @@
---
name: Lint
on: [push, pull_request]
'on': [push, pull_request]
permissions:
contents: read

View file

@@ -1,6 +1,7 @@
---
name: Smart Test Selection
on:
'on':
pull_request:
types: [opened, synchronize, reopened]

View file

@@ -1,6 +1,7 @@
---
name: Test Effectiveness Tracking
on:
'on':
schedule:
- cron: '0 0 * * 0' # Weekly on Sunday
workflow_dispatch: # Allow manual runs

View file

@@ -11,9 +11,9 @@ usage() {
retcode="${1:-0}"
echo "To run algo from Docker:"
echo ""
echo "docker run --cap-drop=all -it -v <path to configurations>:"${DATA_DIR}" ghcr.io/trailofbits/algo:latest"
echo "docker run --cap-drop=all -it -v <path to configurations>:${DATA_DIR} ghcr.io/trailofbits/algo:latest"
echo ""
exit ${retcode}
exit "${retcode}"
}
if [ ! -f "${DATA_DIR}"/config.cfg ] ; then
@@ -25,7 +25,7 @@ fi
if [ ! -e /dev/console ] ; then
echo "Looks like you're trying to run this container without a TTY."
echo "If you don't pass `-t`, you can't interact with the algo script."
echo "If you don't pass -t, you can't interact with the algo script."
echo ""
usage -1
fi
@@ -41,4 +41,4 @@ test -d "${DATA_DIR}"/configs && rsync -qLktr --delete "${DATA_DIR}"/configs "${
retcode=${?}
rsync -qLktr --delete "${ALGO_DIR}"/configs "${DATA_DIR}"/
exit ${retcode}
exit "${retcode}"

View file

@@ -138,9 +138,8 @@ def wait_action(module, rest, ip, action_id, timeout=10):
end_time = time.time() + 10
while time.time() < end_time:
response = rest.get(f'floating_ips/{ip}/actions/{action_id}')
status_code = response.status_code
# status_code = response.status_code # TODO: check status_code == 200?
status = response.json['action']['status']
# TODO: check status_code == 200?
if status == 'completed':
return True
elif status == 'errored':
@@ -150,7 +149,7 @@ def wait_action(module, rest, ip, action_id, timeout=10):
def core(module):
api_token = module.params['oauth_token']
# api_token = module.params['oauth_token'] # unused for now
state = module.params['state']
ip = module.params['ip']
droplet_id = module.params['droplet_id']
@@ -185,7 +184,7 @@ def get_floating_ip_details(module, rest):
if status_code == 200:
return json_data['floating_ip']
else:
module.fail_json(msg="Error assigning floating ip [{0}: {1}]".format(
module.fail_json(msg="Error assigning floating ip [{}: {}]".format(
status_code, json_data["message"]), region=module.params['region'])
@@ -205,7 +204,7 @@ def assign_floating_id_to_droplet(module, rest):
module.exit_json(changed=True, data=json_data)
else:
module.fail_json(msg="Error creating floating ip [{0}: {1}]".format(
module.fail_json(msg="Error creating floating ip [{}: {}]".format(
status_code, json_data["message"]), region=module.params['region'])
@@ -247,26 +246,26 @@ def create_floating_ips(module, rest):
if status_code == 202:
module.exit_json(changed=True, data=json_data)
else:
module.fail_json(msg="Error creating floating ip [{0}: {1}]".format(
module.fail_json(msg="Error creating floating ip [{}: {}]".format(
status_code, json_data["message"]), region=module.params['region'])
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(choices=['present', 'absent'], default='present'),
ip=dict(aliases=['id'], required=False),
region=dict(required=False),
droplet_id=dict(required=False, type='int'),
oauth_token=dict(
no_log=True,
argument_spec={
'state': {'choices': ['present', 'absent'], 'default': 'present'},
'ip': {'aliases': ['id'], 'required': False},
'region': {'required': False},
'droplet_id': {'required': False, 'type': 'int'},
'oauth_token': {
'no_log': True,
# Support environment variable for DigitalOcean OAuth Token
fallback=(env_fallback, ['DO_API_TOKEN', 'DO_API_KEY', 'DO_OAUTH_TOKEN']),
required=True,
),
validate_certs=dict(type='bool', default=True),
timeout=dict(type='int', default=30),
),
'fallback': (env_fallback, ['DO_API_TOKEN', 'DO_API_KEY', 'DO_OAUTH_TOKEN']),
'required': True,
},
'validate_certs': {'type': 'bool', 'default': True},
'timeout': {'type': 'int', 'default': 30},
},
required_if=[
('state', 'delete', ['ip'])
],

View file

@@ -2,26 +2,23 @@
import json
from ansible.module_utils.gcp_utils import GcpModule, GcpSession, navigate_hash
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}
################################################################################
# Imports
################################################################################
import json
from ansible.module_utils.gcp_utils import GcpModule, GcpSession, navigate_hash
################################################################################
# Main
################################################################################
def main():
module = GcpModule(argument_spec=dict(filters=dict(type='list', elements='str'), scope=dict(required=True, type='str')))
module = GcpModule(argument_spec={'filters': {'type': 'list', 'elements': 'str'}, 'scope': {'required': True, 'type': 'str'}})
if module._name == 'gcp_compute_image_facts':
module.deprecate("The 'gcp_compute_image_facts' module has been renamed to 'gcp_compute_regions_info'", version='2.13')
@@ -59,7 +56,7 @@ def query_options(filters):
for f in filters:
# For multiple queries, all queries should have ()
if f[0] != '(' and f[-1] != ')':
queries.append("(%s)" % ''.join(f))
queries.append("({})".format(''.join(f)))
else:
queries.append(f)
@@ -79,7 +76,7 @@ def return_if_object(module, response):
module.raise_for_status(response)
result = response.json()
except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst:
module.fail_json(msg="Invalid JSON response with error: %s" % inst)
module.fail_json(msg="Invalid JSON response with error: {}".format(inst))
if navigate_hash(result, ['error', 'errors']):
module.fail_json(msg=navigate_hash(result, ['error', 'errors']))

View file

@@ -2,6 +2,8 @@
# Ruff configuration
target-version = "py310"
line-length = 120
[tool.ruff.lint]
select = [
"E", # pycodestyle errors
"W", # pycodestyle warnings

View file

@@ -1,3 +1,3 @@
ansible==9.1.0
jinja2~=3.1.3
netaddr
ansible==9.2.0
jinja2~=3.1.6
netaddr==1.3.0

View file

@@ -17,24 +17,28 @@ status_cmd=wg_status
pidfile="/var/run/$name.pid"
load_rc_config "$name"
: ${wg_enable="NO"}
: ${wg_interface="wg0"}
: "${wg_enable=NO}"
: "${wg_interface=wg0}"
wg_up() {
echo "Starting WireGuard..."
/usr/sbin/daemon -cS -p ${pidfile} ${command} up ${wg_interface}
/usr/sbin/daemon -cS -p "${pidfile}" "${command}" up "${wg_interface}"
}
wg_down() {
echo "Stopping WireGuard..."
${command} down ${wg_interface}
"${command}" down "${wg_interface}"
}
wg_status () {
not_running () {
echo "WireGuard is not running on $wg_interface" && exit 1
}
/usr/local/bin/wg show wg0 && echo "WireGuard is running on $wg_interface" || not_running
if /usr/local/bin/wg show wg0; then
echo "WireGuard is running on $wg_interface"
else
not_running
fi
}
run_rc_command "$1"

View file

@@ -5,9 +5,8 @@ This helps identify which tests actually catch bugs vs just failing randomly
"""
import json
import subprocess
import sys
from datetime import datetime, timedelta
from collections import defaultdict
from datetime import datetime, timedelta
from pathlib import Path

View file

@@ -1,8 +1,9 @@
"""Test fixtures for Algo unit tests"""
import os
import yaml
from pathlib import Path
import yaml
def load_test_variables():
"""Load test variables from YAML fixture"""

View file

@@ -4,11 +4,8 @@ Simplified Docker-based localhost deployment tests
Verifies services can start and config files exist in expected locations
"""
import os
import sys
import subprocess
import time
import tempfile
from pathlib import Path
import sys
def check_docker_available():
@@ -95,7 +92,7 @@ def test_docker_algo_image():
return False
# Read Dockerfile and validate basic structure
with open('Dockerfile', 'r') as f:
with open('Dockerfile') as f:
dockerfile_content = f.read()
required_elements = [

View file

@@ -3,12 +3,9 @@
Test that generated configuration files have valid syntax
This validates WireGuard, StrongSwan, SSH, and other configs
"""
import os
import re
import subprocess
import sys
import tempfile
from pathlib import Path
def check_command_available(cmd):
@@ -88,7 +85,7 @@ PersistentKeepalive = 25
errors.append(f"Invalid Endpoint format: {endpoint}")
if errors:
print(f"✗ WireGuard config validation failed:")
print("✗ WireGuard config validation failed:")
for error in errors:
print(f" - {error}")
assert False, "WireGuard config validation failed"
@@ -165,7 +162,7 @@ conn ikev2-pubkey
errors.append(f"Invalid subnet format: {subnet}")
if errors:
print(f"✗ StrongSwan ipsec.conf validation failed:")
print("✗ StrongSwan ipsec.conf validation failed:")
for error in errors:
print(f" - {error}")
assert False, "ipsec.conf validation failed"
@@ -223,7 +220,7 @@ def test_ssh_config_syntax():
errors.append("No Host definition found")
if errors:
print(f"✗ SSH config validation failed:")
print("✗ SSH config validation failed:")
for error in errors:
print(f" - {error}")
assert False, "SSH config validation failed"
@@ -291,7 +288,7 @@ COMMIT
errors.append("Missing stateful connection tracking rule")
if errors:
print(f"✗ iptables rules validation failed:")
print("✗ iptables rules validation failed:")
for error in errors:
print(f" - {error}")
assert False, "iptables rules validation failed"
@@ -358,7 +355,7 @@ addn-hosts=/var/lib/algo/dns/adblock.hosts
errors.append(f"Missing required option: {req}")
if errors:
print(f"✗ dnsmasq config validation failed:")
print("✗ dnsmasq config validation failed:")
for error in errors:
print(f" - {error}")
assert False, "dnsmasq config validation failed"

View file

@@ -5,11 +5,9 @@ This catches undefined variables, syntax errors, and logic bugs
"""
import os
import sys
import tempfile
from pathlib import Path
import yaml
from jinja2 import Environment, FileSystemLoader, StrictUndefined, UndefinedError, TemplateSyntaxError
from jinja2 import Environment, FileSystemLoader, StrictUndefined, TemplateSyntaxError, UndefinedError
# Add parent directory to path for fixtures
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@@ -153,7 +151,7 @@ def test_critical_templates():
errors.append(f"{template_path}: Render error - {e}")
if errors:
print(f"✗ Critical template rendering failed:")
print("✗ Critical template rendering failed:")
for error in errors:
print(f" - {error}")
assert False, "Critical template rendering errors"