summaryrefslogtreecommitdiff
path: root/utils
diff options
context:
space:
mode:
Diffstat (limited to 'utils')
-rwxr-xr-xutils/generate_whitelist.py46
-rwxr-xr-xutils/magento1_whitelist.sh56
-rwxr-xr-xutils/magento2_whitelist.sh83
-rwxr-xr-xutils/mass_whitelist.py208
4 files changed, 393 insertions, 0 deletions
diff --git a/utils/generate_whitelist.py b/utils/generate_whitelist.py
new file mode 100755
index 0000000..dabaa21
--- /dev/null
+++ b/utils/generate_whitelist.py
@@ -0,0 +1,46 @@
#!/usr/bin/env python
#coding=UTF-8
"""Scan a folder with the project's YARA rules and emit a hash whitelist.

For every file matching ../php.yar, print a YARA condition line of the form
`hash.sha1(0, filesize) == "<sha1>" or // <path>`, wrapped in a rule named
after the first word of argv[1].

Usage: generate_whitelist.py <name_of_the_rule_and_version> <folder_to_scan>
"""

import fnmatch
import hashlib
import os
import sys

try:
    import yara
except ImportError:
    # Diagnostics go to stderr: stdout is reserved for the generated rule,
    # which callers pipe through grep (see utils/magento*_whitelist.sh).
    sys.stderr.write('Please install python-yara\n')
    sys.exit(1)

if len(sys.argv) != 3:
    sys.stderr.write('Usage: %s name_of_the_rule_and_version folder_to_scan\n' % sys.argv[0])
    sys.exit(1)

if not os.path.isdir(sys.argv[2]):
    sys.stderr.write('%s is not a folder !\n' % sys.argv[2])
    sys.exit(1)

# Locate php.yar relative to this script file itself, not sys.path[0],
# which an embedding process may have altered or emptied.
script_dir = os.path.dirname(os.path.abspath(__file__))
try:
    rules = yara.compile(os.path.join(script_dir, '..', 'php.yar'),
                         includes=True, error_on_warning=False)
except yara.SyntaxError as e:
    sys.stderr.write("Can't compile rules: %s\n" % e)
    sys.exit(1)

output_list = []

for curdir, dirnames, filenames in os.walk(sys.argv[2]):
    for filename in filenames:
        fname = os.path.join(curdir, filename)
        try:
            size = os.stat(fname).st_size
        except OSError:
            # broken symlink, or the file vanished mid-walk: skip it
            continue
        # Only hash non-empty files smaller than 5 MiB (same bound as before).
        if 0 < size < 5 * 1024 * 1024:
            matches = rules.match(fname, fast=True)
            if matches:
                with open(fname, 'rb') as f:
                    digest = hashlib.sha1(f.read()).hexdigest()
                output_list.append('hash.sha1(0, filesize) == "%s" or // %s' % (digest, fname))


if output_list:
    # Rule name = first word of argv[1]; the full argv[1] becomes a comment.
    # Each condition line ends in "or", so the trailing "false" closes the chain.
    output_rule = 'import "hash"\n\nrule %s\n{\n\tcondition:\n\t\t/* %s */\n\t\t' % (sys.argv[1].split(' ')[0], sys.argv[1])
    output_rule += '\n\t\t'.join(output_list)
    output_rule += '\n\t\tfalse\n}'
    print(output_rule)
diff --git a/utils/magento1_whitelist.sh b/utils/magento1_whitelist.sh
new file mode 100755
index 0000000..a747f80
--- /dev/null
+++ b/utils/magento1_whitelist.sh
@@ -0,0 +1,56 @@
#!/bin/bash
# Regenerate whitelists/magento1ce.yar from every Magento 1 CE release tag.
# Quit script if something goes wrong
set -o errexit -o nounset -o pipefail;

SCRIPTDIR="$( dirname "$(readlink -f "$0")" )";
OUTFILE="${SCRIPTDIR}/../whitelists/magento1ce.yar";
TMPFILE="${OUTFILE}.new";

# First empty the target whitelist so we can completely generate a new one
cat <<EOF >"${OUTFILE}";
private rule Magento1Ce : ECommerce
{
    condition:
        false
}
EOF

# Create a temporary directory and make sure it is empty
GENTEMPDIR="$( mktemp -d --suffix="_gen_whitelist_m1" )";

# Add header to whitelist tempfile
cat <<EOF | tee "${TMPFILE}";
private rule Magento1Ce : ECommerce
{
    condition:
EOF

# Fetch tags (releases) from Github repo, in version order.
# FIX: added 'sort -V' so releases are processed oldest-to-newest, matching
# magento2_whitelist.sh and keeping the generated whitelist stably ordered.
TAGS=$( git ls-remote --tags https://github.com/OpenMage/magento-mirror.git | cut -d '/' -f3 | grep -P '^[\d\.]+$' | sort -V );

# Foreach tag (release)
while read -r TAG; do
    # Download tarball of release
    wget "https://github.com/OpenMage/magento-mirror/archive/${TAG}.tar.gz" -O "${GENTEMPDIR}/${TAG}.tgz";
    # Unpack tarball
    tar -C "${GENTEMPDIR}" -xpzf "${GENTEMPDIR}/${TAG}.tgz";
    # Add version comment to whitelist tempfile
    echo "        /* Magento CE ${TAG} */" | tee -a "${TMPFILE}";
    # Generate whitelist for version, add output to whitelist tempfile.
    # FIX: the generator path is quoted so a SCRIPTDIR containing spaces
    # cannot break the call.
    "${SCRIPTDIR}/generate_whitelist.py" "Magento CE ${TAG}" "${GENTEMPDIR}/magento-mirror-${TAG}" | grep 'hash.sha1' | sed "s|// ${GENTEMPDIR}/magento-mirror-${TAG}/|// |" | tee -a "${TMPFILE}";
    # Add white line, with indent
    echo "        " | tee -a "${TMPFILE}";
done <<< "${TAGS}";

# Add footer to whitelist tempfile
cat <<EOF | tee -a "${TMPFILE}";
        false
}
EOF

# Copy temporary file to target whitelist while removing duplicate lines except empty ones
cat "${TMPFILE}" | awk 'match($0,/^\s*$/)||!seen[$0]++' > "${OUTFILE}";

# Clean up
rm "${TMPFILE}";
rm -rf "${GENTEMPDIR}";
diff --git a/utils/magento2_whitelist.sh b/utils/magento2_whitelist.sh
new file mode 100755
index 0000000..bb742c8
--- /dev/null
+++ b/utils/magento2_whitelist.sh
@@ -0,0 +1,83 @@
#!/bin/bash
# Regenerate whitelists/magento2.yar from every Magento 2 release tag.
# Quit script if something goes wrong
set -o errexit -o nounset -o pipefail;

SCRIPTDIR="$( dirname "$(readlink -f "$0")" )";
OUTFILE="${SCRIPTDIR}/../whitelists/magento2.yar";
TMPFILE="${OUTFILE}.new";

# First empty the target whitelist so we can completely generate a new one
cat <<EOF >"${OUTFILE}";
private rule Magento2 : ECommerce
{
    condition:
        false
}
EOF

# Create a temporary directory and make sure it is empty
GENTEMPDIR="$( mktemp -d --suffix="_gen_whitelist_m2" )";

# Composer access tokens: 'composer install' below needs GitHub and
# repo.magento.com credentials, so seed a template auth.json and let the
# user fill it in interactively before the long crawl starts.
if [ ! -f "${HOME}/.composer/auth.json" ]; then
    echo -e "\nYou have no '.composer/auth.json' in your home dir. We will create it from a template and open an editor.";
    echo -e "Press [Enter] to continue. Press Ctrl-C if you wish to leave.";
    read;
    mkdir -p "${HOME}/.composer";
    cat <<EOF >"${HOME}/.composer/auth.json"
{
    "INFO_GITHUB": "==== GET TOKEN: https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/ ====",
    "github-oauth": {
        "github.com": "---github-token-goes-here---"
    },
    "INFO_MAGENTO": "==== GET TOKEN: https://devdocs.magento.com/guides/v2.0/install-gde/prereq/connect-auth.html ====",
    "http-basic": {
        "repo.magento.com": {
            "username": "---public-key-goes-here---",
            "password": "---private-key-goes-here---"
        }
    }
}
EOF
    editor "${HOME}/.composer/auth.json";
fi

# Add header to whitelist tempfile
cat <<EOF | tee "${TMPFILE}";
private rule Magento2 : ECommerce
{
    condition:
EOF

# Fetch tags (releases) from Github repo, sorted oldest-to-newest
TAGS=$( git ls-remote --tags https://github.com/magento/magento2.git | cut -d '/' -f3 | grep -P '^[\d\.]+$' | sort -V );

# Foreach tag (release)
while read -r TAG; do
    # Download tarball of release
    wget "https://github.com/magento/magento2/archive/${TAG}.tar.gz" -O "${GENTEMPDIR}/${TAG}.tgz";
    # Unpack tarball
    tar -C "${GENTEMPDIR}" -xpzf "${GENTEMPDIR}/${TAG}.tgz";
    # Run 'composer install' inside unpacked release so vendor/ code
    # is present and gets whitelisted as well
    SOURCEDIR="${GENTEMPDIR}/magento2-${TAG}";
    composer --working-dir="${SOURCEDIR}" -- install;
    # Add version comment to whitelist
    echo "        /* Magento2 ${TAG} */" | tee -a "${TMPFILE}";
    # Generate whitelist for version, add output to whitelist tempfile
    # (keep only the hash lines; strip the temp-dir prefix from path comments)
    ${SCRIPTDIR}/generate_whitelist.py "Magento2 ${TAG}" "${SOURCEDIR}" | grep 'hash.sha1' | sed "s|// ${SOURCEDIR}/|// |" | tee -a "${TMPFILE}";
    # Add white line, with indent
    echo "        " | tee -a "${TMPFILE}";
done <<< "${TAGS}";

# Add footer to whitelist tempfile
cat <<EOF | tee -a "${TMPFILE}";
        false
}
EOF

# Copy temporary file to target whitelist while removing duplicate lines except empty ones
cat "${TMPFILE}" | awk 'match($0,/^\s*$/)||!seen[$0]++' > "${OUTFILE}";

# Clean up
rm "${TMPFILE}";
rm -rf "${GENTEMPDIR}";
diff --git a/utils/mass_whitelist.py b/utils/mass_whitelist.py
new file mode 100755
index 0000000..868f7b5
--- /dev/null
+++ b/utils/mass_whitelist.py
@@ -0,0 +1,208 @@
1#!/usr/bin/env python
2# -*- coding: utf-8 -*-
3
4from __future__ import print_function
5
6import sys
7import tarfile
8from copy import copy
9from datetime import datetime
10from collections import OrderedDict
11from hashlib import sha1
12from urllib2 import urlopen, HTTPError
13from StringIO import StringIO
14
15import yara
16
# CLI help text; %(prog)s is substituted with the script name at import time.
USAGE = """
USAGE: %(prog)s <NAME> <URL_PATTERN> [<MAJOR> [<MINOR> [<PATCH>]]]

Options:
    NAME         : name of the CMS/whatever being whitelisted
    URL_PATTERN  : download URL with __version__ as a version placeholder
    MAJOR        : minimum and maximum major version to crawl (eg: 1-8, 8)
    MINOR        : minimum and maximum minor version to crawl
    PATCH        : minimum and maximum patch version to crawl

Examples:
    %(prog)s drupal https://ftp.drupal.org/files/projects/drupal-__version__.tar.gz 9 50
    %(prog)s drupal https://ftp.drupal.org/files/projects/drupal-__version__.tar.gz 4-9 1-50

    %(prog)s wordpress https://wordpress.org/wordpress-__version__.tar.gz 4 15

    %(prog)s symphony https://github.com/symfony/symfony/archive/v__version__.tar.gz 3 9

    %(prog)s phpmyadmin https://files.phpmyadmin.net/phpMyAdmin/__version__/phpMyAdmin-__version__-all-languages.tar.gz 4 9
""" % {'prog': sys.argv[0]}
37
38
class Opts:
    """Shared namespace for configuration.

    Holds compile-time constants plus the CLI-derived attributes
    (CMS_NAME, URL_PATTERN, MIN_*/MAX_*) assigned at startup.
    """

    DEFAULT_MIN = 0
    DEFAULT_MAX = 99
    # Compiled once at import; note php.yar is looked up next to the script.
    YARA_RULES = yara.compile(sys.path[0]+'/../php.yar', includes=True, error_on_warning=True)

    @classmethod
    def to_str(cls):
        """Render every UPPERCASE attribute as a debug-friendly string."""
        pairs = ['%s=%s' % (name, getattr(cls, name))
                 for name in cls.__dict__ if name.isupper()]
        return '<Opts(%s)>' % ' '.join(pairs)
51
52
def eprint(*args, **kwargs):
    """print() twin that writes to stderr, leaving stdout for the rule output."""
    print(*args, file=sys.stderr, **kwargs)
55
56
def extract_version_arg(index):
    """Parse sys.argv[index] as a version range.

    Accepts "lo-hi" or a bare "hi" (the minimum then stays at
    Opts.DEFAULT_MIN); a missing argument yields the full default range.
    Returns a (min, max) tuple of ints.
    """
    lo, hi = Opts.DEFAULT_MIN, Opts.DEFAULT_MAX
    if len(sys.argv) > index:
        spec = sys.argv[index]
        if '-' in spec:
            lo, hi = [int(part) for part in spec.split('-')]
        else:
            hi = int(spec)
    return lo, hi
65
66
def generate_whitelist(version):
    """Download the release archive for *version* and scan it in memory.

    Returns a dict {relative_path: sha1_hex} for every archive member that
    matches the YARA rules, or None when the download fails (HTTP error or
    non-200), so callers can tell "release does not exist" (None) apart
    from "release exists but nothing matched" ({}).
    """
    rules = {}

    # download archive
    dl_failed = False
    download_url = Opts.URL_PATTERN.replace('__version__', version)
    download_url_str = Opts.URL_PATTERN.replace('__version__', '\x1b[1;33m%s\x1b[0m' % version)
    eprint("[+] Downloading %s... " % download_url_str, end='')
    # BUG FIX: the progress text above went to stderr (eprint), so stderr —
    # not stdout — must be flushed for the partial line to show immediately.
    sys.stderr.flush()
    try:
        resp = urlopen(download_url)
        resp_code = resp.code
    except HTTPError as err:
        dl_failed = True
        resp_code = err.code
    if dl_failed or (resp_code != 200):
        eprint("\x1b[1;31mFAILED (%d)\x1b[0m" % resp_code)
        return None
    data = StringIO(resp.read())
    data.seek(0)
    eprint("\x1b[1;32mOK\x1b[0m")

    # extract archive and check against YARA signatures (in-memory)
    eprint("[-] Generating whitelist... ", end='')
    sys.stderr.flush()
    tar = tarfile.open(mode='r:gz', fileobj=data)
    try:
        for entry in tar.getnames():
            entry_fd = tar.extractfile(entry)
            if entry_fd is None:
                # directory or special member: nothing to hash
                continue
            entry_data = entry_fd.read()
            matches = Opts.YARA_RULES.match(data=entry_data, fast=True)
            if matches:
                # strip the leading "<project>-<version>/" archive directory
                rules['/'.join(entry.split('/')[1:])] = sha1(entry_data).hexdigest()
    finally:
        tar.close()  # release the in-memory archive promptly
    eprint("\x1b[1;32mDONE\x1b[0m")

    return rules
104
105
# init vars
# Collected results: {version_string: {relative_path: sha1_digest}},
# insertion-ordered so the final rule lists versions in crawl order.
whitelists = OrderedDict()

# check args: NAME and URL_PATTERN are mandatory, the three ranges optional
if (len(sys.argv) < 3) or (len(sys.argv) > 6):
    eprint(USAGE)
    sys.exit(1)

# parse args
Opts.CMS_NAME = sys.argv[1]
Opts.URL_PATTERN = sys.argv[2]
# Each range argument may be "lo-hi", a bare "hi", or absent (full default range).
Opts.MIN_MAJOR, Opts.MAX_MAJOR = extract_version_arg(3)
Opts.MIN_MINOR, Opts.MAX_MINOR = extract_version_arg(4)
Opts.MIN_PATCH, Opts.MAX_PATCH = extract_version_arg(5)
120
# loop over possible versions: try "X", then "X.Y", then "X.Y.Z" URLs,
# recording whichever releases exist and produce YARA matches
for vmajor in range(Opts.MIN_MAJOR, Opts.MAX_MAJOR + 1):
    # download without vminor and vpatch (but ignore if it doesn't exist)
    version = "%d" % vmajor
    rules = generate_whitelist(version)
    # keep only versions that both exist (not None) and matched something
    if (rules is not None) and rules:
        whitelists[version] = rules

    has_mversion = False
    first_mloop = True
    for vminor in range(Opts.MIN_MINOR, Opts.MAX_MINOR + 1):
        # download without vpatch (but ignore if it doesn't exist)
        version = "%d.%d" % (vmajor, vminor)
        rules = generate_whitelist(version)
        if rules is not None:
            has_mversion = True
            if rules:
                whitelists[version] = rules
        # NOTE(review): the minor-level early-exit below is deliberately
        # disabled — every minor in the configured range is probed even
        # after consecutive download failures.
        #if (rules is None) and (has_mversion or not first_mloop):
        #    break
        first_mloop = False

        has_pversion = False
        first_ploop = True
        for vpatch in range(Opts.MIN_PATCH, Opts.MAX_PATCH + 1):
            version = "%d.%d.%d" % (vmajor, vminor, vpatch)
            rules = generate_whitelist(version)
            if rules is not None:
                has_pversion = True
                if rules:
                    whitelists[version] = rules
            # break loop if download failed and:
            # - a version has already been found during this loop
            # - this is the 2nd iteration (if a version wasn't found,
            #   it means download failed twice)
            if (rules is None) and (has_pversion or not first_ploop):
                break
            first_ploop = False
159
# remove duplicate entries:
# a (filename, digest) pair already listed for an earlier version must not be
# re-listed for a later one, and versions left with no unique files are dropped.
eprint("[+] Deduplicating detections... ", end='')
# FIX: a set gives O(1) membership tests instead of the former O(n) list scan,
# and snapshotting keys with list() makes delete-while-iterating safe on both
# Python 2 and Python 3 (copy(dict.items()) is Python-2-only).
known_files = set()
for version in list(whitelists):
    rules = whitelists[version]
    for filename in list(rules):
        rtuple = (filename, rules[filename])
        if rtuple in known_files:
            del rules[filename]
        else:
            known_files.add(rtuple)
    if not rules:
        # every file of this version was already covered elsewhere
        del whitelists[version]
eprint("\x1b[1;32mDONE\x1b[0m")
175
eprint("[+] Generating final whitelist... ", end='')
# build final rule
prefix = 8 * ' '  # indentation of condition lines inside the YARA rule body
conditions = []
len_wl = len(whitelists.keys()) - 1  # index of the last version block
for index, (version, rules) in enumerate(whitelists.items()):
    cond_str = '%s/* %s %s */\n' % (prefix, Opts.CMS_NAME.title(), version)
    len_rules = len(rules.keys()) - 1  # index of the last hash in this version
    for inner_index, (filename, digest) in enumerate(rules.items()):
        # the very last hash line must not end with "or", or the rule won't parse
        if (index == len_wl) and (inner_index == len_rules): # last loop iteration
            cond_str += '%shash.sha1(0, filesize) == "%s" // %s\n' % (prefix, digest, filename)
        else:
            cond_str += '%shash.sha1(0, filesize) == "%s" or // %s\n' % (prefix, digest, filename)
    conditions.append(cond_str)
eprint("\x1b[1;32mDONE\x1b[0m")
191
# Assemble the final private YARA rule and emit it on stdout; all progress
# went to stderr, so stdout can be redirected straight into a .yar file.
final_rule = """
import "hash"

private rule %(name)s
{
    meta:
        generated = "%(gendate)s"

    condition:
%(conditions)s
}
""" % {
    'name': Opts.CMS_NAME.title(),
    'gendate': datetime.now().isoformat(),
    'conditions': '\n'.join(conditions)
}
print(final_rule)