-
Notifications
You must be signed in to change notification settings - Fork 3
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
8 changed files
with
206 additions
and
71 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,6 @@ | ||
These files are originally from the upstream [HTTPS Everywhere](https://github.com/EFForg/https-everywhere/)
repository maintained by the EFF. That repository has since been archived,
so we have forked the scripts and made modifications to them since.

These files are Copyright © 2010-2021 Electronic Frontier Foundation and others,
under the GPL v2 or, at your option, any later version.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,31 @@ | ||
#!/bin/bash
#
# Package the merged rulesets for release: wrap rulesets/default.rulesets in a
# timestamped, gzipped JSON envelope, then verify an externally produced
# RSA-PSS signature (pasted in as base64, e.g. read from a QR code) against it.
#
# Usage: $0 public_key_file output_path

set -e

if [ $# -ne 2 ]; then
    echo "Usage: $0 public_key_file output_path"
    # Exit non-zero so misuse is not reported as success (the original
    # bare `exit` returned 0 here).
    exit 1
fi

RULESETS_FILE=rulesets/default.rulesets

# Temp file holding the pasted base64 signature; cleaned up on any exit.
SIGNED_SHA256SUM_BASE64=$(mktemp /tmp/ruleset-signature.sha256.base64.XXXXXXXX)
trap 'rm "$SIGNED_SHA256SUM_BASE64"' EXIT

mkdir -p "$2"
TIMESTAMP=$(date +%s)
REFERENCE=$(git rev-parse HEAD)
# Build { "timestamp": ..., "reference": ..., "rulesets": <file> } on one line
# and gzip it without a filename/timestamp header (-n) for reproducibility.
echo "{ \"timestamp\": $TIMESTAMP, \"reference\": \"$REFERENCE\", \"rulesets\":" "$(cat "$RULESETS_FILE")" "}" | tr -d '\n' | gzip -nc > "$2/default.rulesets.$TIMESTAMP.gz"

echo 'Hash for signing: '
sha256sum "$2/default.rulesets.$TIMESTAMP.gz" | cut -f1 -d' '
# Short hash-of-the-hash so the signer can eyeball that both sides agree.
echo metahash for confirmation only "$(sha256sum "$2/default.rulesets.$TIMESTAMP.gz" | cut -f1 -d' ' | tr -d '\n' | sha256sum | cut -c1-6)" ...

echo 'Paste in the data from the QR code, then type Ctrl-D:'
cat | tr -d '\n' > "$SIGNED_SHA256SUM_BASE64"

# Decode the pasted signature and verify it against the gzip blob using
# RSA-PSS with a 32-byte salt; set -e aborts the script if verification fails.
base64 -d "$SIGNED_SHA256SUM_BASE64" > "$2/rulesets-signature.$TIMESTAMP.sha256"
openssl dgst -sha256 -sigopt rsa_padding_mode:pss -sigopt rsa_pss_saltlen:32 -verify "$1" -signature "$2/rulesets-signature.$TIMESTAMP.sha256" "$2/default.rulesets.$TIMESTAMP.gz"

# Record the newest bundle's timestamp so consumers know which one is current.
echo "$TIMESTAMP" > "$2/latest-rulesets-timestamp"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,118 @@ | ||
#!/usr/bin/env python3 | ||
|
||
# Merge all the .xml rulesets into a single "default.rulesets" file -- this
# prevents inodes from wasting disk space, but more importantly, this works
# around the fact that zip does not perform well on a pile of small files.

# Currently, it merges rulesets into a JSON Object for minimal overhead,
# in both storage and parsing speed.
|
||
import argparse
import glob
import json
import os
import stat
import unicodedata
import xml.etree.ElementTree
|
||
|
||
def normalize(f):
    """
    Return *f* normalized to Unicode NFC form.

    OSX and Linux filesystems encode composite characters differently in
    filenames, so we normalize everything to NFC:
    http://unicode.org/reports/tr15/
    """
    return unicodedata.normalize("NFC", f)
|
||
|
||
# Command-line argument parsing (rarely used; the default matches the
# repository layout).
parser = argparse.ArgumentParser(description="Merge rulesets")
parser.add_argument("--source_dir", default="src/chrome/content/rules")

args = parser.parse_args()

# Output filenames for the merged ruleset library.
ofn = os.path.join(args.source_dir, "default.rulesets")
ojson = os.path.join(args.source_dir, "default.rulesets.json")

# XML ruleset files; normalize the names so OSX and Linux behave identically.
files = map(normalize, glob.glob(os.path.join(args.source_dir, "*.xml")))

# Under git bash, sed -i issues errors and sets the file "read-only", so make
# any previous outputs writable again before we overwrite them. Use os.chmod
# instead of shelling out with os.system, which was slower and broke on
# filenames containing shell metacharacters or spaces.
for _path in (ofn, ojson):
    if os.path.isfile(_path):
        os.chmod(_path, os.stat(_path).st_mode | stat.S_IWUSR)
|
||
# Library (JSON Object) | ||
# Library of all rulesets, serialized later as a single JSON array.
library = []

# Parse each XML ruleset file and build the corresponding JSON object.
print(" * Parsing XML ruleset and constructing JSON library...")
for filename in sorted(files):
    root = xml.etree.ElementTree.parse(filename).getroot()

    # Start from the <ruleset> element's own attributes.
    ruleset = dict(root.attrib)
    # Single merged entry for all securecookies whose name pattern is ".+".
    trivial_cookie = None

    for child in root:
        tag = child.tag
        if tag not in ("target", "rule", "securecookie", "exclusion"):
            continue
        # Create the list on first sight so JSON key order matches
        # element-encounter order.
        ruleset.setdefault(tag, [])

        if tag == "target":
            ruleset["target"].append(child.attrib["host"])

        elif tag == "rule":
            ruleset["rule"].append(
                {"from": child.attrib["from"], "to": child.attrib["to"]}
            )

        elif tag == "securecookie":
            if child.attrib["name"] == ".+":
                # Collapse trivial-name securecookies into one entry by
                # OR-ing their host patterns together.
                if not trivial_cookie:
                    trivial_cookie = {"host": child.attrib["host"], "name": ".+"}
                else:
                    trivial_cookie["host"] += "|" + child.attrib["host"]
            else:
                ruleset["securecookie"].append(
                    {"host": child.attrib["host"], "name": child.attrib["name"]}
                )

        else:  # exclusion: merge all patterns into a single alternation
            if ruleset["exclusion"]:
                ruleset["exclusion"][0] += "|" + child.attrib["pattern"]
            else:
                ruleset["exclusion"].append(child.attrib["pattern"])

    if trivial_cookie:
        # Put the merged trivial-name cookie first, like the original layout.
        ruleset["securecookie"].insert(0, trivial_cookie)

    library.append(ruleset)
|
||
# Write the merged library to both output files. Serialize once (the two
# files are identical) and use context managers so the files are closed
# even if a write fails; the originals were closed manually and the JSON
# was dumped twice.
print(" * Writing JSON library to %s and %s" % (ofn, ojson))
serialized = json.dumps(library, separators=(",", ":"))

with open(ofn, "w", encoding="utf-8") as outfile:
    outfile.write(serialized)
with open(ojson, "w", encoding="utf-8") as jsonout:
    jsonout.write(serialized)

# Everything is okay.
print(" * Everything is okay.")