#!/usr/bin/env python3
#
# This script generates a global equivalent domains JSON file from
# the upstream Bitwarden source repo.
#
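# Example invocation (script name, output path, and git ref are illustrative):
#
#     ./global_domains.py global_domains.json master
#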
import json
import re
import sys
import urllib.request

from collections import OrderedDict

if not (2 <= len(sys.argv) <= 3):
    print("usage: %s <OUTPUT-FILE> [GIT-REF]" % sys.argv[0])
    print()
    print("This script generates a global equivalent domains JSON file from")
    print("the upstream Bitwarden source repo.")
    sys.exit(1)

OUTPUT_FILE = sys.argv[1]
GIT_REF = 'master' if len(sys.argv) == 2 else sys.argv[2]

BASE_URL = 'https://github.com/bitwarden/server/raw/%s' % GIT_REF
ENUMS_URL = '%s/src/Core/Enums/GlobalEquivalentDomainsType.cs' % BASE_URL
DOMAIN_LISTS_URL = '%s/src/Core/Utilities/StaticStore.cs' % BASE_URL

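# For example, with GIT_REF = 'master', ENUMS_URL resolves to:
#     https://github.com/bitwarden/server/raw/master/src/Core/Enums/GlobalEquivalentDomainsType.cs
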
# Enum lines look like:
#
#     EnumName0 = 0,
#     EnumName1 = 1,
#
ENUM_RE = re.compile(
    r'\s*'              # Leading whitespace (optional).
    r'([_0-9a-zA-Z]+)'  # Enum name (capture group 1).
    r'\s*=\s*'          # '=' with optional surrounding whitespace.
    r'([0-9]+)'         # Enum value (capture group 2).
)

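# Illustrative: ENUM_RE.match('    EnumName0 = 0,') yields groups ('EnumName0', '0').
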
# Global domains lines look like:
#
#     GlobalDomains.Add(GlobalEquivalentDomainsType.EnumName, new List<string> { "x.com", "y.com" });
#
DOMAIN_LIST_RE = re.compile(
    r'\s*'                                               # Leading whitespace (optional).
    r'GlobalDomains\.Add\(GlobalEquivalentDomainsType\.'
    r'([_0-9a-zA-Z]+)'                                   # Enum name (capture group 1).
    r'\s*,\s*new List<string>\s*{'
    r'([^}]+)'                                           # Domain list (capture group 2).
    r'}\);'
)

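# Illustrative: for the example line above, capture group 1 is 'EnumName' and
# capture group 2 is ' "x.com", "y.com" ' (quotes and spaces are stripped below).
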
enums = dict()
domain_lists = OrderedDict()

# Read in the enum names and values.
with urllib.request.urlopen(ENUMS_URL) as response:
    for ln in response.read().decode('utf-8').split('\n'):
        m = ENUM_RE.match(ln)
        if m:
            enums[m.group(1)] = int(m.group(2))

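# Illustrative result, given the example enum lines above:
#     enums == {'EnumName0': 0, 'EnumName1': 1}
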
# Read in the domain lists.
with urllib.request.urlopen(DOMAIN_LISTS_URL) as response:
    for ln in response.read().decode('utf-8').split('\n'):
        m = DOMAIN_LIST_RE.match(ln)
        if m:
            # Strip double quotes and extraneous spaces in each domain.
            domain_lists[m.group(1)] = [d.strip(' "') for d in m.group(2).split(",")]

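# Illustrative result, given the example GlobalDomains.Add() line above:
#     domain_lists == OrderedDict([('EnumName', ['x.com', 'y.com'])])
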
# Build the global domains data structure.
global_domains = []
for name, domain_list in domain_lists.items():
    entry = OrderedDict()
    entry["Type"] = enums[name]
    entry["Domains"] = domain_list
    entry["Excluded"] = False
    global_domains.append(entry)

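# Each entry pairs an enum's numeric value with its domain list and serializes
# to JSON like the following (values illustrative, shown compact):
#     {"Type": 0, "Domains": ["x.com", "y.com"], "Excluded": false}
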
# Write out the global domains JSON file.
with open(OUTPUT_FILE, 'w') as f:
    json.dump(global_domains, f, indent=2)