It looks like at some point the RAX bind output changed format slightly,
which messed up our backup script. Rework it to parse the current output.

This parsing is obviously a little fragile ... it is nice to have the
output sorted and lined up nicely (like our manually maintained
opendev.org bind files...). If the format changes again and this becomes
a problem, maybe we switch to dumping the RAX output directly and forget
about formatting it nicely.

Change-Id: I742dd6ef9ffdb377274b384b847625c98dd5ff16
#!/usr/bin/env python3

# Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

#
# Export domains for a given user/project
#
# Set auth values in the environment, or in a .ini file specified with
# --config:
#
# RACKSPACE_USERNAME = used to login to web
# RACKSPACE_PROJECT_ID = listed on account info
# RACKSPACE_API_KEY = listed in the account details page
#
# By default exports all domains, filter the list with --domains=
#
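# For reference, the --config file is read with configparser from its
# [DEFAULT] section; a minimal example (values are illustrative only):
#
#   [DEFAULT]
#   RACKSPACE_USERNAME = some-user
#   RACKSPACE_PROJECT_ID = 123456
#   RACKSPACE_API_KEY = 0123456789abcdef
#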

import argparse
import configparser
import collections
import datetime
import glob
import logging
import os
import requests
import sys
import time

RACKSPACE_IDENTITY_ENDPOINT = 'https://identity.api.rackspacecloud.com/v2.0/tokens'
RACKSPACE_DNS_ENDPOINT = "https://dns.api.rackspacecloud.com/v1.0"

RACKSPACE_PROJECT_ID = os.environ.get('RACKSPACE_PROJECT_ID', None)
RACKSPACE_USERNAME = os.environ.get('RACKSPACE_USERNAME', None)
RACKSPACE_API_KEY = os.environ.get('RACKSPACE_API_KEY', None)
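
# These environment values are only defaults; if the --config file can be
# read, main() below overwrites them with the values from that file.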


def get_auth_token(session):
    # Get auth token
    data = {
        'auth': {
            'RAX-KSKEY:apiKeyCredentials': {
                'username': RACKSPACE_USERNAME,
                'apiKey': RACKSPACE_API_KEY
            }
        }
    }
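    # POST the apiKeyCredentials payload to the identity endpoint; the
    # token id comes back nested under access -> token -> id.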
    token_response = session.post(url=RACKSPACE_IDENTITY_ENDPOINT, json=data)
    token = token_response.json()['access']['token']['id']

    return token


def get_domain_list(session, token):
    # List all domains
    domain_list_url = "%s/%s/domains" % (RACKSPACE_DNS_ENDPOINT,
                                         RACKSPACE_PROJECT_ID)
    headers = {
        'Accept': 'application/json',
        'X-Auth-Token': token,
        'X-Project-Id': RACKSPACE_PROJECT_ID,
        'Content-Type': 'application/json'
    }
    domain_list_response = session.get(url=domain_list_url, headers=headers)
    return domain_list_response.json()['domains']


def get_domain_id(session, token, domain):
    # Find domain id
    domain_url = "%s/%s/domains/search" % (RACKSPACE_DNS_ENDPOINT,
                                           RACKSPACE_PROJECT_ID)
    headers = {
        'Accept': 'application/json',
        'X-Auth-Token': token,
        'X-Project-Id': RACKSPACE_PROJECT_ID,
        'Content-Type': 'application/json'
    }

    query = {'name': domain}
    domain_response = session.get(url=domain_url, params=query, headers=headers)
    domains = domain_response.json()

    for d in domains['domains']:
        if d['name'] == domain:
            return d

    logging.error("Did not find domain: %s" % domain)
    sys.exit(1)


def do_bind_export(session, token, domain_id, outfile):
    # export to file
    headers = {
        'Accept': 'application/json',
        'X-Auth-Token': token,
        'X-Project-Id': RACKSPACE_PROJECT_ID,
        'Content-Type': 'application/json'
    }

    # Run export
    export_url = '%s/%s/domains/%s/export' % (RACKSPACE_DNS_ENDPOINT,
                                              RACKSPACE_PROJECT_ID,
                                              domain_id)

    # We get a callback URL; we should loop around and correctly
    # detect the completed status and timeout and whatnot. But we
    # just sleep and that's enough.
    export_response = session.get(url=export_url, headers=headers)
    if export_response.status_code != 202:
        logging.error("Didn't get export callback?")
        sys.exit(1)
    r = export_response.json()
    callback_url = r['callbackUrl']
    time.sleep(2)

    query = {'showDetails': 'true'}
    final_response = session.get(callback_url, params=query, headers=headers)

    bind_output = final_response.json()['response']['contents'].split('\n')
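
    # Each non-blank line after the header block is one space-separated
    # record; fields[0] is the owner name, which is used below for
    # grouping and column alignment.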

    output = []

    # Read and parse the record lines for sorting; the skip is because
    # the first 3 lines are comments and the rest are SOA records
    # (written separately below).
    for line in bind_output[10:]:
        if line == '':
            continue
        fields = line.split(' ')
        output.append(fields)

    # find padding space for the first column so everything lines up nice
    max_first = max([len(x[0]) for x in output])

    # create a dict keyed by domain with each record
    out_dict = collections.defaultdict(list)
    for domain in output:
        out_dict[domain[0]].append(domain[1:])

    outstr = ''

    # first output comments and SOA from original
    for line in bind_output[:10]:
        outstr += "%s\n" % line
    outstr += '\n'

    # print out the rest of the entries, with individual records
    # sorted and grouped
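    # (each name is left-justified to the widest first column, the rest of
    # its fields are tab-joined, and a blank line separates names, so the
    # dump stays close to our hand-maintained zone files)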
    for domain in sorted(out_dict):
        records = out_dict[domain]
        # sort records by type
        records.sort(key=lambda x: x[1])
        for record in records:
            outstr += ("%-*s\t%s\n" % (max_first+1, domain, '\t'.join(record)))
        outstr += '\n'

    with open(outfile, 'w') as f:
        f.write(outstr)


def main():
    parser = argparse.ArgumentParser(description='Dump Rackspace DNS domains')
    parser.add_argument('--domains', dest='domains',
                        help='Comma separated list of domains to export')
    parser.add_argument('--output-dir', dest='output_dir',
                        default='/var/lib/rax-dns-backup')
    parser.add_argument('--config', dest='config',
                        default='/etc/rax-dns-auth.conf')
    parser.add_argument('--keep', dest='keep', type=int, default=30)
    parser.add_argument('--debug', dest='debug', action='store_true')
    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO)
    if args.debug:
        logging.getLogger().setLevel(logging.DEBUG)
        requests_log = logging.getLogger("requests.packages.urllib3")
        requests_log.setLevel(logging.DEBUG)
        requests_log.propagate = True

    logging.debug("Starting")
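
    # Prefer auth values from the config file; if it can't be read, fall
    # back to whatever was set in the environment.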
    try:
        logging.info("Reading config file %s" % args.config)
        config = configparser.ConfigParser()
        config.read(args.config)
        global RACKSPACE_PROJECT_ID
        global RACKSPACE_USERNAME
        global RACKSPACE_API_KEY
        RACKSPACE_PROJECT_ID = config['DEFAULT']['RACKSPACE_PROJECT_ID']
        RACKSPACE_USERNAME = config['DEFAULT']['RACKSPACE_USERNAME']
        RACKSPACE_API_KEY = config['DEFAULT']['RACKSPACE_API_KEY']
    except Exception:
        logging.info("Skipping config read")

    if (not RACKSPACE_PROJECT_ID) or \
       (not RACKSPACE_USERNAME) or \
       (not RACKSPACE_API_KEY):
        logging.error("Must set auth variables!")
        sys.exit(1)

    if not os.path.isdir(args.output_dir):
        logging.error("Output directory does not exist")
        sys.exit(1)

    session = requests.Session()
    token = get_auth_token(session)

    if args.domains:
        to_dump = []
        domains = args.domains.split(',')
        for domain in domains:
            logging.debug("Looking up domain: %s" % domain)
            to_dump.append(get_domain_id(session, token, domain))
    else:
        to_dump = get_domain_list(session, token)

    date_suffix = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.db")
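    # The timestamp suffix sorts lexicographically in date order, which the
    # cleanup below relies on to keep only the newest --keep dumps.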

    for domain in to_dump:
        outfile = os.path.join(
            args.output_dir, "%s_%s" % (domain['name'], date_suffix))
        logging.info("Dumping %s to %s" % (domain['name'], outfile))

        do_bind_export(session, token, domain['id'], outfile)

        # cleanup old runs
        old_files = glob.glob(os.path.join(args.output_dir,
                                           '%s_*.db' % domain['name']))
        old_files.sort()
        for f in old_files[:-args.keep]:
            logging.info("Cleaning up old output: %s" % f)
            os.unlink(f)


if __name__ == "__main__":
    main()