Remove unused tools
Spring-cleaning of the tools/ directory: since most of these tools are rarely (if ever) used, remove them to clear out the clutter.

Change-Id: I683de39ffbf0cfa56deb9809a1ae77deadb752a2
parent a9333843e0
commit 65ae792dd6
@@ -109,22 +109,6 @@ it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

pip-download
------------

`pip-download` is a small helper utility that interacts with pip and the pip API to
download packages into a given directory (using common download and extraction
cache subdirectories). It also automatically prunes duplicate downloads of the
same project name (which pip appears to produce sometimes, as in the distribute
and setuptools fiasco). This helps avoid needless duplication::

    $ ./tools/pip-download -d /tmp/e 'setuptools>0.8' 'flake8'
    Saved /tmp/e/flake8-2.0.tar.gz
    Saved /tmp/e/mccabe-0.2.1.tar.gz
    Saved /tmp/e/pep8-1.4.6.tar.gz
    Saved /tmp/e/pyflakes-0.7.3.tar.gz
    Saved /tmp/e/setuptools-0.9.8.tar.gz


specprint
---------
@@ -217,11 +201,6 @@ the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.


git-changelog
-------------

This tool generates a pretty software changelog from git history.


build-install-node-from-source.sh
---------------------------------

@@ -233,11 +212,6 @@ build-openvswitch.sh

Helps build the latest `openvswitch` from source into rpms.

clean-pip
---------

This utility removes packages installed by pip but not by rpm.

clear-dns.sh
------------

@@ -247,23 +221,3 @@ img-uploader
------------

Helper tool to upload images to glance using your anvil settings.

validate-yaml
-------------

Validates that a yaml file is formatted correctly.

yaml-pretty
-----------

Pretty-prints yaml into a standard format.

resize.sh
---------

Resizes an image's filesystem using guestfish.

euca.sh
-------

Creates ec2 keys for use with nova.
@@ -1,14 +0,0 @@
#!/bin/bash

# This utility removes packages installed by pip
# but not by rpm.

tmp_dir=$(mktemp -d)

echo "Moving unowned files to $tmp_dir"

for f in /usr/lib*/python*/site-packages/*; do
    if ! rpm -qf $f &>/dev/null; then
        mv -v $f $tmp_dir/
    fi
done
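For context, a minimal dry-run sketch of the same idea in Python (not part of the removed tool; it assumes Python 3 and only lists the files that no rpm owns instead of moving them)::

    #!/usr/bin/env python
    # Dry-run sketch: list site-packages entries that no installed rpm owns.
    import glob
    import subprocess

    for path in glob.glob('/usr/lib*/python*/site-packages/*'):
        # rpm -qf exits non-zero when no installed package owns the file.
        owned = subprocess.call(['rpm', '-qf', path],
                                stdout=subprocess.DEVNULL,
                                stderr=subprocess.DEVNULL) == 0
        if not owned:
            print(path)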
@@ -1,91 +0,0 @@
#!/usr/bin/env python

from optparse import OptionParser

import os
import sys
import yaml

possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
                                                os.pardir,
                                                os.pardir))

if os.path.exists(os.path.join(possible_topdir,
                               'anvil',
                               '__init__.py')):
    sys.path.insert(0, possible_topdir)


from anvil.components.helpers import keystone
from anvil import importer
from anvil import log as logging
from anvil import passwords
from anvil import utils


def get_token():
    pw_storage = passwords.KeyringProxy(path='/etc/anvil/passwords.cfg')
    lookup_name = "service_token"
    prompt = "Please enter the password for %s: " % ('/etc/anvil/passwords.cfg')
    (exists, token) = pw_storage.read(lookup_name, prompt)
    if not exists:
        pw_storage.save(lookup_name, token)
    return token


def replace_services_endpoints(token, options):
    client = importer.construct_entry_point("keystoneclient.v2_0.client:Client",
                                            token=token, endpoint=options.keystone_uri)
    current_endpoints = client.endpoints.list()
    current_services = client.services.list()

    def filter_resource(r):
        raw = dict(r.__dict__)  # Can't access the raw attrs, arg...
        raw_cleaned = {}
        for k, v in raw.items():
            if k == 'manager' or k.startswith('_'):
                continue
            raw_cleaned[k] = v
        return raw_cleaned

    for e in current_endpoints:
        print("Deleting endpoint: ")
        print(utils.prettify_yaml(filter_resource(e)))
        client.endpoints.delete(e.id)

    for s in current_services:
        print("Deleting service: ")
        print(utils.prettify_yaml(filter_resource(s)))
        client.services.delete(s.id)

    if options.file:
        with(open(options.file, 'r')) as fh:
            contents = yaml.load(fh)
        set_contents = {
            'services': contents.get('services', []),
            'endpoints': contents.get('endpoints', []),
        }
        print("Regenerating with:")
        print(utils.prettify_yaml(set_contents))
        set_contents['users'] = []
        set_contents['roles'] = []
        set_contents['tenants'] = []
        initer = keystone.Initializer(token, options.keystone_uri)
        initer.initialize(**set_contents)


def main():
    parser = OptionParser()
    parser.add_option("-k", '--keystone', dest='keystone_uri',
                      help='keystone endpoint uri to authenticate with', metavar='KEYSTONE')
    parser.add_option("-f", '--file', dest='file',
                      help='service and endpoint creation file', metavar='FILE')
    (options, args) = parser.parse_args()
    if not options.keystone_uri or not options.file:
        parser.error("options are missing, please try -h")
    logging.setupLogging(logging.DEBUG)
    replace_services_endpoints(get_token(), options)


if __name__ == "__main__":
    sys.exit(main())
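The -f file above is read for its 'services' and 'endpoints' keys and handed straight to anvil's keystone.Initializer, so the exact entry format is whatever that helper expects. The sketch below is only an illustrative guess at such a file; the service name, type, region and URLs are made up, not anvil's documented schema::

    # Hypothetical sketch of a service/endpoint creation file; the keys and
    # values are assumptions used for illustration only.
    import yaml

    example = {
        'services': [
            {'name': 'nova', 'type': 'compute', 'description': 'compute service'},
        ],
        'endpoints': [
            {'service': 'nova', 'region': 'RegionOne',
             'public_url': 'http://127.0.0.1:8774/v2/$(tenant_id)s',
             'admin_url': 'http://127.0.0.1:8774/v2/$(tenant_id)s',
             'internal_url': 'http://127.0.0.1:8774/v2/$(tenant_id)s'},
        ],
    }

    print(yaml.safe_dump(example, default_flow_style=False))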
@@ -1,96 +0,0 @@
#!/usr/bin/env bash

# From devstack.sh commit 0bd2410d469f11934b5965d83b57d56418e66b48

# Create EC2 credentials for the current user as defined by OS_TENANT_NAME:OS_USERNAME

ME=`basename $0`

if [[ -n "$1" ]]; then
    USERNAME=$1
fi

if [[ -n "$2" ]]; then
    TENANT=$2
fi

# Find the other rc files
RC_DIR="/etc/anvil"
CORE_RC="install.rc"
EC2_RC="euca.rc"

if [ ! -f "$RC_DIR/$CORE_RC" ];
then
    GEN_CMD="smithy -a install"
    echo "File '$RC_DIR/$CORE_RC' needed before running '$ME'"
    echo "Please run './$GEN_CMD' to get this file."
    exit 1
fi

# Get user configuration
source $RC_DIR/$CORE_RC

# Woah!
if [ -f $RC_DIR/$EC2_RC ];
then
    echo "Woah cowboy you seem to already have '$RC_DIR/$EC2_RC'!"
    while true; do
        read -p "Overwrite it and continue? " yn
        case $yn in
            [Yy]* ) break;;
            [Nn]* ) exit 1;;
            * ) echo "Please answer y or n.";;
        esac
    done
fi

# Bug https://bugs.launchpad.net/keystone/+bug/962600
unset SERVICE_TOKEN
unset SERVICE_ENDPOINT

# Set the ec2 url so euca2ools works
EC2_URL=$(keystone catalog --service ec2 | awk '/ publicURL / { print $4 }')

# Create EC2 credentials for the current user
CREDS=$(keystone ec2-credentials-create)
EC2_ACCESS_KEY=$(echo "$CREDS" | awk '/ access / { print $4 }')
EC2_SECRET_KEY=$(echo "$CREDS" | awk '/ secret / { print $4 }')

# Euca2ools certificate stuff for uploading bundles
NOVA_KEY_DIR=${NOVA_KEY_DIR:-$RC_DIR}
S3_URL=$(keystone catalog --service s3 | awk '/ publicURL / { print $4 }')

# ??
EC2_USER_ID=42

# For a comment
NOW=`date`

# Make a nice file for you
ENV_FN=$RC_DIR/$EC2_RC
echo "Making $ENV_FN"

cat > $ENV_FN <<EOF
# Created on $NOW

# General goodies
export EC2_ACCESS_KEY=$EC2_ACCESS_KEY
export EC2_SECRET_KEY=$EC2_SECRET_KEY
export NOVA_KEY_DIR=$NOVA_KEY_DIR
export EC2_URL=$EC2_URL
export S3_URL=$S3_URL
export EC2_USER_ID=$EC2_USER_ID

export NOVA_CERT=\${NOVA_KEY_DIR}/cacert.pem
export EC2_CERT=\${NOVA_KEY_DIR}/cert.pem
export EC2_PRIVATE_KEY=\${NOVA_KEY_DIR}/pk.pem
export EUCALYPTUS_CERT=\${NOVA_CERT} # euca-bundle-image seems to require this set

# Aliases
alias ec2-bundle-image="ec2-bundle-image --cert \${EC2_CERT} --privatekey \${EC2_PRIVATE_KEY} --user \${EC2_USER_ID} --ec2cert \${NOVA_CERT}"
alias ec2-upload-bundle="ec2-upload-bundle -a \${EC2_ACCESS_KEY} -s \${EC2_SECRET_KEY} --url \${S3_URL} --ec2cert \${NOVA_CERT}"
EOF

echo "For future euca commands please run \$ source '$ENV_FN'"
echo "Please also install your local distribution's euca2ools or go to http://www.eucalyptus.com/download/euca2ools"
echo "Goodbye."
@@ -1,80 +0,0 @@
#!/usr/bin/python

## Tool to run the nova config generating code and spit out a dummy
## version. Useful for testing that code in isolation.

import atexit
import os
import sys
import tempfile

possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
                                                os.pardir,
                                                os.pardir))

if os.path.exists(os.path.join(possible_topdir,
                               'anvil',
                               '__init__.py')):
    sys.path.insert(0, possible_topdir)

from anvil.components.helpers.nova import ConfConfigurator
from anvil import shell as sh
from anvil.trace import TraceWriter
from anvil import utils

DUMMY_FILE = tempfile.mktemp()


def at_exit_cleaner():
    sh.unlink(DUMMY_FILE)

atexit.register(at_exit_cleaner)


def make_fakey(all_opts, last='dummy'):
    src = {}
    tmp = src
    last_opt = all_opts[-1]
    for opt in all_opts[0:-1]:
        tmp[opt] = {}
        tmp = tmp[opt]
    tmp[last_opt] = last
    return src


class DummyInstaller(object):
    def get_option(self, option, *options, **kwargs):
        if option == 'db':
            src = {
                option: {
                    'host': 'localhost',
                    'port': 3306,
                    'type': 'mysql',
                    'user': 'root'
                },
            }
        elif option == 'ip':
            return utils.get_host_ip()
        elif utils.has_any(option, 'extra_flags', 'extra_opts', 'instances_path'):
            return ''
        else:
            # Make a fake dictionary hierarchy
            src = make_fakey([option] + list(options))
        return utils.get_deep(src, [option] + list(options))

    def get_bool_option(self, option, *options, **kwargs):
        return False

    def get_password(self, option, *options, **kwargs):
        return "forbinus"

    def target_config(self, config_fn):
        return None

    def __init__(self):
        self.tracewriter = TraceWriter(DUMMY_FILE)


d = DummyInstaller()
c = ConfConfigurator(d)
print(c.generate("foo.conf"))
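As a quick illustration of the make_fakey helper above, here is a standalone copy with made-up option names; it builds a nested dict whose innermost value is a dummy string, which the dummy installer then reads back through the same key path::

    # Standalone copy of make_fakey for illustration; option names are invented.
    def make_fakey(all_opts, last='dummy'):
        src = {}
        tmp = src
        last_opt = all_opts[-1]
        for opt in all_opts[0:-1]:
            tmp[opt] = {}
            tmp = tmp[opt]
        tmp[last_opt] = last
        return src

    print(make_fakey(['nova', 'network', 'driver']))
    # -> {'nova': {'network': {'driver': 'dummy'}}}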
@@ -1,237 +0,0 @@
#!/usr/bin/python

"""
This tool generates a pretty software changelog from git history.

http://fedoraproject.org/wiki/How_to_create_an_RPM_package says:

    %changelog: Changes in the package. Use the format example above.
                Do NOT put software's changelog at here. This changelog
                is for RPM itself.
"""

import argparse
import collections
import iso8601
import logging
import os
import os.path
import re
import subprocess
import sys
import textwrap


logger = logging.getLogger()

per_call_am = 50


class ExecutionError(Exception):
    pass


def translate_utf8(text):
    return text.decode('utf8').encode('ascii', 'replace')


def parse_mailmap(wkdir):
    mapping = {}
    mailmap_fn = os.path.join(wkdir, '.mailmap')
    if not os.path.isfile(mailmap_fn):
        return mapping
    for line in open(mailmap_fn, 'rb').read().splitlines():
        line = line.strip()
        if len(line) and not line.startswith('#') and ' ' in line:
            try:
                (canonical_email, alias) = [x for x in line.split(' ')
                                            if x.startswith('<')]
                mapping[alias] = canonical_email
            except (TypeError, ValueError, IndexError):
                pass
    return mapping


# Based off of http://www.brianlane.com/nice-changelog-entries.html
class GitChangeLog(object):
    def __init__(self, wkdir):
        self.wkdir = wkdir
        self.date_buckets = None

    def _get_commit_detail(self, commit, field, am=1):
        detail_cmd = ['git', 'log', '--color=never', '-%s' % (am),
                      "--pretty=format:%s" % (field), commit]
        (stdout, _stderr) = call_subprocess(detail_cmd, cwd=self.wkdir,
                                            show_stdout=False)
        ret = stdout.strip('\n').splitlines()
        if len(ret) == 1:
            ret = ret[0]
        else:
            ret = [x for x in ret if x.strip() != '']
            ret = "\n".join(ret)
        return ret

    def get_log(self, commit):
        if self.date_buckets is None:
            self.date_buckets = self._get_log(commit)
        return self.date_buckets

    def _skip_entry(self, summary, date, email, name):
        for f in [summary, name, email]:
            try:
                translate_utf8(f)
            except UnicodeError:
                logger.warn("Non-utf8 field %s found", f)
                return True
        email = email.lower().strip()
        summary = summary.strip()
        if not all([summary, date, email, name]):
            return True
        return False

    def _get_log(self, commit):
        log_cmd = ['git', 'log',
                   '--no-merges', '--pretty=oneline',
                   '--color=never', commit]
        (sysout, _stderr) = call_subprocess(log_cmd, cwd=self.wkdir,
                                            show_stdout=False)
        lines = sysout.strip('\n').splitlines()

        # Extract the raw commit details
        mailmap = parse_mailmap(self.wkdir)
        log = []

        for i in range(0, len(lines), per_call_am):
            line = lines[i]
            fields = line.split(' ')
            if not len(fields):
                continue

            # See: http://opensource.apple.com/source/Git/Git-26/src/git-htmldocs/pretty-formats.txt
            commit_id = fields[0]
            commit_details = self._get_commit_detail(commit_id,
                                                     "[%s][%ai][%aE][%an]",
                                                     per_call_am)

            # Extracts the pieces that should be in brackets.
            details_matcher = r"^\s*\[(.*?)\]\[(.*?)\]\[(.*?)\]\[(.*?)\]\s*$"
            for a_commit in commit_details.splitlines():
                matcher = re.match(details_matcher, a_commit)
                if not matcher:
                    continue
                (summary, date, author_email, author_name) = matcher.groups()
                author_email = mailmap.get(author_email, author_email)
                try:
                    date = iso8601.parse_date(date)
                except iso8601.ParseError:
                    date = None
                if self._skip_entry(summary, date, author_email, author_name):
                    continue
                log.append({
                    'summary': translate_utf8(summary),
                    'when': date,
                    'author_email': translate_utf8(author_email),
                    'author_name': translate_utf8(author_name),
                })

        # Bucketize the dates by day
        date_buckets = collections.defaultdict(list)
        for entry in log:
            day = entry['when'].date()
            date_buckets[day].append(entry)
        return date_buckets

    def format_log(self, commit):
        date_buckets = self.get_log(commit)
        lines = []
        for d in reversed(sorted(date_buckets.keys())):
            entries = date_buckets[d]
            for entry in entries:
                header = "* %s %s <%s>" % (d.strftime("%a %b %d %Y"),
                                           entry['author_name'],
                                           entry['author_email'])
                lines.append(header)
                summary = entry['summary']
                sublines = textwrap.wrap(summary, 77)
                if len(sublines):
                    lines.append("- %s" % sublines[0])
                if len(sublines) > 1:
                    for subline in sublines[1:]:
                        lines.append(" %s" % subline)
                lines.append("")
        return "\n".join(lines)


def create_parser():
    parser = argparse.ArgumentParser()

    parser.add_argument(
        "--debug", "-d",
        action="store_true",
        default=False,
        help="Print debug information")
    parser.add_argument(
        "--filename", "-f",
        default="ChangeLog",
        help="Name of changelog file (default: ChangeLog)")
    parser.add_argument(
        "commit",
        metavar="<commit>",
        default="HEAD",
        nargs="?",
        help="The name of a commit for which to generate the log"
             " (default: HEAD)")
    return parser


def call_subprocess(cmd, cwd=None, show_stdout=True, raise_on_returncode=True):
    if show_stdout:
        stdout = None
    else:
        stdout = subprocess.PIPE
    proc = subprocess.Popen(cmd, cwd=cwd, stderr=None, stdin=None, stdout=stdout)
    ret = proc.communicate()
    if proc.returncode:
        cwd = cwd or os.getcwd()
        command_desc = " ".join(cmd)
        if raise_on_returncode:
            raise ExecutionError(
                "Command %s failed with error code %s in %s"
                % (command_desc, proc.returncode, cwd))
        else:
            logger.warn(
                "Command %s had error code %s in %s"
                % (command_desc, proc.returncode, cwd))
    return ret


def setup_logging(options):
    level = logging.DEBUG if options.debug else logging.WARNING
    handler = logging.StreamHandler(sys.stderr)
    logger.addHandler(handler)
    logger.setLevel(level)


def main():
    parser = create_parser()
    options = parser.parse_args()
    setup_logging(options)
    source_dir = os.getcwd()

    # .git can be a dir or a gitref regular file (for a git submodule)
    if not os.path.exists(os.path.join(source_dir, ".git")):
        print >> sys.stderr, "fatal: Not a git repository"
        sys.exit(1)

    try:
        with open("%s/%s" % (source_dir, options.filename), "wb") as out:
            out.write(GitChangeLog(source_dir).format_log(options.commit))
    except Exception as ex:
        print >> sys.stderr, ex


if __name__ == "__main__":
    try:
        main()
    except Exception as exp:
        print >> sys.stderr, exp
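The core of the formatting step above is simple day-based bucketing of parsed commit entries; a pared-down sketch of that idea with placeholder entries (the summaries and dates below are made up)::

    import collections
    import datetime

    # Placeholder entries standing in for what _get_log() would collect.
    log = [
        {'summary': 'Fix the widget', 'when': datetime.datetime(2013, 9, 1, 10, 0)},
        {'summary': 'Add the gadget', 'when': datetime.datetime(2013, 9, 1, 15, 30)},
        {'summary': 'Initial commit', 'when': datetime.datetime(2013, 8, 30, 9, 0)},
    ]

    # Bucket entries by calendar day, then print newest day first,
    # mirroring what format_log() does.
    date_buckets = collections.defaultdict(list)
    for entry in log:
        date_buckets[entry['when'].date()].append(entry)

    for day in sorted(date_buckets, reverse=True):
        print(day.strftime('* %a %b %d %Y'))
        for entry in date_buckets[day]:
            print('- %s' % entry['summary'])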
@@ -1,46 +0,0 @@
#!/bin/bash

# install me this way:
# cp pre-commit "$(git rev-parse --git-dir)/hooks/"


gitdir="$(readlink -f $(git rev-parse --git-dir))"

STATUS=0

tmpdir=
cleanup_tmpdir()
{
    [ -z "$tmpdir" ] || rm -rf -- "$tmpdir"
    exit "$@"
}
tmpdir=$(mktemp -dt "${0##*/}.XXXXXXXX")
trap 'cleanup_tmpdir $?' EXIT
trap 'cleanup_tmpdir 143' HUP INT QUIT PIPE TERM


git checkout-index -a --prefix="$tmpdir/"
cd "$tmpdir"

indexed_files()
{
    git --git-dir="$gitdir" diff --cached --name-only --diff-filter=AM
}

FILES="$(indexed_files | grep -E '\.py$')"
if [ -n "$FILES" ]; then
    pylint $FILES || STATUS=1
    if grep -nEH --color '(import pdb|pdb.set_trace)' $FILES; then
        echo "Please remove pdb"
        STATUS=1
    fi
fi

FILES="$(indexed_files | grep -E '\.(py|html|js)$')"
if [ -n "$FILES" ]; then
    if grep -nEH --color '\s+$' $FILES; then
        echo "Please remove trailing spaces"
        STATUS=1
    fi
fi

exit $STATUS
@@ -1,56 +0,0 @@
#!/bin/bash

### This is a utility script that can be used to resize a vm's disk
### image in case that image is just a bare filesystem with no
### partition table or boot record. You should shutdown the vm prior
### to using this script.

if [ $# -lt 2 ]; then
    echo "Usage: resize.sh <img> <size> (e.g., resize.sh disk 500G)" 1>&2
    exit 1
fi

DISK="$1"
SIZE="$2"

echo "Will attempt to resize $DISK to $SIZE."

if [ ! -w "$DISK" ]; then
    echo "Error: Cannot write to $DISK, maybe you need to sudo."
    exit 1
fi

TMPDISK="$DISK.$RANDOM"

if !(cp "$DISK" "$TMPDISK"); then
    echo "Error: unable to make a temporary copy of $DISK named $TMPDISK." 1>&2
    exit 1
fi

if !(qemu-img resize "$TMPDISK" "$SIZE"); then
    echo "Error: qemu-img failed." 1>&2
    exit 1
fi

echo "Attempting guestfs resize... this might take a few minutes."

guestfish <<EOF
add $TMPDISK
run
e2fsck /dev/vda forceall:true
resize2fs /dev/vda
sync
umount-all
EOF
if [ $? -ne 0 ]; then
    echo "Error: guestfish resize failed." 1>&2
    exit 1
fi

if !(mv "$TMPDISK" "$DISK"); then
    echo "Error: unable to move $TMPDISK back on top of $DISK." 1>&2
    exit 1
fi

echo "Great success."
@@ -1,14 +0,0 @@
#!/usr/bin/env python

"""Try to read a YAML file and report any errors.
"""

import sys

import yaml


if __name__ == "__main__":
    fh = open(sys.argv[1], 'r')
    yaml.load(fh.read())
    fh.close()
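A minimal sketch of the same check that also reports the parse error and exits non-zero, assuming a PyYAML version that provides yaml.safe_load (which avoids constructing arbitrary Python objects from untrusted input, unlike plain yaml.load)::

    import sys

    import yaml

    # Validate the file given on the command line; print the parser's
    # complaint and exit 1 if it is not well-formed YAML.
    with open(sys.argv[1]) as fh:
        try:
            yaml.safe_load(fh)
        except yaml.YAMLError as exc:
            print("Invalid YAML: %s" % exc)
            sys.exit(1)
    print("OK")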
@@ -1,23 +0,0 @@
#!/usr/bin/env python

import sys

import yaml

# See: http://pyyaml.org/wiki/PyYAMLDocumentation

if __name__ == "__main__":
    args = list(sys.argv)
    args = args[1:]
    for fn in args:
        fh = open(fn, 'r')
        data = yaml.load(fh.read())
        fh.close()
        formatted = yaml.dump(data,
                              line_break="\n",
                              indent=4,
                              explicit_start=True,
                              explicit_end=True,
                              default_flow_style=False)
        print("# Formatted %s" % (fn))
        print(formatted)