fix all pep8 errors

fix pep8 errors in all .py files

Change-Id: Ic8b3da8861176a34e8660071d769ea0f408052ea
Closes-Bug: #1342678
vrovachev 2014-07-16 18:28:01 +04:00
parent d6244a8f31
commit 0912b7066e
9 changed files with 152 additions and 119 deletions
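The diffs below apply a few recurring PEP 8 fixes: E251 (no spaces around '=' in keyword arguments and default values), E501 (lines over 79 characters wrapped), E127/E128 (continuation-line indentation), and E301/E302 (blank lines between definitions). A minimal before/after sketch of the most common pattern; conf_bool is a hypothetical stand-in for calls such as conduit.confBool(), not code from this commit:

# Before: E251 (spaces around keyword '=') and E501 (line too long):
#   check_obsoletes = conf_bool('main', 'check_obsoletes', default = False)

def conf_bool(section, option, default=False):
    # Hypothetical helper, used only to illustrate the pattern.
    return default

# After: no spaces around the keyword '=', and the call is wrapped so
# each line fits in 79 columns, with the continuation aligned under
# the opening parenthesis.
check_obsoletes = conf_bool('main',
                            'check_obsoletes',
                            default=False)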

View File

@@ -45,7 +45,7 @@ class PuppetTestGenerator:
self.modules_path = modules_path
self.tests_directory = tests_directory_path
- self.default_template = 'puppet_module_test.py'
+ self.default_template = 'puppet_module_test.py.tmpl'
self.test_file_prefix = 'TestPuppetModule'
self.modules = []

View File

@@ -65,15 +65,17 @@ full_match = False
requires_api_version = '2.1'
plugin_type = (TYPE_CORE,)
def config_hook(conduit):
global check_obsoletes
global only_samearch
global full_match
# Plugin configuration
- check_obsoletes = conduit.confBool('main', 'check_obsoletes', default = False)
- only_samearch = conduit.confBool('main', 'only_samearch', default = False)
- full_match = conduit.confBool('main', 'full_match', default = False)
+ check_obsoletes = conduit.confBool('main', 'check_obsoletes',
+ default=False)
+ only_samearch = conduit.confBool('main', 'only_samearch', default=False)
+ full_match = conduit.confBool('main', 'full_match', default=False)
if full_match:
check_obsoletes = False
@@ -91,10 +93,12 @@ def config_hook(conduit):
if parser:
if hasattr(parser, 'plugin_option_group'):
parser = parser.plugin_option_group
- parser.add_option('', '--samearch-priorities', dest='samearch',
- action='store_true', default = False,
+ parser.add_option(
+ '', '--samearch-priorities', dest='samearch', action='store_true',
+ default=False,
help="Priority-exclude packages based on name + arch")
def _all_repo_priorities_same(allrepos):
""" Are all repos at the same priority """
first = None
@@ -105,6 +109,7 @@ def _all_repo_priorities_same(allrepos):
return False
return True
def exclude_hook(conduit):
global only_samearch
global check_obsoletes
@@ -115,7 +120,7 @@ def exclude_hook(conduit):
# If they haven't done anything, don't do any work
if _all_repo_priorities_same(allrepos):
return
# Check whether the user specified the --samearch option.
opts, commands = conduit.getCmdLine()
if opts and opts.samearch:
@@ -132,15 +137,17 @@ def exclude_hook(conduit):
if only_samearch:
pkg_priorities = dict()
if check_obsoletes or not only_samearch:
- pkg_priorities_archless = dict()
+ pkg_priorities_archless = dict()
for repo in allrepos:
if repo.enabled:
if only_samearch:
- repopkgs = _pkglist_to_dict(conduit.getPackages(repo), repo.priority, True)
+ repopkgs = _pkglist_to_dict(conduit.getPackages(repo),
+ repo.priority, True)
_mergeprioritydicts(pkg_priorities, repopkgs)
if check_obsoletes or not only_samearch:
- repopkgs_archless = _pkglist_to_dict(conduit.getPackages(repo), repo.priority)
+ repopkgs_archless = _pkglist_to_dict(
+ conduit.getPackages(repo), repo.priority)
_mergeprioritydicts(pkg_priorities_archless, repopkgs_archless)
# Eliminate packages that have a low priority
@@ -155,18 +162,21 @@ def exclude_hook(conduit):
pname = po.name
if only_samearch:
key = "%s.%s" % (pname,po.arch)
if key in pkg_priorities and pkg_priorities[key] < repo.priority:
key = "%s.%s" % (pname, po.arch)
if (key in pkg_priorities and
pkg_priorities[key] < repo.priority):
delPackage = True
else:
key = "%s" % pname
- if key in pkg_priorities_archless and pkg_priorities_archless[key] < repo.priority:
+ if (key in pkg_priorities_archless and
+ pkg_priorities_archless[key] < repo.priority):
delPackage = True
if delPackage:
conduit.delPackage(po)
cnt += 1
conduit.info(3," --> %s from %s excluded (priority)" % (po,po.repoid))
conduit.info(3, " --> %s from %s excluded "
"(priority)" % (po, po.repoid))
# If this packages obsoletes other packages, check whether
# one of the obsoleted packages is not available through
@@ -176,19 +186,25 @@ def exclude_hook(conduit):
obsolete_pkgs = obsoletes[po.pkgtup]
for obsolete_pkg in obsolete_pkgs:
pkg_name = obsolete_pkg[0]
- if pkg_name in pkg_priorities_archless and pkg_priorities_archless[pkg_name] < repo.priority:
+ if (pkg_name in pkg_priorities_archless
+ and pkg_priorities_archless[pkg_name] <
+ repo.priority):
conduit.delPackage(po)
cnt += 1
conduit.info(3," --> %s from %s excluded (priority)" % (po,po.repoid))
conduit.info(
3, " --> %s from %s excluded "
"(priority)" % (po, po.repoid))
break
if cnt:
- conduit.info(2, '%d packages excluded due to repository priority protections' % cnt)
+ conduit.info(2, '%d packages excluded due to repository '
+ 'priority protections' % cnt)
if check_obsoletes:
# Atm. the update object doesn't get updated when we manually exclude
# things ... so delete it. This needs to be re-written.
conduit._base.up = None
- def _pkglist_to_dict(pl, priority, addArch = False):
+ def _pkglist_to_dict(pl, priority, addArch=False):
global full_match
out = dict()
for p in pl:
@@ -197,12 +213,13 @@ def _pkglist_to_dict(pl, priority, addArch = False):
else:
pname = p.name
if addArch:
key = "%s.%s" % (pname,p.arch)
key = "%s.%s" % (pname, p.arch)
out[key] = priority
else:
out[pname] = priority
return out
def _mergeprioritydicts(dict1, dict2):
for package in dict2.keys():
if package not in dict1 or dict2[package] < dict1[package]:

View File

@@ -21,48 +21,65 @@ import itertools
logging.basicConfig()
logger = logging.getLogger()
class Vertex(object):
def __init__(self, node, interface):
self.node = node
self.interface = interface
def __str__(self):
return "<Vtx: %s.%s>" % (self.node, self.interface)
def __repr__(self):
return self.__str__()
def __eq__(self, other):
return self.node == other.node and self.interface == other.interface
def __ne__(self, other):
return self.node != other.node or self.interface != other.interface
def __hash__(self):
return hash(str(self))
class Arc(object):
def __init__(self, vertex_a, vertex_b):
self.arc = (vertex_a, vertex_b)
def __str__(self):
return "<Arc: %s>" % (self.arc,)
def __repr__(self):
return self.__str__()
def __getitem__(self, i):
return self.arc[i]
def __eq__(self, other):
l = map(lambda x, y: x == y, self.arc, other.arc)
return bool(filter(lambda x: x, l))
def __ne__(self, other):
l = map(lambda x, y: x != y, self.arc, other.arc)
return bool(filter(lambda x: x, l))
def __hash__(self):
return hash(str(self))
def invert(self):
return Arc(self.arc[1], self.arc[0])
class NetChecker(object):
def __init__(self, nodes, arcs):
self.nodes = nodes
self.arcs = arcs
logger.debug("Init: got %d nodes and %d arcs", len(nodes), len(self.arcs))
logger.debug("Init: got %d nodes and %d arcs", len(nodes),
len(self.arcs))
@staticmethod
def _invert_arc(arc):
@@ -207,11 +224,12 @@ class NetChecker(object):
for t in topos:
logger.debug("_uniq_topos: now testing: %s" % t)
if not isincluded(t, [i for i in topos if id(i) != id(t)]):
- copy.append(t)
+ copy.append(t)
return copy
class ClassbasedNetChecker(NetChecker):
@staticmethod
def _invert_arc(arc):
return arc.invert()
@@ -229,9 +247,6 @@ class ClassbasedNetChecker(NetChecker):
return Vertex(node, interface)
def generateFullMesh(nodes, interfaces, Klass, stability=1.0):
A = []
vertices = itertools.product(nodes, interfaces, nodes, interfaces)
@@ -245,6 +260,7 @@ def generateFullMesh(nodes, interfaces, Klass, stability=1.0):
logger.debug("generateArcs: %d arcs generated", len(A))
return A
def generateMesh(nodes1, ifaces1, nodes2, ifaces2, Klass, stability=1.0):
A = []
vertices = itertools.product(nodes1, ifaces1, nodes2, ifaces2)
@@ -261,7 +277,7 @@ def generateMesh(nodes1, ifaces1, nodes2, ifaces2, Klass, stability=1.0):
def printChoice(choice, step=4):
def printlist(l, indent=0, step=2):
- print '%s[' % (' ' * indent)
+ print '%s[' % (' ' * indent)
for i in l:
if type(i) is dict:
print '%s-' % (' ' * indent)
@@ -270,7 +286,8 @@ def printChoice(choice, step=4):
printlist(i, indent + step, step)
else:
print '%s- %s' % (' ' * indent, str(i))
- print '%s]' % (' ' * indent)
+ print '%s]' % (' ' * indent)
def printdict(d, indent=0, step=2):
for k, v in d.iteritems():
if type(v) is dict:
@@ -289,9 +306,6 @@ def printChoice(choice, step=4):
print choice
print ""
nodes = ['s1', 's2', 's3', 's4']

View File

@@ -15,7 +15,7 @@ class GithubEngine(object):
self.pool = ConnectionPool(factory=Connection)
self.token = token
self.headers = {'Content-Type': 'application/json',
- 'Authorization': 'token %s' % self.token}
+ 'Authorization': 'token %s' % self.token}
# We don't use this method, but it can be useful in some cases
def create_token(self, user, password):
@@ -23,45 +23,42 @@ class GithubEngine(object):
auth = BasicAuth(user, password)
authreqdata = {"scopes": ["repo"], "note": "admin script"}
resource = Resource('https://api.github.com/authorizations',
- pool=self.pool, filters=[auth])
+ pool=self.pool, filters=[auth])
response = resource.post(headers={"Content-Type": "application/json"},
- payload=json.dumps(authreqdata))
+ payload=json.dumps(authreqdata))
self.token = json.loads(response.body_string())['token']
def list_repos(self):
resource = Resource('https://api.github.com/user/repos',
- pool=self.pool)
+ pool=self.pool)
response = resource.get(headers=self.headers)
return json.loads(response.body_string())
def get_pull_request_by_label(self, user, repo, label):
resource = Resource("https://api.github.com/repos/%s/%s/pulls" %
- (user, repo))
+ (user, repo))
pulls = json.loads(resource.get(headers=self.headers).body_string())
- pulls_by_label = filter(lambda p: p['head']['label']==label, pulls)
+ pulls_by_label = filter(lambda p: p['head']['label'] == label, pulls)
return pulls_by_label # I hope there is no more than one
def update_pull_request(self, user, repo, number, data):
resource = Resource("https://api.github.com/repos/%s/%s/pulls/%s" %
- (user, repo, number))
+ (user, repo, number))
res = resource.post(headers=self.headers,
- payload=json.dumps(data))
+ payload=json.dumps(data))
return json.loads(res.body_string())
def create_pull_request(self, user, repo, to_user, base_branch,
branch, title="", body=""):
branch, title="", body=""):
if not title:
title = "Robot pull request. Please review."
resource = Resource("https://api.github.com/repos/%s/%s/pulls" %
- (to_user, repo))
- pulldata = {
- "title": title, "body": body,
- "head": "%s:%s" % (user, branch),
- "base": base_branch
- }
+ (to_user, repo))
+ pulldata = {"title": title, "body": body,
+ "head": "%s:%s" % (user, branch), "base": base_branch}
response = resource.post(headers=self.headers,
- payload=json.dumps(pulldata))
+ payload=json.dumps(pulldata))
return json.loads(response.body_string())
@@ -84,10 +81,10 @@ class GitEngine(object):
if not cwd:
cwd = self.local_repo
print "Executing command %s in cwd=%s" % (command, cwd)
- proc = subprocess.Popen(command, cwd=cwd, \
- stderr=subprocess.PIPE, \
- stdout=subprocess.PIPE, \
- shell=True)
+ proc = subprocess.Popen(command, cwd=cwd,
+ stderr=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ shell=True)
try:
stdout_value = proc.stdout.read().rstrip()
stderr_value = proc.stderr.read().rstrip()
@@ -98,7 +95,7 @@ class GitEngine(object):
if status != 0:
print "ERRROR: Command: '%s' Status: %s err: '%s' out: '%s'" % \
- (command, status, stderr_value, stdout_value)
+ (command, status, stderr_value, stdout_value)
raise GitEngineError(status, stderr_value)
return stdout_value
@@ -109,15 +106,15 @@ class GitEngine(object):
if not remote_path:
remote_path = self.remote_path
# Check if we can do fast-forward
- if not self.is_rebased(local_branch, "remotes/%s/%s" % \
- (self.refs_name, remote_branch)):
+ if not self.is_rebased(local_branch, "remotes/%s/%s" % (
+ self.refs_name, remote_branch)):
print "ERROR: Not able to push. " \
"Branch %s was not rebased to %s" % \
(local_branch, remote_branch)
raise
command = "git push %s %s:%s" % \
(remote_path, local_branch, remote_branch)
command = "git push %s %s:%s" % (remote_path, local_branch,
remote_branch)
try:
self.__exec(command)
except GitEngineError as e:

View File

@@ -26,8 +26,8 @@ class Review(object):
else:
self.origin_repo_url = params.repo_url
self.origin_branch = params.origin_branch
- self.origin_user, self.origin_repo = \
- p.match(self.origin_repo_url).groups()
+ self.origin_user, self.origin_repo = p.match(
+ self.origin_repo_url).groups()
config = ConfigParser.ConfigParser()
config.read(os.path.expanduser("~/.review.conf"))
@@ -38,8 +38,8 @@ class Review(object):
def rebase(self):
self.git.fetch(refs_name='devel')
self.git.fetch(remote_path=self.origin_repo_url, refs_name='origin')
self.git.checkout_from_remote_branch("remotes/devel/%s" % \
self.remote_branch)
self.git.checkout_from_remote_branch(
"remotes/devel/%s" % self.remote_branch)
self.git.submodule_init()
# Wipe all submodule's dirs before rebasing.
@@ -48,81 +48,85 @@ class Review(object):
try:
self.git.rebase("remotes/origin/%s" % self.origin_branch)
except:
raise Exception("ERROR: Auto-rebase of %s failed." \
" Try to 'git rebase origin/%s' from your local" \
"branch and push again" % \
(self.remote_branch, self.origin_branch))
raise Exception(
"ERROR: Auto-rebase of %s failed. Try to "
"'git rebase origin/%s' from your local branch "
"and push again" % (self.remote_branch, self.origin_branch))
self.git.submodule_update()
def push(self):
- self.git.push(remote_branch=self.origin_branch, \
- remote_path=self.origin_repo_url)
+ self.git.push(remote_branch=self.origin_branch,
+ remote_path=self.origin_repo_url)
# Remove remote branch as we don't need it after merge
- self.git.remove_remote_branch(remote_branch=self.remote_branch, \
- remote_path=self.repo_url)
+ self.git.remove_remote_branch(remote_branch=self.remote_branch,
+ remote_path=self.repo_url)
print "Closing pull request.."
self._github_lazy_init()
- pull_requests = self.github.get_pull_request_by_label(self.origin_user,
- self.origin_repo, "%s:%s" % (self.user, self.remote_branch))
+ pull_requests = self.github.get_pull_request_by_label(
+ self.origin_user, self.origin_repo, "%s:%s" % (
+ self.user, self.remote_branch))
if pull_requests:
pull_number = pull_requests[0]['number']
print "Found pull request #%s. Closing.." % pull_number
newdata = {'state': 'closed'}
self.github.update_pull_request(self.origin_user, self.origin_repo,
- pull_number, newdata)
+ pull_number, newdata)
def add_pull_request(self, title="default title", body="default body"):
self._github_lazy_init()
try:
- res = self.github.create_pull_request(self.user, self.repo,
- self.origin_user, self.origin_branch,
- self.remote_branch, title, body)
+ res = self.github.create_pull_request(
+ self.user, self.repo, self.origin_user, self.origin_branch,
+ self.remote_branch, title, body)
pull_number = res['number']
except restkit.errors.RequestFailed as e:
print "Error occured while creating pull request." \
"Possibly it already exists."
pull_requests = self.github.get_pull_request_by_label( \
self.origin_user, self.origin_repo, \
"%s:%s" % (self.user, self.remote_branch))
" Possibly it already exists."
pull_requests = self.github.get_pull_request_by_label(
self.origin_user, self.origin_repo, "%s:%s" % (
self.user, self.remote_branch))
pull_number = pull_requests[0]['number']
url = "https://github.com/%s/%s/pull/%s" % \
(self.origin_user, self.origin_repo, pull_number)
print "<a href=\"%s\">Pull request #%s</a>" % \
(url, pull_number)
url = "https://github.com/%s/%s/pull/%s" % (
self.origin_user, self.origin_repo, pull_number)
print "<a href=\"%s\">Pull request #%s</a>" % (url, pull_number)
def _github_lazy_init(self):
if not self.github:
self.github = git_api.GithubEngine(self.github_user,
- self.github_token)
+ self.github_token)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Review system")
parser.add_argument('--repo', dest='repo_url', type=str, required=True,
- help='URL to repository, format: git@github.com:<user>/<repo>.git')
+ help='URL to repository, format: '
+ 'git@github.com:<user>/<repo>.git')
parser.add_argument('--branch', dest='remote_branch', type=str,
- required=True, help='Remote branch')
+ required=True, help='Remote branch')
parser.add_argument('--origin-repo', dest='origin_repo_url', type=str,
- required=False,
- help='URL to repository, format: git@github.com:<user>/<repo>.git')
+ required=False, help='URL to repository, format: git'
+ '@github.com:<user>/<repo>.git')
parser.add_argument('--origin-branch', dest='origin_branch',
- default='master', required=False, type=str,
- help='Remote branch')
+ default='master', required=False, type=str,
+ help='Remote branch')
parser.add_argument('-t' '--pull_title', dest='pull_title', type=str,
- help='Title for pull request')
+ help='Title for pull request')
parser.add_argument('-b' '--pull_body', dest='pull_body', type=str,
- help='Body for pull request')
+ help='Body for pull request')
group = parser.add_mutually_exclusive_group(required=True)
- group.add_argument('-c', '--check', action='store_true',
- help='Check if branch can be rebased. Prepare it for tests.')
- group.add_argument('-a', '--add', action='store_true',
- help='Add pull request from user branch to master')
- group.add_argument('-p', '--push', action='store_true',
- help='Pushes rebased code from user branch to remote master')
+ group.add_argument(
+ '-c', '--check', action='store_true',
+ help='Check if branch can be rebased. Prepare it for tests.')
+ group.add_argument(
+ '-a', '--add', action='store_true',
+ help='Add pull request from user branch to master')
+ group.add_argument(
+ '-p', '--push', action='store_true',
+ help='Pushes rebased code from user branch to remote master')
params = parser.parse_args()
@@ -142,4 +146,3 @@ if __name__ == "__main__":
elif params.push:
rvw.rebase()
rvw.push()

View File

@@ -45,14 +45,15 @@ def is_valid_combination(values, names):
dictionary = dict(zip(names, values))
rules = [
lambda d: "RedHat" == d["os"] and "ceph" == d["storage volume"]
, lambda d: "RedHat" == d["os"] and "ceph" == d["storage images"]
, lambda d: "RedHat" == d["os"] and "yes" == d["savanna"]
, lambda d: "RedHat" == d["os"] and "yes" == d["murano"]
, lambda d: "RedHat" == d["os"] and "neutron GRE" == d["network"]
, lambda d: "RedHat" == d["os"] and "neutron VLAN" == d["network"]
, lambda d: d["cinder"] > 0 and d["storage volume"] == "default"
, lambda d: d["ceph"] > 0 and d["storage volume"] == "default" and d["storage images"] == "default"
lambda d: "RedHat" == d["os"] and "ceph" == d["storage volume"],
lambda d: "RedHat" == d["os"] and "ceph" == d["storage images"],
lambda d: "RedHat" == d["os"] and "yes" == d["savanna"],
lambda d: "RedHat" == d["os"] and "yes" == d["murano"],
lambda d: "RedHat" == d["os"] and "neutron GRE" == d["network"],
lambda d: "RedHat" == d["os"] and "neutron VLAN" == d["network"],
lambda d: d["cinder"] > 0 and d["storage volume"] == "default",
lambda d: d["ceph"] > 0 and d["storage volume"] == "default"
and d["storage images"] == "default"
]
for rule in rules:
@@ -66,9 +67,9 @@ def is_valid_combination(values, names):
pairwise = all_pairs(
- [x[1] for x in parameters]
- , filter_func=lambda values: is_valid_combination(values, [x[0] for x in
- parameters])
+ [x[1] for x in parameters],
+ filter_func=lambda values: is_valid_combination(values,
+ [x[0] for x in parameters])
)
for i, v in enumerate(pairwise):

View File

@@ -22,8 +22,10 @@ import daemon.pidlockfile
import BaseHTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
class SimpleHTTPDaemon:
- def __init__(self, address='0.0.0.0', port='9001', pid_file='/var/run/simplehttpd.pid', ttl=600):
+ def __init__(self, address='0.0.0.0', port='9001',
+ pid_file='/var/run/simplehttpd.pid', ttl=600):
self.address = address
self.port = port
self.pid_file = pid_file
@@ -32,8 +34,8 @@ class SimpleHTTPDaemon:
def run_http_server(self):
HandlerClass = SimpleHTTPRequestHandler
- ServerClass = BaseHTTPServer.HTTPServer
- Protocol = "HTTP/1.0"
+ ServerClass = BaseHTTPServer.HTTPServer
+ Protocol = "HTTP/1.0"
server_address = (self.address, self.port)
HandlerClass.protocol_version = Protocol
httpd = ServerClass(server_address, HandlerClass)
@@ -43,10 +45,10 @@ class SimpleHTTPDaemon:
def start(self):
self.end = time.time() + self.ttl
context = daemon.DaemonContext(
- working_directory = os.getcwd(),
- umask = 0o002,
- pidfile = daemon.pidlockfile.PIDLockFile(self.pid_file),
- )
+ working_directory=os.getcwd(),
+ umask=0o002,
+ pidfile=daemon.pidlockfile.PIDLockFile(self.pid_file)
+ )
with context:
self.run_http_server()
@@ -64,4 +66,3 @@ if __name__ == "__main__":
server = SimpleHTTPDaemon('0.0.0.0', port, pid, 600)
server.start()

View File

@@ -28,10 +28,11 @@ import tftpy
tftpy.setLogLevel(logging.WARNING)
class TClient(threading.Thread):
def __init__(self, hostname, port, remote_filename, local_filename=None):
logger.debug("Initializing TClient instance: "
"hostname: '%s' port: %s remote file: '%s' local file: '%s'",
logger.debug("Initializing TClient instance: hostname: '%s' port: %s "
"remote file: '%s' local file: '%s'",
hostname, port, remote_filename, local_filename)
super(TClient, self).__init__()
self.hostname = hostname
@@ -48,7 +49,7 @@ class TClient(threading.Thread):
def stat(self):
s = self.client.context.metrics
- return (s.bytes, s.duration, s.kbps)
+ return s.bytes, s.duration, s.kbps
def term_handler(signum, sigframe):
@@ -115,4 +116,3 @@ if __name__ == "__main__":
for i, c in enumerate(clients):
logger.debug("Statistics tftp client thread: %s", i)
logger.info("Bytes: %s, Duration: %s, Speed: %s kbps" % c.stat())