Format all Java files with google-java-format

Having a standard tool for formatting saves reviewers' valuable time.
google-java-format is Google's standard formatter and is somewhat
inspired by gofmt[1]. This commit formats everything using
google-java-format version 1.2.
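
For reference, a typical way to run that exact version standalone is shown
below; the jar name is assumed to match the upstream 1.2 release artifact,
and the file path is only a placeholder:

  java -jar google-java-format-1.2-all-deps.jar -i path/to/SomeFile.java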

The downside of this one-off reformatting is that it breaks git blame. This
can be partially worked around with a tool like git-hyper-blame[2], but it
remains suboptimal until/unless this kind of feature makes its way into git
core.
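
As a concrete sketch, assuming depot_tools is installed, blame can skip this
commit by its abbreviated id (the file path below is only a placeholder);
git-hyper-blame can also read ignored revisions from a .git-blame-ignore-revs
file at the repository root:

  git hyper-blame -i 292fa154c1 path/to/SomeFile.java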

Not in this change:
* Tool support, e.g. Eclipse integration. For now the command must be run
  manually [3]; see the sketch after this list.
* Documentation of best practice, e.g. new 100-column default.
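
Until such tool support exists, one possible way to format only locally
modified files (a sketch following the same pattern as [3]) is:

  git diff --name-only HEAD | grep java$ | xargs google-java-format -i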

[1] https://talks.golang.org/2015/gofmt-en.slide#3
[2] https://commondatastorage.googleapis.com/chrome-infra-docs/flat/depot_tools/docs/html/git-hyper-blame.html
[3] git ls-files | grep java$ | xargs google-java-format -i

Change-Id: Id5f3c6de95ce0b68b41f0a478b5c99a93675aaa3
Signed-off-by: David Pursehouse <dpursehouse@collab.net>

Author:       Dave Borowitz
Date:         2016-11-13 09:56:32 -08:00
Committed by: David Pursehouse
Parent:       6723b6d0fa
Commit:       292fa154c1
2443 changed files with 54816 additions and 57825 deletions

File: PredicateClassLoader.java

@@ -17,33 +17,27 @@ package com.google.gerrit.rules;
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.SetMultimap;
import com.google.gerrit.extensions.registration.DynamicSet;
import java.util.Collection;
/**
* Loads the classes for Prolog predicates.
*/
/** Loads the classes for Prolog predicates. */
public class PredicateClassLoader extends ClassLoader {
private final SetMultimap<String, ClassLoader> packageClassLoaderMap =
LinkedHashMultimap.create();
public PredicateClassLoader(
final DynamicSet<PredicateProvider> predicateProviders,
final ClassLoader parent) {
final DynamicSet<PredicateProvider> predicateProviders, final ClassLoader parent) {
super(parent);
for (PredicateProvider predicateProvider : predicateProviders) {
for (String pkg : predicateProvider.getPackages()) {
packageClassLoaderMap.put(pkg, predicateProvider.getClass()
.getClassLoader());
packageClassLoaderMap.put(pkg, predicateProvider.getClass().getClassLoader());
}
}
}
@Override
protected Class<?> findClass(final String className)
throws ClassNotFoundException {
protected Class<?> findClass(final String className) throws ClassNotFoundException {
final Collection<ClassLoader> classLoaders =
packageClassLoaderMap.get(getPackageName(className));
for (final ClassLoader cl : classLoaders) {

File: PredicateProvider.java

@@ -15,17 +15,15 @@ package com.google.gerrit.rules;
import com.google.common.collect.ImmutableSet;
import com.google.gerrit.extensions.annotations.ExtensionPoint;
import com.googlecode.prolog_cafe.lang.Predicate;
/**
* Provides additional packages that contain Prolog predicates that should be
* made available in the Prolog environment. The predicates can e.g. be used in
* the project submit rules.
* Provides additional packages that contain Prolog predicates that should be made available in the
* Prolog environment. The predicates can e.g. be used in the project submit rules.
*
* Each Java class defining a Prolog predicate must be in one of the provided
* packages and its name must apply to the 'PRED_[functor]_[arity]' format. In
* addition it must extend {@link Predicate}.
* <p>Each Java class defining a Prolog predicate must be in one of the provided packages and its
* name must apply to the 'PRED_[functor]_[arity]' format. In addition it must extend {@link
* Predicate}.
*/
@ExtensionPoint
public interface PredicateProvider {

File: PrologEnvironment.java

@@ -25,34 +25,30 @@ import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import com.google.inject.assistedinject.Assisted;
import com.googlecode.prolog_cafe.lang.BufferingPrologControl;
import com.googlecode.prolog_cafe.lang.Predicate;
import com.googlecode.prolog_cafe.lang.PredicateEncoder;
import com.googlecode.prolog_cafe.lang.Prolog;
import com.googlecode.prolog_cafe.lang.PrologMachineCopy;
import org.eclipse.jgit.lib.Config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.eclipse.jgit.lib.Config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Per-thread Prolog interpreter.
* <p>
* This class is not thread safe.
* <p>
* A single copy of the Prolog interpreter, for the current thread.
*
* <p>This class is not thread safe.
*
* <p>A single copy of the Prolog interpreter, for the current thread.
*/
public class PrologEnvironment extends BufferingPrologControl {
private static final Logger log =
LoggerFactory.getLogger(PrologEnvironment.class);
private static final Logger log = LoggerFactory.getLogger(PrologEnvironment.class);
public interface Factory {
/**
@@ -112,8 +108,8 @@ public class PrologEnvironment extends BufferingPrologControl {
}
/**
* Copy the stored values from another interpreter to this one.
* Also gets the cleanup from the child interpreter
* Copy the stored values from another interpreter to this one. Also gets the cleanup from the
* child interpreter
*/
public void copyStoredValues(PrologEnvironment child) {
storedValues.putAll(child.storedValues);
@@ -121,9 +117,8 @@ public class PrologEnvironment extends BufferingPrologControl {
}
/**
* Assign the environment a cleanup list (in order to use a centralized list)
* If this enivronment's list is non-empty, append its cleanup tasks to the
* assigning list.
* Assign the environment a cleanup list (in order to use a centralized list) If this
* enivronment's list is non-empty, append its cleanup tasks to the assigning list.
*/
public void setCleanup(List<Runnable> newCleanupList) {
newCleanupList.addAll(cleanup);
@@ -132,17 +127,16 @@ public class PrologEnvironment extends BufferingPrologControl {
/**
* Adds cleanup task to run when close() is called
*
* @param task is run when close() is called
*/
public void addToCleanup(Runnable task) {
cleanup.add(task);
}
/**
* Release resources stored in interpreter's hash manager.
*/
/** Release resources stored in interpreter's hash manager. */
public void close() {
for (final Iterator<Runnable> i = cleanup.iterator(); i.hasNext();) {
for (final Iterator<Runnable> i = cleanup.iterator(); i.hasNext(); ) {
try {
i.next().run();
} catch (Throwable err) {
@@ -155,12 +149,16 @@ public class PrologEnvironment extends BufferingPrologControl {
@Singleton
public static class Args {
private static final Class<Predicate> CONSULT_STREAM_2;
static {
try {
@SuppressWarnings("unchecked")
Class<Predicate> c = (Class<Predicate>) Class.forName(
PredicateEncoder.encode(Prolog.BUILTIN, "consult_stream", 2),
false, RulesCache.class.getClassLoader());
Class<Predicate> c =
(Class<Predicate>)
Class.forName(
PredicateEncoder.encode(Prolog.BUILTIN, "consult_stream", 2),
false,
RulesCache.class.getClassLoader());
CONSULT_STREAM_2 = c;
} catch (ClassNotFoundException e) {
throw new LinkageError("cannot find predicate consult_stream", e);
@@ -177,7 +175,8 @@ public class PrologEnvironment extends BufferingPrologControl {
private final int compileLimit;
@Inject
Args(ProjectCache projectCache,
Args(
ProjectCache projectCache,
GitRepositoryManager repositoryManager,
PatchListCache patchListCache,
PatchSetInfoFactory patchSetInfoFactory,
@@ -194,8 +193,12 @@ public class PrologEnvironment extends BufferingPrologControl {
int limit = config.getInt("rules", null, "reductionLimit", 100000);
reductionLimit = limit <= 0 ? Integer.MAX_VALUE : limit;
limit = config.getInt("rules", null, "compileReductionLimit",
(int) Math.min(10L * limit, Integer.MAX_VALUE));
limit =
config.getInt(
"rules",
null,
"compileReductionLimit",
(int) Math.min(10L * limit, Integer.MAX_VALUE));
compileLimit = limit <= 0 ? Integer.MAX_VALUE : limit;
}

File: RulesCache.java

@@ -26,7 +26,6 @@ import com.google.gerrit.server.config.SitePaths;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.googlecode.prolog_cafe.exceptions.CompileException;
import com.googlecode.prolog_cafe.exceptions.SyntaxException;
import com.googlecode.prolog_cafe.exceptions.TermException;
@@ -39,15 +38,6 @@ import com.googlecode.prolog_cafe.lang.PrologMachineCopy;
import com.googlecode.prolog_cafe.lang.StructureTerm;
import com.googlecode.prolog_cafe.lang.SymbolTerm;
import com.googlecode.prolog_cafe.lang.Term;
import org.eclipse.jgit.errors.LargeObjectException;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.util.RawParseUtils;
import java.io.IOException;
import java.io.PushbackReader;
import java.io.Reader;
@@ -65,24 +55,28 @@ import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.eclipse.jgit.errors.LargeObjectException;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.util.RawParseUtils;
/**
* Manages a cache of compiled Prolog rules.
* <p>
* Rules are loaded from the {@code site_path/cache/rules/rules-SHA1.jar}, where
* {@code SHA1} is the SHA1 of the Prolog {@code rules.pl} in a project's
* {@link RefNames#REFS_CONFIG} branch.
*
* <p>Rules are loaded from the {@code site_path/cache/rules/rules-SHA1.jar}, where {@code SHA1} is
* the SHA1 of the Prolog {@code rules.pl} in a project's {@link RefNames#REFS_CONFIG} branch.
*/
@Singleton
public class RulesCache {
private static final List<String> PACKAGE_LIST = ImmutableList.of(
Prolog.BUILTIN, "gerrit");
private static final List<String> PACKAGE_LIST = ImmutableList.of(Prolog.BUILTIN, "gerrit");
private static final class MachineRef extends WeakReference<PrologMachineCopy> {
final ObjectId key;
MachineRef(ObjectId key, PrologMachineCopy pcm,
ReferenceQueue<PrologMachineCopy> queue) {
MachineRef(ObjectId key, PrologMachineCopy pcm, ReferenceQueue<PrologMachineCopy> queue) {
super(pcm, queue);
this.key = key;
}
@@ -98,16 +92,17 @@ public class RulesCache {
private final ClassLoader systemLoader;
private final PrologMachineCopy defaultMachine;
private final Map<ObjectId, MachineRef> machineCache = new HashMap<>();
private final ReferenceQueue<PrologMachineCopy> dead =
new ReferenceQueue<>();
private final ReferenceQueue<PrologMachineCopy> dead = new ReferenceQueue<>();
@Inject
protected RulesCache(@GerritServerConfig Config config, SitePaths site,
GitRepositoryManager gm, DynamicSet<PredicateProvider> predicateProviders) {
protected RulesCache(
@GerritServerConfig Config config,
SitePaths site,
GitRepositoryManager gm,
DynamicSet<PredicateProvider> predicateProviders) {
maxDbSize = config.getInt("rules", null, "maxPrologDatabaseSize", 256);
maxSrcBytes = config.getInt("rules", null, "maxSourceBytes", 128 << 10);
enableProjectRules = config.getBoolean("rules", null, "enable", true)
&& maxSrcBytes > 0;
enableProjectRules = config.getBoolean("rules", null, "enable", true) && maxSrcBytes > 0;
cacheDir = site.resolve(config.getString("cache", null, "directory"));
rulesDir = cacheDir != null ? cacheDir.resolve("rules") : null;
gitMgr = gm;
@@ -127,9 +122,7 @@ public class RulesCache {
* @return a Prolog machine, after loading the specified rules.
* @throws CompileException the machine cannot be created.
*/
public synchronized PrologMachineCopy loadMachine(
Project.NameKey project,
ObjectId rulesId)
public synchronized PrologMachineCopy loadMachine(Project.NameKey project, ObjectId rulesId)
throws CompileException {
if (!enableProjectRules || project == null || rulesId == null) {
return defaultMachine;
@@ -154,8 +147,7 @@ public class RulesCache {
return pcm;
}
public PrologMachineCopy loadMachine(String name, Reader in)
throws CompileException {
public PrologMachineCopy loadMachine(String name, Reader in) throws CompileException {
PrologMachineCopy pmc = consultRules(name, in);
if (pmc == null) {
throw new CompileException("Cannot consult rules from the stream " + name);
@@ -173,8 +165,8 @@ public class RulesCache {
}
}
private PrologMachineCopy createMachine(Project.NameKey project,
ObjectId rulesId) throws CompileException {
private PrologMachineCopy createMachine(Project.NameKey project, ObjectId rulesId)
throws CompileException {
// If the rules are available as a complied JAR on local disk, prefer
// that over dynamic consult as the bytecode will be faster.
//
@@ -196,29 +188,26 @@ public class RulesCache {
return pmc;
}
private PrologMachineCopy consultRules(String name, Reader rules)
throws CompileException {
private PrologMachineCopy consultRules(String name, Reader rules) throws CompileException {
BufferingPrologControl ctl = newEmptyMachine(systemLoader);
PushbackReader in = new PushbackReader(rules, Prolog.PUSHBACK_SIZE);
try {
if (!ctl.execute(Prolog.BUILTIN, "consult_stream",
SymbolTerm.intern(name), new JavaObjectTerm(in))) {
if (!ctl.execute(
Prolog.BUILTIN, "consult_stream", SymbolTerm.intern(name), new JavaObjectTerm(in))) {
return null;
}
} catch (SyntaxException e) {
throw new CompileException(e.toString(), e);
} catch (TermException e) {
Term m = e.getMessageTerm();
if (m instanceof StructureTerm && "syntax_error".equals(m.name())
&& m.arity() >= 1) {
if (m instanceof StructureTerm && "syntax_error".equals(m.name()) && m.arity() >= 1) {
StringBuilder msg = new StringBuilder();
if (m.arg(0) instanceof ListTerm) {
msg.append(Joiner.on(' ').join(((ListTerm) m.arg(0)).toJava()));
} else {
msg.append(m.arg(0).toString());
}
if (m.arity() == 2 && m.arg(1) instanceof StructureTerm
&& "at".equals(m.arg(1).name())) {
if (m.arity() == 2 && m.arg(1) instanceof StructureTerm && "at".equals(m.arg(1).name())) {
Term at = m.arg(1).arg(0).dereference();
if (at instanceof ListTerm) {
msg.append(" at: ");
@@ -259,8 +248,7 @@ public class RulesCache {
return b.toString().trim();
}
private String read(Project.NameKey project, ObjectId rulesId)
throws CompileException {
private String read(Project.NameKey project, ObjectId rulesId) throws CompileException {
try (Repository git = gitMgr.openRepository(project)) {
try {
ObjectLoader ldr = git.open(rulesId, Constants.OBJ_BLOB);
@@ -279,8 +267,8 @@ public class RulesCache {
private BufferingPrologControl newEmptyMachine(ClassLoader cl) {
BufferingPrologControl ctl = new BufferingPrologControl();
ctl.setMaxDatabaseSize(maxDbSize);
ctl.setPrologClassLoader(new PrologClassLoader(new PredicateClassLoader(
predicateProviders, cl)));
ctl.setPrologClassLoader(
new PrologClassLoader(new PredicateClassLoader(predicateProviders, cl)));
ctl.setEnabled(EnumSet.allOf(Prolog.Feature.class), false);
List<String> packages = new ArrayList<>();

File: StoredValue.java

@@ -38,16 +38,14 @@ public class StoredValue<T> {
/**
* Initialize a stored value key using any Java Object.
*
* @param key unique identity of the stored value. This will be the hash key
* in the Prolog Environments's hash map.
* @param key unique identity of the stored value. This will be the hash key in the Prolog
* Environments's hash map.
*/
public StoredValue(Object key) {
this.key = key;
}
/**
* Initializes a stored value key with a new unique key.
*/
/** Initializes a stored value key with a new unique key. */
public StoredValue() {
key = this;
}

File: StoredValues.java

@@ -36,16 +36,13 @@ import com.google.gerrit.server.patch.PatchSetInfoNotAvailableException;
import com.google.gerrit.server.project.ChangeControl;
import com.google.gerrit.server.query.change.ChangeData;
import com.google.gwtorm.server.OrmException;
import com.googlecode.prolog_cafe.exceptions.SystemException;
import com.googlecode.prolog_cafe.lang.Prolog;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Repository;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Repository;
public final class StoredValues {
public static final StoredValue<ReviewDb> REVIEW_DB = create(ReviewDb.class);
@@ -76,65 +73,68 @@ public final class StoredValues {
}
}
public static final StoredValue<PatchSetInfo> PATCH_SET_INFO = new StoredValue<PatchSetInfo>() {
@Override
public PatchSetInfo createValue(Prolog engine) {
Change change = getChange(engine);
PatchSet ps = getPatchSet(engine);
PrologEnvironment env = (PrologEnvironment) engine.control;
PatchSetInfoFactory patchInfoFactory =
env.getArgs().getPatchSetInfoFactory();
try {
return patchInfoFactory.get(change.getProject(), ps);
} catch (PatchSetInfoNotAvailableException e) {
throw new SystemException(e.getMessage());
}
}
};
public static final StoredValue<PatchList> PATCH_LIST = new StoredValue<PatchList>() {
@Override
public PatchList createValue(Prolog engine) {
PrologEnvironment env = (PrologEnvironment) engine.control;
PatchSet ps = getPatchSet(engine);
PatchListCache plCache = env.getArgs().getPatchListCache();
Change change = getChange(engine);
Project.NameKey project = change.getProject();
ObjectId b = ObjectId.fromString(ps.getRevision().get());
Whitespace ws = Whitespace.IGNORE_NONE;
PatchListKey plKey = PatchListKey.againstDefaultBase(b, ws);
PatchList patchList;
try {
patchList = plCache.get(plKey, project);
} catch (PatchListNotAvailableException e) {
throw new SystemException("Cannot create " + plKey);
}
return patchList;
}
};
public static final StoredValue<Repository> REPOSITORY = new StoredValue<Repository>() {
@Override
public Repository createValue(Prolog engine) {
PrologEnvironment env = (PrologEnvironment) engine.control;
GitRepositoryManager gitMgr = env.getArgs().getGitRepositoryManager();
Change change = getChange(engine);
Project.NameKey projectKey = change.getProject();
final Repository repo;
try {
repo = gitMgr.openRepository(projectKey);
} catch (IOException e) {
throw new SystemException(e.getMessage());
}
env.addToCleanup(new Runnable() {
public static final StoredValue<PatchSetInfo> PATCH_SET_INFO =
new StoredValue<PatchSetInfo>() {
@Override
public void run() {
repo.close();
public PatchSetInfo createValue(Prolog engine) {
Change change = getChange(engine);
PatchSet ps = getPatchSet(engine);
PrologEnvironment env = (PrologEnvironment) engine.control;
PatchSetInfoFactory patchInfoFactory = env.getArgs().getPatchSetInfoFactory();
try {
return patchInfoFactory.get(change.getProject(), ps);
} catch (PatchSetInfoNotAvailableException e) {
throw new SystemException(e.getMessage());
}
}
});
return repo;
}
};
};
public static final StoredValue<PatchList> PATCH_LIST =
new StoredValue<PatchList>() {
@Override
public PatchList createValue(Prolog engine) {
PrologEnvironment env = (PrologEnvironment) engine.control;
PatchSet ps = getPatchSet(engine);
PatchListCache plCache = env.getArgs().getPatchListCache();
Change change = getChange(engine);
Project.NameKey project = change.getProject();
ObjectId b = ObjectId.fromString(ps.getRevision().get());
Whitespace ws = Whitespace.IGNORE_NONE;
PatchListKey plKey = PatchListKey.againstDefaultBase(b, ws);
PatchList patchList;
try {
patchList = plCache.get(plKey, project);
} catch (PatchListNotAvailableException e) {
throw new SystemException("Cannot create " + plKey);
}
return patchList;
}
};
public static final StoredValue<Repository> REPOSITORY =
new StoredValue<Repository>() {
@Override
public Repository createValue(Prolog engine) {
PrologEnvironment env = (PrologEnvironment) engine.control;
GitRepositoryManager gitMgr = env.getArgs().getGitRepositoryManager();
Change change = getChange(engine);
Project.NameKey projectKey = change.getProject();
final Repository repo;
try {
repo = gitMgr.openRepository(projectKey);
} catch (IOException e) {
throw new SystemException(e.getMessage());
}
env.addToCleanup(
new Runnable() {
@Override
public void run() {
repo.close();
}
});
return repo;
}
};
public static final StoredValue<AnonymousUser> ANONYMOUS_USER =
new StoredValue<AnonymousUser>() {
@@ -153,6 +153,5 @@ public final class StoredValues {
}
};
private StoredValues() {
}
private StoredValues() {}
}