Format all Java files with google-java-format

Having a standard tool for formatting saves reviewers' valuable time.
google-java-format is Google's standard formatter and is somewhat
inspired by gofmt[1]. This commit formats everything using
google-java-format version 1.2.

The downside of this one-off reformatting is that it breaks blame. This
can be somewhat hacked around with a tool like git-hyper-blame[2], but
it's definitely not optimal until/unless this kind of feature makes its
way into git core.
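
For example, with git-hyper-blame from depot_tools (the -i flag and the
.git-blame-ignore-revs default are assumptions about that tool's
interface; the file path below is only illustrative):

  # One-off: skip this formatting commit when assigning blame.
  git hyper-blame -i 292fa154c1 path/to/SomeFile.java

  # Or record it once so later hyper-blame invocations skip it by default.
  echo 292fa154c1 >> .git-blame-ignore-revs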

Not in this change:
* Tool support, e.g. Eclipse. The command must be run manually [3].
* Documentation of best practice, e.g. new 100-column default.

[1] https://talks.golang.org/2015/gofmt-en.slide#3
[2] https://commondatastorage.googleapis.com/chrome-infra-docs/flat/depot_tools/docs/html/git-hyper-blame.html
[3] git ls-files | grep java$ | xargs google-java-format -i
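
Until proper tool support lands, a local pre-commit hook along these
lines (a rough sketch, not part of this change) can run the same
formatter from [3] on staged files only:

  #!/bin/sh
  # .git/hooks/pre-commit: format staged Java files before committing.
  # Assumes google-java-format 1.2 is on the PATH.
  files=$(git diff --cached --name-only --diff-filter=ACM | grep '\.java$')
  [ -z "$files" ] && exit 0
  google-java-format -i $files
  git add $files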

Change-Id: Id5f3c6de95ce0b68b41f0a478b5c99a93675aaa3
Signed-off-by: David Pursehouse <dpursehouse@collab.net>
Author:    Dave Borowitz
Date:      2016-11-13 09:56:32 -08:00
Committer: David Pursehouse
Parent:    6723b6d0fa
Commit:    292fa154c1
2443 changed files with 54816 additions and 57825 deletions

DefaultCacheFactory.java

@@ -29,10 +29,8 @@ import com.google.gerrit.server.cache.h2.H2CacheImpl.ValueHolder;
import com.google.gerrit.server.config.ConfigUtil;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.inject.Inject;
import org.eclipse.jgit.lib.Config;
import java.util.concurrent.TimeUnit;
import org.eclipse.jgit.lib.Config;
public class DefaultCacheFactory implements MemoryCacheFactory {
public static class Module extends LifecycleModule {
@@ -50,7 +48,8 @@ public class DefaultCacheFactory implements MemoryCacheFactory {
private final ForwardingRemovalListener.Factory forwardingRemovalListenerFactory;
@Inject
public DefaultCacheFactory(@GerritServerConfig Config config,
public DefaultCacheFactory(
@GerritServerConfig Config config,
ForwardingRemovalListener.Factory forwardingRemovalListenerFactory) {
this.cfg = config;
this.forwardingRemovalListenerFactory = forwardingRemovalListenerFactory;
@@ -62,33 +61,29 @@ public class DefaultCacheFactory implements MemoryCacheFactory {
}
@Override
public <K, V> LoadingCache<K, V> build(
CacheBinding<K, V> def,
CacheLoader<K, V> loader) {
public <K, V> LoadingCache<K, V> build(CacheBinding<K, V> def, CacheLoader<K, V> loader) {
return create(def, false).build(loader);
}
@SuppressWarnings("unchecked")
<K, V> CacheBuilder<K, V> create(
CacheBinding<K, V> def,
boolean unwrapValueHolder) {
CacheBuilder<K,V> builder = newCacheBuilder();
<K, V> CacheBuilder<K, V> create(CacheBinding<K, V> def, boolean unwrapValueHolder) {
CacheBuilder<K, V> builder = newCacheBuilder();
builder.recordStats();
builder.maximumWeight(cfg.getLong(
"cache", def.name(), "memoryLimit",
def.maximumWeight()));
builder.maximumWeight(cfg.getLong("cache", def.name(), "memoryLimit", def.maximumWeight()));
builder = builder.removalListener(forwardingRemovalListenerFactory.create(def.name()));
Weigher<K, V> weigher = def.weigher();
if (weigher != null && unwrapValueHolder) {
final Weigher<K, V> impl = weigher;
weigher = (Weigher<K, V>) new Weigher<K, ValueHolder<V>> () {
@Override
public int weigh(K key, ValueHolder<V> value) {
return impl.weigh(key, value.value);
}
};
weigher =
(Weigher<K, V>)
new Weigher<K, ValueHolder<V>>() {
@Override
public int weigh(K key, ValueHolder<V> value) {
return impl.weigh(key, value.value);
}
};
} else if (weigher == null) {
weigher = unitWeight();
}
@@ -96,10 +91,10 @@ public class DefaultCacheFactory implements MemoryCacheFactory {
Long age = def.expireAfterWrite(TimeUnit.SECONDS);
if (has(def.name(), "maxAge")) {
builder.expireAfterWrite(ConfigUtil.getTimeUnit(cfg,
"cache", def.name(), "maxAge",
age != null ? age : 0,
TimeUnit.SECONDS), TimeUnit.SECONDS);
builder.expireAfterWrite(
ConfigUtil.getTimeUnit(
cfg, "cache", def.name(), "maxAge", age != null ? age : 0, TimeUnit.SECONDS),
TimeUnit.SECONDS);
} else if (age != null) {
builder.expireAfterWrite(age, TimeUnit.SECONDS);
}

H2CacheFactory.java

@@ -31,11 +31,6 @@ import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import com.google.inject.TypeLiteral;
import org.eclipse.jgit.lib.Config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -46,11 +41,13 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.eclipse.jgit.lib.Config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
class H2CacheFactory implements PersistentCacheFactory, LifecycleListener {
private static final Logger log =
LoggerFactory.getLogger(H2CacheFactory.class);
private static final Logger log = LoggerFactory.getLogger(H2CacheFactory.class);
private final DefaultCacheFactory defaultFactory;
private final Config config;
@@ -77,17 +74,16 @@ class H2CacheFactory implements PersistentCacheFactory, LifecycleListener {
this.cacheMap = cacheMap;
if (cacheDir != null) {
executor = Executors.newFixedThreadPool(
1,
new ThreadFactoryBuilder()
.setNameFormat("DiskCache-Store-%d")
.build());
cleanup = Executors.newScheduledThreadPool(
1,
new ThreadFactoryBuilder()
.setNameFormat("DiskCache-Prune-%d")
.setDaemon(true)
.build());
executor =
Executors.newFixedThreadPool(
1, new ThreadFactoryBuilder().setNameFormat("DiskCache-Store-%d").build());
cleanup =
Executors.newScheduledThreadPool(
1,
new ThreadFactoryBuilder()
.setNameFormat("DiskCache-Prune-%d")
.setDaemon(true)
.build());
} else {
executor = null;
cleanup = null;
@@ -162,11 +158,14 @@ class H2CacheFactory implements PersistentCacheFactory, LifecycleListener {
return defaultFactory.build(def);
}
SqlStore<K, V> store = newSqlStore(def.name(), def.keyType(), limit,
def.expireAfterWrite(TimeUnit.SECONDS));
H2CacheImpl<K, V> cache = new H2CacheImpl<>(
executor, store, def.keyType(),
(Cache<K, ValueHolder<V>>) defaultFactory.create(def, true).build());
SqlStore<K, V> store =
newSqlStore(def.name(), def.keyType(), limit, def.expireAfterWrite(TimeUnit.SECONDS));
H2CacheImpl<K, V> cache =
new H2CacheImpl<>(
executor,
store,
def.keyType(),
(Cache<K, ValueHolder<V>>) defaultFactory.create(def, true).build());
synchronized (caches) {
caches.add(cache);
}
@@ -175,23 +174,21 @@ class H2CacheFactory implements PersistentCacheFactory, LifecycleListener {
@SuppressWarnings("unchecked")
@Override
public <K, V> LoadingCache<K, V> build(
CacheBinding<K, V> def,
CacheLoader<K, V> loader) {
public <K, V> LoadingCache<K, V> build(CacheBinding<K, V> def, CacheLoader<K, V> loader) {
long limit = config.getLong("cache", def.name(), "diskLimit", def.diskLimit());
if (cacheDir == null || limit <= 0) {
return defaultFactory.build(def, loader);
}
SqlStore<K, V> store = newSqlStore(def.name(), def.keyType(), limit,
def.expireAfterWrite(TimeUnit.SECONDS));
Cache<K, ValueHolder<V>> mem = (Cache<K, ValueHolder<V>>)
defaultFactory.create(def, true)
.build((CacheLoader<K, V>) new H2CacheImpl.Loader<>(
executor, store, loader));
H2CacheImpl<K, V> cache = new H2CacheImpl<>(
executor, store, def.keyType(), mem);
SqlStore<K, V> store =
newSqlStore(def.name(), def.keyType(), limit, def.expireAfterWrite(TimeUnit.SECONDS));
Cache<K, ValueHolder<V>> mem =
(Cache<K, ValueHolder<V>>)
defaultFactory
.create(def, true)
.build((CacheLoader<K, V>) new H2CacheImpl.Loader<>(executor, store, loader));
H2CacheImpl<K, V> cache = new H2CacheImpl<>(executor, store, def.keyType(), mem);
caches.add(cache);
return cache;
}
@@ -210,10 +207,7 @@ class H2CacheFactory implements PersistentCacheFactory, LifecycleListener {
}
private <V, K> SqlStore<K, V> newSqlStore(
String name,
TypeLiteral<K> keyType,
long maxSize,
Long expireAfterWrite) {
String name, TypeLiteral<K> keyType, long maxSize, Long expireAfterWrite) {
StringBuilder url = new StringBuilder();
url.append("jdbc:h2:").append(cacheDir.resolve(name).toUri());
if (h2CacheSize >= 0) {
@@ -224,7 +218,10 @@ class H2CacheFactory implements PersistentCacheFactory, LifecycleListener {
if (h2AutoServer) {
url.append(";AUTO_SERVER=TRUE");
}
return new SqlStore<>(url.toString(), keyType, maxSize,
return new SqlStore<>(
url.toString(),
keyType,
maxSize,
expireAfterWrite == null ? 0 : expireAfterWrite.longValue());
}
}

H2CacheImpl.java

@@ -28,11 +28,6 @@ import com.google.common.hash.PrimitiveSink;
import com.google.gerrit.common.TimeUtil;
import com.google.gerrit.server.cache.PersistentCache;
import com.google.inject.TypeLiteral;
import org.h2.jdbc.JdbcSQLException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InvalidClassException;
import java.io.ObjectOutputStream;
@@ -54,44 +49,44 @@ import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.h2.jdbc.JdbcSQLException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Hybrid in-memory and database backed cache built on H2.
* <p>
* This cache can be used as either a recall cache, or a loading cache if a
* CacheLoader was supplied to its constructor at build time. Before creating an
* entry the in-memory cache is checked for the item, then the database is
* checked, and finally the CacheLoader is used to construct the item. This is
* mostly useful for CacheLoaders that are computationally intensive, such as
* the PatchListCache.
* <p>
* Cache stores and invalidations are performed on a background thread, hiding
* the latency associated with serializing the key and value pairs and writing
* them to the database log.
* <p>
* A BloomFilter is used around the database to reduce the number of SELECTs
* issued against the database for new cache items that have not been seen
* before, a common operation for the PatchListCache. The BloomFilter is sized
* when the cache starts to be 64,000 entries or double the number of items
* currently in the database table.
* <p>
* This cache does not export its items as a ConcurrentMap.
*
* <p>This cache can be used as either a recall cache, or a loading cache if a CacheLoader was
* supplied to its constructor at build time. Before creating an entry the in-memory cache is
* checked for the item, then the database is checked, and finally the CacheLoader is used to
* construct the item. This is mostly useful for CacheLoaders that are computationally intensive,
* such as the PatchListCache.
*
* <p>Cache stores and invalidations are performed on a background thread, hiding the latency
* associated with serializing the key and value pairs and writing them to the database log.
*
* <p>A BloomFilter is used around the database to reduce the number of SELECTs issued against the
* database for new cache items that have not been seen before, a common operation for the
* PatchListCache. The BloomFilter is sized when the cache starts to be 64,000 entries or double the
* number of items currently in the database table.
*
* <p>This cache does not export its items as a ConcurrentMap.
*
* @see H2CacheFactory
*/
public class H2CacheImpl<K, V> extends AbstractLoadingCache<K, V> implements
PersistentCache {
public class H2CacheImpl<K, V> extends AbstractLoadingCache<K, V> implements PersistentCache {
private static final Logger log = LoggerFactory.getLogger(H2CacheImpl.class);
private static final ImmutableSet<String> OLD_CLASS_NAMES = ImmutableSet.of(
"com.google.gerrit.server.change.ChangeKind");
private static final ImmutableSet<String> OLD_CLASS_NAMES =
ImmutableSet.of("com.google.gerrit.server.change.ChangeKind");
private final Executor executor;
private final SqlStore<K, V> store;
private final TypeLiteral<K> keyType;
private final Cache<K, ValueHolder<V>> mem;
H2CacheImpl(Executor executor,
H2CacheImpl(
Executor executor,
SqlStore<K, V> store,
TypeLiteral<K> keyType,
Cache<K, ValueHolder<V>> mem) {
@@ -134,8 +129,7 @@ public class H2CacheImpl<K, V> extends AbstractLoadingCache<K, V> implements
}
@Override
public V get(K key, Callable<? extends V> valueLoader)
throws ExecutionException {
public V get(K key, Callable<? extends V> valueLoader) throws ExecutionException {
return mem.get(key, new LoadingCallable(key, valueLoader)).value;
}
@@ -287,8 +281,7 @@ public class H2CacheImpl<K, V> extends AbstractLoadingCache<K, V> implements
@Override
public void funnel(K from, PrimitiveSink into) {
try (ObjectOutputStream ser =
new ObjectOutputStream(new SinkOutputStream(into))) {
try (ObjectOutputStream ser = new ObjectOutputStream(new SinkOutputStream(into))) {
ser.writeObject(from);
ser.flush();
} catch (IOException err) {
@@ -307,30 +300,30 @@ public class H2CacheImpl<K, V> extends AbstractLoadingCache<K, V> implements
}
static final KeyType<?> OTHER = new KeyType<>();
static final KeyType<String> STRING = new KeyType<String>() {
@Override
String columnType() {
return "VARCHAR(4096)";
}
static final KeyType<String> STRING =
new KeyType<String>() {
@Override
String columnType() {
return "VARCHAR(4096)";
}
@Override
String get(ResultSet rs, int col) throws SQLException {
return rs.getString(col);
}
@Override
String get(ResultSet rs, int col) throws SQLException {
return rs.getString(col);
}
@Override
void set(PreparedStatement ps, int col, String value)
throws SQLException {
ps.setString(col, value);
}
@Override
void set(PreparedStatement ps, int col, String value) throws SQLException {
ps.setString(col, value);
}
@SuppressWarnings("unchecked")
@Override
Funnel<String> funnel() {
Funnel<?> s = Funnels.unencodedCharsFunnel();
return (Funnel<String>) s;
}
};
@SuppressWarnings("unchecked")
@Override
Funnel<String> funnel() {
Funnel<?> s = Funnels.unencodedCharsFunnel();
return (Funnel<String>) s;
}
};
}
static class SqlStore<K, V> {
@@ -344,8 +337,7 @@ public class H2CacheImpl<K, V> extends AbstractLoadingCache<K, V> implements
private volatile BloomFilter<K> bloomFilter;
private int estimatedSize;
SqlStore(String jdbcUrl, TypeLiteral<K> keyType, long maxSize,
long expireAfterWrite) {
SqlStore(String jdbcUrl, TypeLiteral<K> keyType, long maxSize, long expireAfterWrite) {
this.url = jdbcUrl;
this.keyType = KeyType.create(keyType);
this.maxSize = maxSize;
@@ -401,9 +393,11 @@ public class H2CacheImpl<K, V> extends AbstractLoadingCache<K, V> implements
}
} catch (JdbcSQLException e) {
if (e.getCause() instanceof InvalidClassException) {
log.warn("Entries cached for " + url
+ " have an incompatible class and can't be deserialized. "
+ "Cache is flushed.");
log.warn(
"Entries cached for "
+ url
+ " have an incompatible class and can't be deserialized. "
+ "Cache is flushed.");
invalidateAll();
} else {
throw e;
@@ -464,8 +458,7 @@ public class H2CacheImpl<K, V> extends AbstractLoadingCache<K, V> implements
private static boolean isOldClassNameError(Throwable t) {
for (Throwable c : Throwables.getCausalChain(t)) {
if (c instanceof ClassNotFoundException
&& OLD_CLASS_NAMES.contains(c.getMessage())) {
if (c instanceof ClassNotFoundException && OLD_CLASS_NAMES.contains(c.getMessage())) {
return true;
}
}
@@ -575,21 +568,22 @@ public class H2CacheImpl<K, V> extends AbstractLoadingCache<K, V> implements
c = acquire();
try (Statement s = c.conn.createStatement()) {
long used = 0;
try (ResultSet r = s.executeQuery("SELECT"
+ " SUM(OCTET_LENGTH(k) + OCTET_LENGTH(v))"
+ " FROM data")) {
try (ResultSet r =
s.executeQuery("SELECT" + " SUM(OCTET_LENGTH(k) + OCTET_LENGTH(v))" + " FROM data")) {
used = r.next() ? r.getLong(1) : 0;
}
if (used <= maxSize) {
return;
}
try (ResultSet r = s.executeQuery("SELECT"
+ " k"
+ ",OCTET_LENGTH(k) + OCTET_LENGTH(v)"
+ ",created"
+ " FROM data"
+ " ORDER BY accessed")) {
try (ResultSet r =
s.executeQuery(
"SELECT"
+ " k"
+ ",OCTET_LENGTH(k) + OCTET_LENGTH(v)"
+ ",created"
+ " FROM data"
+ " ORDER BY accessed")) {
while (maxSize < used && r.next()) {
K key = keyType.get(r, 1);
Timestamp created = r.getTimestamp(3);
@@ -617,10 +611,12 @@ public class H2CacheImpl<K, V> extends AbstractLoadingCache<K, V> implements
try {
c = acquire();
try (Statement s = c.conn.createStatement();
ResultSet r = s.executeQuery("SELECT"
+ " COUNT(*)"
+ ",SUM(OCTET_LENGTH(k) + OCTET_LENGTH(v))"
+ " FROM data")) {
ResultSet r =
s.executeQuery(
"SELECT"
+ " COUNT(*)"
+ ",SUM(OCTET_LENGTH(k) + OCTET_LENGTH(v))"
+ " FROM data")) {
if (r.next()) {
size = r.getLong(1);
space = r.getLong(2);
@@ -671,12 +667,15 @@ public class H2CacheImpl<K, V> extends AbstractLoadingCache<K, V> implements
this.url = url;
this.conn = org.h2.Driver.load().connect(url, null);
try (Statement stmt = conn.createStatement()) {
stmt.execute("CREATE TABLE IF NOT EXISTS data"
+ "(k " + type.columnType() + " NOT NULL PRIMARY KEY HASH"
+ ",v OTHER NOT NULL"
+ ",created TIMESTAMP NOT NULL"
+ ",accessed TIMESTAMP NOT NULL"
+ ")");
stmt.execute(
"CREATE TABLE IF NOT EXISTS data"
+ "(k "
+ type.columnType()
+ " NOT NULL PRIMARY KEY HASH"
+ ",v OTHER NOT NULL"
+ ",created TIMESTAMP NOT NULL"
+ ",accessed TIMESTAMP NOT NULL"
+ ")");
}
}
@@ -718,7 +717,7 @@ public class H2CacheImpl<K, V> extends AbstractLoadingCache<K, V> implements
@Override
public void write(int b) {
sink.putByte((byte)b);
sink.putByte((byte) b);
}
@Override

H2CacheTest.java

@@ -24,13 +24,11 @@ import com.google.common.util.concurrent.MoreExecutors;
import com.google.gerrit.server.cache.h2.H2CacheImpl.SqlStore;
import com.google.gerrit.server.cache.h2.H2CacheImpl.ValueHolder;
import com.google.inject.TypeLiteral;
import org.junit.Before;
import org.junit.Test;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicBoolean;
import org.junit.Before;
import org.junit.Test;
public class H2CacheTest {
private static int dbCnt;
@@ -43,13 +41,9 @@ public class H2CacheTest {
mem = CacheBuilder.newBuilder().build();
TypeLiteral<String> keyType = new TypeLiteral<String>() {};
SqlStore<String, Boolean> store = new SqlStore<>(
"jdbc:h2:mem:" + "Test_" + (++dbCnt),
keyType,
1 << 20,
0);
impl =
new H2CacheImpl<>(MoreExecutors.directExecutor(), store, keyType, mem);
SqlStore<String, Boolean> store =
new SqlStore<>("jdbc:h2:mem:" + "Test_" + (++dbCnt), keyType, 1 << 20, 0);
impl = new H2CacheImpl<>(MoreExecutors.directExecutor(), store, keyType, mem);
}
@Test
@@ -57,26 +51,32 @@ public class H2CacheTest {
assertNull(impl.getIfPresent("foo"));
final AtomicBoolean called = new AtomicBoolean();
assertTrue(impl.get("foo", new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
called.set(true);
return true;
}
}));
assertTrue(
impl.get(
"foo",
new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
called.set(true);
return true;
}
}));
assertTrue("used Callable", called.get());
assertTrue("exists in cache", impl.getIfPresent("foo"));
mem.invalidate("foo");
assertTrue("exists on disk", impl.getIfPresent("foo"));
called.set(false);
assertTrue(impl.get("foo", new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
called.set(true);
return true;
}
}));
assertTrue(
impl.get(
"foo",
new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
called.set(true);
return true;
}
}));
assertFalse("did not invoke Callable", called.get());
}
}