Use Guice to construct the self-populating caches

Rather than explicitly coding new SelfPopulatingCache, configure
each cache with a binding to its EntryCreator.  This way we can
more easily rebind what a self-populating cache means in another
server context, such as a cluster.

Change-Id: Icf9645fe46876af6b9c8dba0357155c07217d4b2
Signed-off-by: Shawn O. Pearce <sop@google.com>
(cherry picked from commit 0f5418c9eb)
Change-Id: Iecf9338b80729c828138d9d2dba211d86182667f
This commit is contained in:
Shawn O. Pearce 2010-06-09 10:14:56 -07:00
parent 623e49556f
commit 65b21e02ce
20 changed files with 1301 additions and 1222 deletions

View File

@ -250,7 +250,6 @@ except when the cache is full.
Default is `90 days` for most caches, except:
+
* `"ldap_groups"`: default is `1 hour`
* `"openid"`: default is `5 minutes`
* `"web_sessions"`: default is `12 hours`
[[cache.name.memoryLimit]]cache.<name>.memoryLimit::
@ -261,7 +260,6 @@ this is total number of items, not bytes of heap used.
Default is 1024 for most caches, except:
+
* `"diff"`: default is `128`
* `"openid"`: default is `64`
[[cache.name.diskLimit]]cache.<name>.diskLimit::
+
@ -345,13 +343,6 @@ Caches a mapping of LDAP username to Gerrit account identity. The
cache automatically updates when a user first creates their account
within Gerrit, so the cache expire time is largely irrelevant.
cache `"openid"`::
+
If OpenID authentication is enabled, caches the OpenID discovery
response by URL, for up to 5 minutes. This can reduce the time
required for OpenID authentication through very common providers,
such as Google Accounts.
cache `"projects"`::
+
Caches the project description records, from the `projects` table

View File

@ -14,33 +14,13 @@
package com.google.gerrit.httpd.auth.openid;
import static java.util.concurrent.TimeUnit.MINUTES;
import com.google.gerrit.httpd.rpc.RpcServletModule;
import com.google.gerrit.server.cache.Cache;
import com.google.gerrit.server.cache.CacheModule;
import com.google.inject.TypeLiteral;
import com.google.inject.servlet.ServletModule;
import java.util.List;
/** Servlets and RPC support related to OpenID authentication. */
public class OpenIdModule extends ServletModule {
@Override
protected void configureServlets() {
install(new CacheModule() {
@SuppressWarnings("unchecked")
@Override
protected void configure() {
final TypeLiteral<Cache<String, List>> type =
new TypeLiteral<Cache<String, List>>() {};
core(type, "openid") //
.maxAge(5, MINUTES) // don't cache too long, might be stale
.memoryLimit(64) // short TTL means we won't have many entries
;
}
});
serve("/" + OpenIdServiceImpl.RETURN_URL).with(OpenIdLoginServlet.class);
serve("/" + XrdsServlet.LOCATION).with(XrdsServlet.class);
filter("/").through(XrdsFilter.class);

View File

@ -26,8 +26,6 @@ import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.UrlEncoded;
import com.google.gerrit.server.account.AccountException;
import com.google.gerrit.server.account.AccountManager;
import com.google.gerrit.server.cache.Cache;
import com.google.gerrit.server.cache.SelfPopulatingCache;
import com.google.gerrit.server.config.AuthConfig;
import com.google.gerrit.server.config.CanonicalWebUrl;
import com.google.gerrit.server.config.ConfigUtil;
@ -37,7 +35,6 @@ import com.google.gwtorm.client.KeyUtil;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
import org.eclipse.jgit.lib.Config;
import org.openid4java.consumer.ConsumerException;
@ -104,7 +101,6 @@ class OpenIdServiceImpl implements OpenIdService {
private final AccountManager accountManager;
private final ConsumerManager manager;
private final List<OpenIdProviderPattern> allowedOpenIDs;
private final SelfPopulatingCache<String, List> discoveryCache;
/** Maximum age, in seconds, before forcing re-authentication of account. */
private final int papeMaxAuthAge;
@ -113,7 +109,6 @@ class OpenIdServiceImpl implements OpenIdService {
OpenIdServiceImpl(final Provider<WebSession> cf,
final Provider<IdentifiedUser> iu,
@CanonicalWebUrl @Nullable final Provider<String> up,
@Named("openid") final Cache<String, List> openidCache,
@GerritServerConfig final Config config, final AuthConfig ac,
final AccountManager am) throws ConsumerException, MalformedURLException {
@ -149,19 +144,6 @@ class OpenIdServiceImpl implements OpenIdService {
allowedOpenIDs = ac.getAllowedOpenIDs();
papeMaxAuthAge = (int) ConfigUtil.getTimeUnit(config, //
"auth", null, "maxOpenIdSessionAge", -1, TimeUnit.SECONDS);
discoveryCache = new SelfPopulatingCache<String, List>(openidCache) {
@Override
protected List createEntry(final String url) throws Exception {
try {
final List<?> list = manager.discover(url);
return list != null && !list.isEmpty() ? list : null;
} catch (DiscoveryException e) {
log.error("Cannot discover OpenID " + url, e);
return null;
}
}
};
}
public void discover(final String openidIdentifier, final SignInMode mode,
@ -522,7 +504,13 @@ class OpenIdServiceImpl implements OpenIdService {
private State init(final String openidIdentifier, final SignInMode mode,
final boolean remember, final String returnToken) {
final List<?> list = discoveryCache.get(openidIdentifier);
final List<?> list;
try {
list = manager.discover(openidIdentifier);
} catch (DiscoveryException e) {
log.error("Cannot discover OpenID " + openidIdentifier, e);
return null;
}
if (list == null || list.isEmpty()) {
return null;
}

View File

@ -19,8 +19,7 @@ import com.google.gerrit.reviewdb.AccountExternalId;
import com.google.gerrit.reviewdb.ReviewDb;
import com.google.gerrit.server.cache.Cache;
import com.google.gerrit.server.cache.CacheModule;
import com.google.gerrit.server.cache.SelfPopulatingCache;
import com.google.gwtorm.client.OrmException;
import com.google.gerrit.server.cache.EntryCreator;
import com.google.gwtorm.client.SchemaFactory;
import com.google.inject.Inject;
import com.google.inject.Module;
@ -43,69 +42,73 @@ public class AccountByEmailCacheImpl implements AccountByEmailCache {
protected void configure() {
final TypeLiteral<Cache<String, Set<Account.Id>>> type =
new TypeLiteral<Cache<String, Set<Account.Id>>>() {};
core(type, CACHE_NAME);
core(type, CACHE_NAME).populateWith(Loader.class);
bind(AccountByEmailCacheImpl.class);
bind(AccountByEmailCache.class).to(AccountByEmailCacheImpl.class);
}
};
}
private final SchemaFactory<ReviewDb> schema;
private final SelfPopulatingCache<String, Set<Account.Id>> self;
private final Cache<String, Set<Account.Id>> cache;
@Inject
AccountByEmailCacheImpl(final SchemaFactory<ReviewDb> schema,
@Named(CACHE_NAME) final Cache<String, Set<Account.Id>> rawCache) {
this.schema = schema;
this.self = new SelfPopulatingCache<String, Set<Account.Id>>(rawCache) {
@Override
protected Set<Account.Id> createEntry(final String key) throws Exception {
return lookup(key);
}
@Override
protected Set<Account.Id> missing(final String key) {
return Collections.emptySet();
}
};
}
private Set<Account.Id> lookup(final String email) throws OrmException {
final ReviewDb db = schema.open();
try {
final HashSet<Account.Id> r = new HashSet<Account.Id>();
for (Account a : db.accounts().byPreferredEmail(email)) {
r.add(a.getId());
}
for (AccountExternalId a : db.accountExternalIds().byEmailAddress(email)) {
r.add(a.getAccountId());
}
return pack(r);
} finally {
db.close();
}
AccountByEmailCacheImpl(
@Named(CACHE_NAME) final Cache<String, Set<Account.Id>> cache) {
this.cache = cache;
}
public Set<Account.Id> get(final String email) {
return self.get(email);
return cache.get(email);
}
public void evict(final String email) {
self.remove(email);
cache.remove(email);
}
private static Set<Account.Id> pack(final Set<Account.Id> c) {
switch (c.size()) {
case 0:
return Collections.emptySet();
case 1:
return one(c);
default:
return Collections.unmodifiableSet(new HashSet<Account.Id>(c));
static class Loader extends EntryCreator<String, Set<Account.Id>> {
private final SchemaFactory<ReviewDb> schema;
@Inject
Loader(final SchemaFactory<ReviewDb> schema) {
this.schema = schema;
}
@Override
public Set<Account.Id> createEntry(final String email) throws Exception {
final ReviewDb db = schema.open();
try {
final HashSet<Account.Id> r = new HashSet<Account.Id>();
for (Account a : db.accounts().byPreferredEmail(email)) {
r.add(a.getId());
}
for (AccountExternalId a : db.accountExternalIds()
.byEmailAddress(email)) {
r.add(a.getAccountId());
}
return pack(r);
} finally {
db.close();
}
}
@Override
public Set<Account.Id> missing(final String key) {
return Collections.emptySet();
}
private static Set<Account.Id> pack(final Set<Account.Id> c) {
switch (c.size()) {
case 0:
return Collections.emptySet();
case 1:
return one(c);
default:
return Collections.unmodifiableSet(new HashSet<Account.Id>(c));
}
}
private static <T> Set<T> one(final Set<T> c) {
return Collections.singleton(c.iterator().next());
}
}
private static <T> Set<T> one(final Set<T> c) {
return Collections.singleton(c.iterator().next());
}
}

View File

@ -21,7 +21,7 @@ import com.google.gerrit.reviewdb.AccountGroupMember;
import com.google.gerrit.reviewdb.ReviewDb;
import com.google.gerrit.server.cache.Cache;
import com.google.gerrit.server.cache.CacheModule;
import com.google.gerrit.server.cache.SelfPopulatingCache;
import com.google.gerrit.server.cache.EntryCreator;
import com.google.gerrit.server.config.AuthConfig;
import com.google.gwtorm.client.OrmException;
import com.google.gwtorm.client.SchemaFactory;
@ -39,119 +39,35 @@ import java.util.Set;
/** Caches important (but small) account state to avoid database hits. */
@Singleton
public class AccountCacheImpl implements AccountCache {
private static final String CACHE_NAME = "accounts";
private static final String BYID_NAME = "accounts";
private static final String BYUSER_NAME = "accounts_byname";
public static Module module() {
return new CacheModule() {
@Override
protected void configure() {
final TypeLiteral<Cache<Object, AccountState>> type =
new TypeLiteral<Cache<Object, AccountState>>() {};
core(type, CACHE_NAME);
final TypeLiteral<Cache<Account.Id, AccountState>> byIdType =
new TypeLiteral<Cache<Account.Id, AccountState>>() {};
core(byIdType, BYID_NAME).populateWith(ByIdLoader.class);
final TypeLiteral<Cache<String, Account.Id>> byUsernameType =
new TypeLiteral<Cache<String, Account.Id>>() {};
core(byUsernameType, BYUSER_NAME).populateWith(ByNameLoader.class);
bind(AccountCacheImpl.class);
bind(AccountCache.class).to(AccountCacheImpl.class);
}
};
}
private final SchemaFactory<ReviewDb> schema;
private final GroupCache groupCache;
private final SelfPopulatingCache<Account.Id, AccountState> byId;
private final SelfPopulatingCache<String, Account.Id> byName;
private final Set<AccountGroup.Id> registered;
private final Set<AccountGroup.Id> anonymous;
private final Cache<Account.Id, AccountState> byId;
private final Cache<String, Account.Id> byName;
@Inject
AccountCacheImpl(final SchemaFactory<ReviewDb> sf, final AuthConfig auth,
final GroupCache groupCache,
@Named(CACHE_NAME) final Cache<Object, AccountState> rawCache) {
schema = sf;
registered = auth.getRegisteredGroups();
anonymous = auth.getAnonymousGroups();
this.groupCache = groupCache;
byId = new SelfPopulatingCache<Account.Id, AccountState>((Cache) rawCache) {
@Override
protected AccountState createEntry(Account.Id key) throws Exception {
return lookup(key);
}
@Override
protected AccountState missing(final Account.Id key) {
return missingAccount(key);
}
};
byName = new SelfPopulatingCache<String, Account.Id>((Cache) rawCache) {
@Override
protected Account.Id createEntry(String username) throws Exception {
return lookup(username);
}
};
}
private AccountState lookup(final Account.Id who) throws OrmException {
final ReviewDb db = schema.open();
try {
final AccountState state = load(db, who);
if (state.getUserName() != null) {
byName.put(state.getUserName(), state.getAccount().getId());
}
return state;
} finally {
db.close();
}
}
private Account.Id lookup(final String username) throws OrmException {
final ReviewDb db = schema.open();
try {
final AccountExternalId.Key key =
new AccountExternalId.Key(AccountExternalId.SCHEME_USERNAME, username);
final AccountExternalId id = db.accountExternalIds().get(key);
return id != null ? id.getAccountId() : null;
} finally {
db.close();
}
}
private AccountState load(final ReviewDb db, final Account.Id who)
throws OrmException {
final Account account = db.accounts().get(who);
if (account == null) {
// Account no longer exists? They are anonymous.
//
return missingAccount(who);
}
final Collection<AccountExternalId> externalIds =
Collections.unmodifiableCollection(db.accountExternalIds().byAccount(
who).toList());
Set<AccountGroup.Id> internalGroups = new HashSet<AccountGroup.Id>();
for (AccountGroupMember g : db.accountGroupMembers().byAccount(who)) {
final AccountGroup.Id groupId = g.getAccountGroupId();
final AccountGroup group = groupCache.get(groupId);
if (group != null && group.getType() == AccountGroup.Type.INTERNAL) {
internalGroups.add(groupId);
}
}
if (internalGroups.isEmpty()) {
internalGroups = registered;
} else {
internalGroups.addAll(registered);
internalGroups = Collections.unmodifiableSet(internalGroups);
}
return new AccountState(account, internalGroups, externalIds);
}
private AccountState missingAccount(final Account.Id accountId) {
final Account account = new Account(accountId);
final Collection<AccountExternalId> ids = Collections.emptySet();
return new AccountState(account, anonymous, ids);
AccountCacheImpl(@Named(BYID_NAME) Cache<Account.Id, AccountState> byId,
@Named(BYUSER_NAME) Cache<String, Account.Id> byUsername) {
this.byId = byId;
this.byName = byUsername;
}
public AccountState get(final Account.Id accountId) {
@ -161,7 +77,7 @@ public class AccountCacheImpl implements AccountCache {
@Override
public AccountState getByUsername(String username) {
Account.Id id = byName.get(username);
return id != null ? get(id) : null;
return id != null ? byId.get(id) : null;
}
public void evict(final Account.Id accountId) {
@ -171,4 +87,99 @@ public class AccountCacheImpl implements AccountCache {
public void evictByUsername(String username) {
byName.remove(username);
}
static class ByIdLoader extends EntryCreator<Account.Id, AccountState> {
private final SchemaFactory<ReviewDb> schema;
private final Set<AccountGroup.Id> registered;
private final Set<AccountGroup.Id> anonymous;
private final GroupCache groupCache;
private final Cache<String, Account.Id> byName;
@Inject
ByIdLoader(SchemaFactory<ReviewDb> sf, AuthConfig auth,
GroupCache groupCache,
@Named(BYUSER_NAME) Cache<String, Account.Id> byUsername) {
this.schema = sf;
this.registered = auth.getRegisteredGroups();
this.anonymous = auth.getAnonymousGroups();
this.groupCache = groupCache;
this.byName = byUsername;
}
@Override
public AccountState createEntry(final Account.Id key) throws Exception {
final ReviewDb db = schema.open();
try {
final AccountState state = load(db, key);
if (state.getUserName() != null) {
byName.put(state.getUserName(), state.getAccount().getId());
}
return state;
} finally {
db.close();
}
}
private AccountState load(final ReviewDb db, final Account.Id who)
throws OrmException {
final Account account = db.accounts().get(who);
if (account == null) {
// Account no longer exists? They are anonymous.
//
return missing(who);
}
final Collection<AccountExternalId> externalIds =
Collections.unmodifiableCollection(db.accountExternalIds().byAccount(
who).toList());
Set<AccountGroup.Id> internalGroups = new HashSet<AccountGroup.Id>();
for (AccountGroupMember g : db.accountGroupMembers().byAccount(who)) {
final AccountGroup.Id groupId = g.getAccountGroupId();
final AccountGroup group = groupCache.get(groupId);
if (group != null && group.getType() == AccountGroup.Type.INTERNAL) {
internalGroups.add(groupId);
}
}
if (internalGroups.isEmpty()) {
internalGroups = registered;
} else {
internalGroups.addAll(registered);
internalGroups = Collections.unmodifiableSet(internalGroups);
}
return new AccountState(account, internalGroups, externalIds);
}
@Override
public AccountState missing(final Account.Id accountId) {
final Account account = new Account(accountId);
final Collection<AccountExternalId> ids = Collections.emptySet();
return new AccountState(account, anonymous, ids);
}
}
static class ByNameLoader extends EntryCreator<String, Account.Id> {
private final SchemaFactory<ReviewDb> schema;
@Inject
ByNameLoader(final SchemaFactory<ReviewDb> sf) {
this.schema = sf;
}
@Override
public Account.Id createEntry(final String username) throws Exception {
final ReviewDb db = schema.open();
try {
final AccountExternalId.Key key = new AccountExternalId.Key( //
AccountExternalId.SCHEME_USERNAME, //
username);
final AccountExternalId id = db.accountExternalIds().get(key);
return id != null ? id.getAccountId() : null;
} finally {
db.close();
}
}
}
}

View File

@ -19,9 +19,8 @@ import com.google.gerrit.reviewdb.AccountGroupName;
import com.google.gerrit.reviewdb.ReviewDb;
import com.google.gerrit.server.cache.Cache;
import com.google.gerrit.server.cache.CacheModule;
import com.google.gerrit.server.cache.SelfPopulatingCache;
import com.google.gerrit.server.cache.EntryCreator;
import com.google.gerrit.server.config.AuthConfig;
import com.google.gwtorm.client.OrmException;
import com.google.gwtorm.client.SchemaFactory;
import com.google.inject.Inject;
import com.google.inject.Module;
@ -34,114 +33,45 @@ import java.util.Collection;
/** Tracks group objects in memory for efficient access. */
@Singleton
public class GroupCacheImpl implements GroupCache {
private static final String CACHE_NAME = "groups";
private static final String BYID_NAME = "groups";
private static final String BYNAME_NAME = "groups_byname";
private static final String BYEXT_NAME = "groups_byext";
public static Module module() {
return new CacheModule() {
@Override
protected void configure() {
final TypeLiteral<Cache<com.google.gwtorm.client.Key<?>, AccountGroup>> byId =
new TypeLiteral<Cache<com.google.gwtorm.client.Key<?>, AccountGroup>>() {};
core(byId, CACHE_NAME);
final TypeLiteral<Cache<AccountGroup.Id, AccountGroup>> byId =
new TypeLiteral<Cache<AccountGroup.Id, AccountGroup>>() {};
core(byId, BYID_NAME).populateWith(ByIdLoader.class);
final TypeLiteral<Cache<AccountGroup.NameKey, AccountGroup>> byName =
new TypeLiteral<Cache<AccountGroup.NameKey, AccountGroup>>() {};
core(byName, BYNAME_NAME).populateWith(ByNameLoader.class);
final TypeLiteral<Cache<AccountGroup.ExternalNameKey, Collection<AccountGroup>>> byExternalName =
new TypeLiteral<Cache<AccountGroup.ExternalNameKey, Collection<AccountGroup>>>() {};
core(byExternalName, BYEXT_NAME) //
.populateWith(ByExternalNameLoader.class);
bind(GroupCacheImpl.class);
bind(GroupCache.class).to(GroupCacheImpl.class);
}
};
}
private final SchemaFactory<ReviewDb> schema;
private final AccountGroup.Id administrators;
private final SelfPopulatingCache<AccountGroup.Id, AccountGroup> byId;
private final SelfPopulatingCache<AccountGroup.NameKey, AccountGroup> byName;
private final SelfPopulatingCache<AccountGroup.ExternalNameKey, Collection<AccountGroup>> byExternalName;
private final Cache<AccountGroup.Id, AccountGroup> byId;
private final Cache<AccountGroup.NameKey, AccountGroup> byName;
private final Cache<AccountGroup.ExternalNameKey, Collection<AccountGroup>> byExternalName;
@Inject
GroupCacheImpl(
final SchemaFactory<ReviewDb> sf,
final AuthConfig authConfig,
@Named(CACHE_NAME) final Cache<com.google.gwtorm.client.Key<?>, AccountGroup> rawAny) {
schema = sf;
administrators = authConfig.getAdministratorsGroup();
byId =
new SelfPopulatingCache<AccountGroup.Id, AccountGroup>((Cache) rawAny) {
@Override
public AccountGroup createEntry(final AccountGroup.Id key)
throws Exception {
return lookup(key);
}
@Override
protected AccountGroup missing(final AccountGroup.Id key) {
return missingGroup(key);
}
};
byName =
new SelfPopulatingCache<AccountGroup.NameKey, AccountGroup>(
(Cache) rawAny) {
@Override
public AccountGroup createEntry(final AccountGroup.NameKey key)
throws Exception {
return lookup(key);
}
};
byExternalName =
new SelfPopulatingCache<AccountGroup.ExternalNameKey, Collection<AccountGroup>>(
(Cache) rawAny) {
@Override
public Collection<AccountGroup> createEntry(
final AccountGroup.ExternalNameKey key) throws Exception {
return lookup(key);
}
};
}
private AccountGroup lookup(final AccountGroup.Id groupId)
throws OrmException {
final ReviewDb db = schema.open();
try {
final AccountGroup group = db.accountGroups().get(groupId);
if (group != null) {
return group;
} else {
return missingGroup(groupId);
}
} finally {
db.close();
}
}
private AccountGroup missingGroup(final AccountGroup.Id groupId) {
final AccountGroup.NameKey name =
new AccountGroup.NameKey("Deleted Group" + groupId.toString());
final AccountGroup g = new AccountGroup(name, groupId);
g.setType(AccountGroup.Type.SYSTEM);
g.setOwnerGroupId(administrators);
return g;
}
private AccountGroup lookup(final AccountGroup.NameKey name)
throws OrmException {
final AccountGroupName r;
final ReviewDb db = schema.open();
try {
r = db.accountGroupNames().get(name);
} finally {
db.close();
}
return r != null ? get(r.getId()) : null;
}
private Collection<AccountGroup> lookup(
final AccountGroup.ExternalNameKey name) throws OrmException {
final ReviewDb db = schema.open();
try {
return db.accountGroups().byExternalName(name).toList();
} finally {
db.close();
}
@Named(BYID_NAME) Cache<AccountGroup.Id, AccountGroup> byId,
@Named(BYNAME_NAME) Cache<AccountGroup.NameKey, AccountGroup> byName,
@Named(BYEXT_NAME) Cache<AccountGroup.ExternalNameKey, Collection<AccountGroup>> byExternalName) {
this.byId = byId;
this.byName = byName;
this.byExternalName = byExternalName;
}
public AccountGroup get(final AccountGroup.Id groupId) {
@ -166,4 +96,88 @@ public class GroupCacheImpl implements GroupCache {
final AccountGroup.ExternalNameKey externalName) {
return byExternalName.get(externalName);
}
static class ByIdLoader extends EntryCreator<AccountGroup.Id, AccountGroup> {
private final SchemaFactory<ReviewDb> schema;
private final AccountGroup.Id administrators;
@Inject
ByIdLoader(final SchemaFactory<ReviewDb> sf, final AuthConfig authConfig) {
schema = sf;
administrators = authConfig.getAdministratorsGroup();
}
@Override
public AccountGroup createEntry(final AccountGroup.Id key) throws Exception {
final ReviewDb db = schema.open();
try {
final AccountGroup group = db.accountGroups().get(key);
if (group != null) {
return group;
} else {
return missing(key);
}
} finally {
db.close();
}
}
@Override
public AccountGroup missing(final AccountGroup.Id key) {
final AccountGroup.NameKey name =
new AccountGroup.NameKey("Deleted Group" + key.toString());
final AccountGroup g = new AccountGroup(name, key);
g.setType(AccountGroup.Type.SYSTEM);
g.setOwnerGroupId(administrators);
return g;
}
}
static class ByNameLoader extends
EntryCreator<AccountGroup.NameKey, AccountGroup> {
private final SchemaFactory<ReviewDb> schema;
@Inject
ByNameLoader(final SchemaFactory<ReviewDb> sf) {
schema = sf;
}
@Override
public AccountGroup createEntry(final AccountGroup.NameKey key)
throws Exception {
final AccountGroupName r;
final ReviewDb db = schema.open();
try {
r = db.accountGroupNames().get(key);
if (r != null) {
return db.accountGroups().get(r.getId());
} else {
return null;
}
} finally {
db.close();
}
}
}
static class ByExternalNameLoader extends
EntryCreator<AccountGroup.ExternalNameKey, Collection<AccountGroup>> {
private final SchemaFactory<ReviewDb> schema;
@Inject
ByExternalNameLoader(final SchemaFactory<ReviewDb> sf) {
schema = sf;
}
@Override
public Collection<AccountGroup> createEntry(
final AccountGroup.ExternalNameKey key) throws Exception {
final ReviewDb db = schema.open();
try {
return db.accountGroups().byExternalName(key).toList();
} finally {
db.close();
}
}
}
}

View File

@ -0,0 +1,316 @@
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.auth.ldap;
import com.google.gerrit.common.data.ParamertizedString;
import com.google.gerrit.reviewdb.AccountGroup;
import com.google.gerrit.server.account.AccountException;
import com.google.gerrit.server.account.GroupCache;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.util.ssl.BlindSSLSocketFactory;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import org.eclipse.jgit.lib.Config;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import javax.naming.Context;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.directory.Attribute;
import javax.naming.directory.DirContext;
import javax.naming.directory.InitialDirContext;
import javax.net.ssl.SSLSocketFactory;
/**
 * Shared helper for opening connections to, and running queries against,
 * the configured LDAP server.
 * <p>
 * Connection settings (server URL, optional bind credentials, referral
 * handling, SSL verification) are read from the {@code ldap} section of the
 * Gerrit server configuration. The discovered {@link LdapSchema} is built
 * lazily on first use and cached for the lifetime of this singleton.
 */
@Singleton class Helper {
  private final GroupCache groupCache;
  private final Config config;
  private final String server;    // required ldap.server URL
  private final String username;  // optional bind identity; null = anonymous
  private final String password;  // optional bind password
  private final String referral;  // JNDI referral mode; null defaults to "ignore"
  private final boolean sslVerify; // ldap.sslverify, defaults to true

  // Lazily created by getSchema() using double-checked locking; declared
  // volatile so the fully constructed instance is safely published to all
  // threads.
  private volatile LdapSchema ldapSchema;

  @Inject
  Helper(@GerritServerConfig final Config config, final GroupCache groupCache) {
    this.groupCache = groupCache;
    this.config = config;
    this.server = LdapRealm.required(config, "server");
    this.username = LdapRealm.optional(config, "username");
    this.password = LdapRealm.optional(config, "password");
    this.referral = LdapRealm.optional(config, "referral");
    this.sslVerify = config.getBoolean("ldap", "sslverify", true);
  }

  /** Builds the base JNDI environment shared by every connection attempt. */
  private Properties createContextProperties() {
    final Properties env = new Properties();
    env.put(Context.INITIAL_CONTEXT_FACTORY, LdapRealm.LDAP);
    env.put(Context.PROVIDER_URL, server);
    if (server.startsWith("ldaps:") && !sslVerify) {
      // Certificate verification was explicitly disabled in the config:
      // install the blind socket factory so any server certificate is
      // accepted.
      Class<? extends SSLSocketFactory> factory = BlindSSLSocketFactory.class;
      env.put("java.naming.ldap.factory.socket", factory.getName());
    }
    return env;
  }

  /**
   * Opens a directory context bound with the configured service credentials,
   * or an anonymous context when no {@code ldap.username} was configured.
   *
   * @throws NamingException the connection or the simple bind failed.
   */
  DirContext open() throws NamingException {
    final Properties env = createContextProperties();
    if (username != null) {
      env.put(Context.SECURITY_AUTHENTICATION, "simple");
      env.put(Context.SECURITY_PRINCIPAL, username);
      env.put(Context.SECURITY_CREDENTIALS, password != null ? password : "");
      env.put(Context.REFERRAL, referral != null ? referral : "ignore");
    }
    return new InitialDirContext(env);
  }

  /**
   * Authenticates an end user by performing a simple bind as their DN.
   *
   * @param dn distinguished name to bind as.
   * @param password the user supplied password; null binds with "".
   * @return an open context bound as the user.
   * @throws AccountException the bind was rejected by the server; the
   *         underlying {@link NamingException} is preserved as the cause.
   */
  DirContext authenticate(String dn, String password) throws AccountException {
    final Properties env = createContextProperties();
    env.put(Context.SECURITY_AUTHENTICATION, "simple");
    env.put(Context.SECURITY_PRINCIPAL, dn);
    env.put(Context.SECURITY_CREDENTIALS, password != null ? password : "");
    env.put(Context.REFERRAL, referral != null ? referral : "ignore");
    try {
      return new InitialDirContext(env);
    } catch (NamingException e) {
      throw new AccountException("Incorrect username or password", e);
    }
  }

  /**
   * Returns the (lazily discovered) schema description for the server.
   * <p>
   * Uses double-checked locking on the volatile {@link #ldapSchema} field so
   * the schema is probed at most once, without synchronizing on the common
   * already-initialized path.
   */
  LdapSchema getSchema(DirContext ctx) {
    if (ldapSchema == null) {
      synchronized (this) {
        if (ldapSchema == null) {
          ldapSchema = new LdapSchema(ctx);
        }
      }
    }
    return ldapSchema;
  }

  /**
   * Locates exactly one account record for the given username.
   *
   * @throws AccountException no account matched, or more than one matched
   *         across the configured account queries.
   * @throws NamingException a query against the directory failed.
   */
  LdapQuery.Result findAccount(final Helper.LdapSchema schema,
      final DirContext ctx, final String username) throws NamingException,
      AccountException {
    final HashMap<String, String> params = new HashMap<String, String>();
    params.put(LdapRealm.USERNAME, username);

    // Run every configured account query and pool the results; success
    // requires exactly one match overall.
    final List<LdapQuery.Result> res = new ArrayList<LdapQuery.Result>();
    for (LdapQuery accountQuery : schema.accountQueryList) {
      res.addAll(accountQuery.query(ctx, params));
    }

    switch (res.size()) {
      case 0:
        throw new AccountException("No such user:" + username);

      case 1:
        return res.get(0);

      default:
        throw new AccountException("Duplicate users: " + username);
    }
  }

  /**
   * Resolves the set of Gerrit group ids the user belongs to, by combining
   * the configured group member queries with the account's own member
   * attribute (e.g. memberOf), expanding nested groups recursively, and then
   * keeping only groups registered in Gerrit with type LDAP.
   *
   * @param account a previously located account record, or null; it is
   *        looked up on demand only if a query actually needs it.
   */
  Set<AccountGroup.Id> queryForGroups(final DirContext ctx,
      final String username, LdapQuery.Result account)
      throws NamingException, AccountException {
    final LdapSchema schema = getSchema(ctx);
    final Set<String> groupDNs = new HashSet<String>();

    if (!schema.groupMemberQueryList.isEmpty()) {
      final HashMap<String, String> params = new HashMap<String, String>();

      if (schema.groupNeedsAccount) {
        // The member query references account attributes beyond the
        // username, so the account record must be loaded first.
        if (account == null) {
          account = findAccount(schema, ctx, username);
        }
        for (String name : schema.groupMemberQueryList.get(0).getParameters()) {
          params.put(name, account.get(name));
        }
      }

      params.put(LdapRealm.USERNAME, username);

      for (LdapQuery groupMemberQuery : schema.groupMemberQueryList) {
        for (LdapQuery.Result r : groupMemberQuery.query(ctx, params)) {
          recursivelyExpandGroups(groupDNs, schema, ctx, r.getDN());
        }
      }
    }

    if (schema.accountMemberField != null) {
      if (account == null) {
        account = findAccount(schema, ctx, username);
      }

      // Walk the account's own group membership attribute as well.
      final Attribute groupAtt = account.getAll(schema.accountMemberField);
      if (groupAtt != null) {
        final NamingEnumeration<?> groups = groupAtt.getAll();
        while (groups.hasMore()) {
          final String nextDN = (String) groups.next();
          recursivelyExpandGroups(groupDNs, schema, ctx, nextDN);
        }
      }
    }

    // Map the discovered DNs onto Gerrit groups, keeping LDAP-type ones.
    final Set<AccountGroup.Id> actual = new HashSet<AccountGroup.Id>();
    for (String dn : groupDNs) {
      for (AccountGroup group : groupCache
          .get(new AccountGroup.ExternalNameKey(dn))) {
        if (group.getType() == AccountGroup.Type.LDAP) {
          actual.add(group.getId());
        }
      }
    }

    if (actual.isEmpty()) {
      return Collections.emptySet();
    } else {
      return Collections.unmodifiableSet(actual);
    }
  }

  /**
   * Adds groupDN to the set and, if it is new and a member field is
   * configured, follows that group's own memberships; the set doubles as the
   * visited marker, so membership cycles terminate. Lookup failures are
   * logged and skipped rather than aborting the whole expansion.
   */
  private void recursivelyExpandGroups(final Set<String> groupDNs,
      final LdapSchema schema, final DirContext ctx, final String groupDN) {
    if (groupDNs.add(groupDN) && schema.accountMemberField != null) {
      // Recursively identify the groups it is a member of.
      //
      try {
        final Attribute in =
            ctx.getAttributes(groupDN).get(schema.accountMemberField);
        if (in != null) {
          final NamingEnumeration<?> groups = in.getAll();
          while (groups.hasMore()) {
            final String nextDN = (String) groups.next();
            recursivelyExpandGroups(groupDNs, schema, ctx, nextDN);
          }
        }
      } catch (NamingException e) {
        LdapRealm.log.warn("Could not find group " + groupDN, e);
      }
    }
  }

  /**
   * Immutable description of how accounts and groups are laid out on this
   * server, assembled from the {@code ldap.*} configuration combined with
   * per-server-type defaults discovered at construction time.
   */
  class LdapSchema {
    final LdapType type;

    final ParamertizedString accountFullName;
    final ParamertizedString accountEmailAddress;
    final ParamertizedString accountSshUserName;
    final String accountMemberField;
    final List<LdapQuery> accountQueryList;

    // True when a group member query references account attributes other
    // than the username, forcing an account lookup before group queries.
    boolean groupNeedsAccount;
    final List<String> groupBases;
    final SearchScope groupScope;
    final ParamertizedString groupPattern;
    final List<LdapQuery> groupMemberQueryList;

    LdapSchema(final DirContext ctx) {
      type = discoverLdapType(ctx);
      groupMemberQueryList = new ArrayList<LdapQuery>();
      accountQueryList = new ArrayList<LdapQuery>();

      // Attributes that must be fetched with each account record; filled
      // in below as the configured patterns are parsed.
      final Set<String> accountAtts = new HashSet<String>();

      // Group query
      //
      groupBases = LdapRealm.optionalList(config, "groupBase");
      groupScope = LdapRealm.scope(config, "groupScope");
      groupPattern = LdapRealm.paramString(config, "groupPattern", type.groupPattern());
      final String groupMemberPattern =
          LdapRealm.optdef(config, "groupMemberPattern", type.groupMemberPattern());

      for (String groupBase : groupBases) {
        if (groupMemberPattern != null) {
          final LdapQuery groupMemberQuery =
              new LdapQuery(groupBase, groupScope, new ParamertizedString(
                  groupMemberPattern), Collections.<String> emptySet());
          if (groupMemberQuery.getParameters().isEmpty()) {
            throw new IllegalArgumentException(
                "No variables in ldap.groupMemberPattern");
          }

          for (final String name : groupMemberQuery.getParameters()) {
            if (!LdapRealm.USERNAME.equals(name)) {
              groupNeedsAccount = true;
              accountAtts.add(name);
            }
          }

          groupMemberQueryList.add(groupMemberQuery);
        }
      }

      // Account query
      //
      accountFullName =
          LdapRealm.paramString(config, "accountFullName", type.accountFullName());
      if (accountFullName != null) {
        accountAtts.addAll(accountFullName.getParameterNames());
      }
      accountEmailAddress =
          LdapRealm.paramString(config, "accountEmailAddress", type
              .accountEmailAddress());
      if (accountEmailAddress != null) {
        accountAtts.addAll(accountEmailAddress.getParameterNames());
      }
      accountSshUserName =
          LdapRealm.paramString(config, "accountSshUserName", type.accountSshUserName());
      if (accountSshUserName != null) {
        accountAtts.addAll(accountSshUserName.getParameterNames());
      }
      accountMemberField =
          LdapRealm.optdef(config, "accountMemberField", type.accountMemberField());
      if (accountMemberField != null) {
        accountAtts.add(accountMemberField);
      }

      final SearchScope accountScope = LdapRealm.scope(config, "accountScope");
      final String accountPattern =
          LdapRealm.reqdef(config, "accountPattern", type.accountPattern());

      for (String accountBase : LdapRealm.requiredList(config, "accountBase")) {
        final LdapQuery accountQuery =
            new LdapQuery(accountBase, accountScope, new ParamertizedString(
                accountPattern), accountAtts);
        if (accountQuery.getParameters().isEmpty()) {
          throw new IllegalArgumentException(
              "No variables in ldap.accountPattern");
        }
        accountQueryList.add(accountQuery);
      }
    }

    /**
     * Probes the server to guess its vendor type; falls back to assuming an
     * RFC 2307 compliant server (with a logged warning) when the probe
     * fails.
     */
    LdapType discoverLdapType(DirContext ctx) {
      try {
        return LdapType.guessType(ctx);
      } catch (NamingException e) {
        LdapRealm.log.warn("Cannot discover type of LDAP server at " + server
            + ", assuming the server is RFC 2307 compliant.", e);
        return LdapType.RFC_2307;
      }
    }
  }
}

View File

@ -34,11 +34,15 @@ public class LdapModule extends CacheModule {
protected void configure() {
final TypeLiteral<Cache<String, Set<AccountGroup.Id>>> groups =
new TypeLiteral<Cache<String, Set<AccountGroup.Id>>>() {};
core(groups, GROUP_CACHE).maxAge(1, HOURS) //
.populateWith(LdapRealm.MemberLoader.class);
final TypeLiteral<Cache<String, Account.Id>> usernames =
new TypeLiteral<Cache<String, Account.Id>>() {};
core(usernames, USERNAME_CACHE) //
.populateWith(LdapRealm.UserLoader.class);
core(groups, GROUP_CACHE).maxAge(1, HOURS);
core(usernames, USERNAME_CACHE);
bind(Realm.class).to(LdapRealm.class).in(Scopes.SINGLETON);
bind(Helper.class);
}
}

View File

@ -26,14 +26,13 @@ import com.google.gerrit.server.account.AccountException;
import com.google.gerrit.server.account.AccountState;
import com.google.gerrit.server.account.AuthRequest;
import com.google.gerrit.server.account.EmailExpander;
import com.google.gerrit.server.account.GroupCache;
import com.google.gerrit.server.account.Realm;
import com.google.gerrit.server.auth.ldap.Helper.LdapSchema;
import com.google.gerrit.server.cache.Cache;
import com.google.gerrit.server.cache.SelfPopulatingCache;
import com.google.gerrit.server.cache.EntryCreator;
import com.google.gerrit.server.config.AuthConfig;
import com.google.gerrit.server.config.ConfigUtil;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.util.ssl.BlindSSLSocketFactory;
import com.google.gwtorm.client.OrmException;
import com.google.gwtorm.client.SchemaFactory;
import com.google.inject.Inject;
@ -44,7 +43,6 @@ import org.eclipse.jgit.lib.Config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
@ -52,62 +50,40 @@ import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import javax.naming.Context;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.directory.Attribute;
import javax.naming.directory.DirContext;
import javax.naming.directory.InitialDirContext;
import javax.net.ssl.SSLSocketFactory;
@Singleton
class LdapRealm implements Realm {
private static final Logger log = LoggerFactory.getLogger(LdapRealm.class);
private static final String LDAP = "com.sun.jndi.ldap.LdapCtxFactory";
private static final String USERNAME = "username";
static final Logger log = LoggerFactory.getLogger(LdapRealm.class);
static final String LDAP = "com.sun.jndi.ldap.LdapCtxFactory";
static final String USERNAME = "username";
private static final String GROUPNAME = "groupname";
private final Config config;
private final String server;
private final String username;
private final String password;
private final String referral;
private final boolean sslVerify;
private final Helper helper;
private final AuthConfig authConfig;
private final SchemaFactory<ReviewDb> schema;
private final EmailExpander emailExpander;
private final SelfPopulatingCache<String, Account.Id> usernameCache;
private final Cache<String, Account.Id> usernameCache;
private final Set<Account.FieldName> readOnlyAccountFields;
private final GroupCache groupCache;
private final SelfPopulatingCache<String, Set<AccountGroup.Id>> membershipCache;
private volatile LdapSchema ldapSchema;
private final Cache<String, Set<AccountGroup.Id>> membershipCache;
@Inject
LdapRealm(
final Helper helper,
final AuthConfig authConfig,
final GroupCache groupCache,
final EmailExpander emailExpander,
final SchemaFactory<ReviewDb> schema,
@Named(LdapModule.GROUP_CACHE) final Cache<String, Set<AccountGroup.Id>> rawGroup,
@Named(LdapModule.USERNAME_CACHE) final Cache<String, Account.Id> rawUsername,
@Named(LdapModule.GROUP_CACHE) final Cache<String, Set<AccountGroup.Id>> membershipCache,
@Named(LdapModule.USERNAME_CACHE) final Cache<String, Account.Id> usernameCache,
@GerritServerConfig final Config config) {
this.config = config;
this.helper = helper;
this.authConfig = authConfig;
this.groupCache = groupCache;
this.emailExpander = emailExpander;
this.schema = schema;
this.usernameCache = usernameCache;
this.membershipCache = membershipCache;
this.server = required(config, "server");
this.username = optional(config, "username");
this.password = optional(config, "password");
this.referral = optional(config, "referral");
this.sslVerify = config.getBoolean("ldap", "sslverify", true);
this.readOnlyAccountFields = new HashSet<Account.FieldName>();
if (optdef(config, "accountFullName", "DEFAULT") != null) {
@ -116,38 +92,17 @@ class LdapRealm implements Realm {
if (optdef(config, "accountSshUserName", "DEFAULT") != null) {
readOnlyAccountFields.add(Account.FieldName.USER_NAME);
}
membershipCache =
new SelfPopulatingCache<String, Set<AccountGroup.Id>>(rawGroup) {
@Override
public Set<AccountGroup.Id> createEntry(final String username)
throws Exception {
return queryForGroups(username);
}
@Override
protected Set<AccountGroup.Id> missing(final String key) {
return Collections.emptySet();
}
};
usernameCache = new SelfPopulatingCache<String, Account.Id>(rawUsername) {
@Override
public Account.Id createEntry(final String username) throws Exception {
return queryForUsername(username);
}
};
}
private static SearchScope scope(final Config c, final String setting) {
static SearchScope scope(final Config c, final String setting) {
return ConfigUtil.getEnum(c, "ldap", null, setting, SearchScope.SUBTREE);
}
private static String optional(final Config config, final String name) {
static String optional(final Config config, final String name) {
return config.getString("ldap", null, name);
}
private static String required(final Config config, final String name) {
static String required(final Config config, final String name) {
final String v = optional(config, name);
if (v == null || "".equals(v)) {
throw new IllegalArgumentException("No ldap." + name + " configured");
@ -155,13 +110,13 @@ class LdapRealm implements Realm {
return v;
}
private static List<String> optionalList(final Config config,
static List<String> optionalList(final Config config,
final String name) {
String s[] = config.getStringList("ldap", null, name);
return Arrays.asList(s);
}
private static List<String> requiredList(final Config config,
static List<String> requiredList(final Config config,
final String name) {
List<String> vlist = optionalList(config, name);
@ -172,7 +127,7 @@ class LdapRealm implements Realm {
return vlist;
}
private static String optdef(final Config c, final String n, final String d) {
static String optdef(final Config c, final String n, final String d) {
final String[] v = c.getStringList("ldap", null, n);
if (v == null || v.length == 0) {
return d;
@ -185,7 +140,7 @@ class LdapRealm implements Realm {
}
}
private static String reqdef(final Config c, final String n, final String d) {
static String reqdef(final Config c, final String n, final String d) {
final String v = optdef(c, n, d);
if (v == null) {
throw new IllegalArgumentException("No ldap." + n + " configured");
@ -193,7 +148,7 @@ class LdapRealm implements Realm {
return v;
}
private static ParamertizedString paramString(Config c, String n, String d) {
static ParamertizedString paramString(Config c, String n, String d) {
String expression = optdef(c, n, d);
if (expression == null) {
return null;
@ -230,19 +185,19 @@ class LdapRealm implements Realm {
try {
final DirContext ctx;
if (authConfig.getAuthType() == AuthType.LDAP_BIND) {
ctx = authenticate(username, who.getPassword());
ctx = helper.authenticate(username, who.getPassword());
} else {
ctx = open();
ctx = helper.open();
}
try {
final LdapSchema schema = getSchema(ctx);
final LdapQuery.Result m = findAccount(schema, ctx, username);
final Helper.LdapSchema schema = helper.getSchema(ctx);
final LdapQuery.Result m = helper.findAccount(schema, ctx, username);
if (authConfig.getAuthType() == AuthType.LDAP) {
// We found the user account, but we need to verify
// the password matches it before we can continue.
//
authenticate(m.getDN(), who.getPassword());
helper.authenticate(m.getDN(), who.getPassword());
}
who.setDisplayName(apply(schema.accountFullName, m));
@ -265,7 +220,7 @@ class LdapRealm implements Realm {
// in the middle of authenticating the user, its likely we will
// need to know what access rights they have soon.
//
membershipCache.put(username, queryForGroups(ctx, username, m));
membershipCache.put(username, helper.queryForGroups(ctx, username, m));
return who;
} finally {
try {
@ -293,99 +248,6 @@ class LdapRealm implements Realm {
return r;
}
private Set<AccountGroup.Id> queryForGroups(final String username)
throws NamingException, AccountException {
final DirContext ctx = open();
try {
return queryForGroups(ctx, username, null);
} finally {
try {
ctx.close();
} catch (NamingException e) {
log.warn("Cannot close LDAP query handle", e);
}
}
}
private Set<AccountGroup.Id> queryForGroups(final DirContext ctx,
final String username, LdapQuery.Result account) throws NamingException,
AccountException {
final LdapSchema schema = getSchema(ctx);
final Set<String> groupDNs = new HashSet<String>();
if (!schema.groupMemberQueryList.isEmpty()) {
final HashMap<String, String> params = new HashMap<String, String>();
if (schema.groupNeedsAccount) {
if (account == null) {
account = findAccount(schema, ctx, username);
}
for (String name : schema.groupMemberQueryList.get(0).getParameters()) {
params.put(name, account.get(name));
}
}
params.put(USERNAME, username);
for (LdapQuery groupMemberQuery : schema.groupMemberQueryList) {
for (LdapQuery.Result r : groupMemberQuery.query(ctx, params)) {
recursivelyExpandGroups(groupDNs, schema, ctx, r.getDN());
}
}
}
if (schema.accountMemberField != null) {
if (account == null) {
account = findAccount(schema, ctx, username);
}
final Attribute groupAtt = account.getAll(schema.accountMemberField);
if (groupAtt != null) {
final NamingEnumeration<?> groups = groupAtt.getAll();
while (groups.hasMore()) {
final String nextDN = (String) groups.next();
recursivelyExpandGroups(groupDNs, schema, ctx, nextDN);
}
}
}
final Set<AccountGroup.Id> actual = new HashSet<AccountGroup.Id>();
for (String dn : groupDNs) {
for (AccountGroup group : groupCache
.get(new AccountGroup.ExternalNameKey(dn))) {
if (group.getType() == AccountGroup.Type.LDAP) {
actual.add(group.getId());
}
}
}
if (actual.isEmpty()) {
return Collections.emptySet();
} else {
return Collections.unmodifiableSet(actual);
}
}
private void recursivelyExpandGroups(final Set<String> groupDNs,
final LdapSchema schema, final DirContext ctx, final String groupDN) {
if (groupDNs.add(groupDN) && schema.accountMemberField != null) {
// Recursively identify the groups it is a member of.
//
try {
final Attribute in =
ctx.getAttributes(groupDN).get(schema.accountMemberField);
if (in != null) {
final NamingEnumeration<?> groups = in.getAll();
while (groups.hasMore()) {
final String nextDN = (String) groups.next();
recursivelyExpandGroups(groupDNs, schema, ctx, nextDN);
}
}
} catch (NamingException e) {
log.warn("Could not find group " + groupDN, e);
}
}
}
private static String findId(final Collection<AccountExternalId> ids) {
for (final AccountExternalId i : ids) {
@ -408,9 +270,9 @@ class LdapRealm implements Realm {
out = new HashSet<AccountGroup.ExternalNameKey>();
try {
final DirContext ctx = open();
final DirContext ctx = helper.open();
try {
final LdapSchema schema = getSchema(ctx);
final LdapSchema schema = helper.getSchema(ctx);
final ParamertizedString filter =
ParamertizedString.asis(schema.groupPattern
.replace(GROUPNAME, name).toString());
@ -435,192 +297,59 @@ class LdapRealm implements Realm {
return out;
}
private Account.Id queryForUsername(final String username) {
try {
final ReviewDb db = schema.open();
static class UserLoader extends EntryCreator<String, Account.Id> {
private final SchemaFactory<ReviewDb> schema;
@Inject
UserLoader(SchemaFactory<ReviewDb> schema) {
this.schema = schema;
}
@Override
public Account.Id createEntry(final String username) throws Exception {
try {
final AccountExternalId extId =
db.accountExternalIds().get(
new AccountExternalId.Key(SCHEME_GERRIT, username));
return extId != null ? extId.getAccountId() : null;
final ReviewDb db = schema.open();
try {
final AccountExternalId extId =
db.accountExternalIds().get(
new AccountExternalId.Key(SCHEME_GERRIT, username));
return extId != null ? extId.getAccountId() : null;
} finally {
db.close();
}
} catch (OrmException e) {
log.warn("Cannot query for username in database", e);
return null;
}
}
}
static class MemberLoader extends EntryCreator<String, Set<AccountGroup.Id>> {
private final Helper helper;
@Inject
MemberLoader(final Helper helper) {
this.helper = helper;
}
@Override
public Set<AccountGroup.Id> createEntry(final String username)
throws Exception {
final DirContext ctx = helper.open();
try {
return helper.queryForGroups(ctx, username, null);
} finally {
db.close();
}
} catch (OrmException e) {
log.warn("Cannot query for username in database", e);
return null;
}
}
private Properties createContextProperties() {
final Properties env = new Properties();
env.put(Context.INITIAL_CONTEXT_FACTORY, LDAP);
env.put(Context.PROVIDER_URL, server);
if (server.startsWith("ldaps:") && !sslVerify) {
Class<? extends SSLSocketFactory> factory = BlindSSLSocketFactory.class;
env.put("java.naming.ldap.factory.socket", factory.getName());
}
return env;
}
private DirContext open() throws NamingException {
final Properties env = createContextProperties();
if (username != null) {
env.put(Context.SECURITY_AUTHENTICATION, "simple");
env.put(Context.SECURITY_PRINCIPAL, username);
env.put(Context.SECURITY_CREDENTIALS, password != null ? password : "");
env.put(Context.REFERRAL, referral != null ? referral : "ignore");
}
return new InitialDirContext(env);
}
private DirContext authenticate(String dn, String password)
throws AccountException {
final Properties env = createContextProperties();
env.put(Context.SECURITY_AUTHENTICATION, "simple");
env.put(Context.SECURITY_PRINCIPAL, dn);
env.put(Context.SECURITY_CREDENTIALS, password != null ? password : "");
env.put(Context.REFERRAL, referral != null ? referral : "ignore");
try {
return new InitialDirContext(env);
} catch (NamingException e) {
throw new AccountException("Incorrect username or password", e);
}
}
private LdapQuery.Result findAccount(final LdapSchema schema,
final DirContext ctx, final String username) throws NamingException,
AccountException {
final HashMap<String, String> params = new HashMap<String, String>();
params.put(USERNAME, username);
final List<LdapQuery.Result> res = new ArrayList<LdapQuery.Result>();
for (LdapQuery accountQuery : schema.accountQueryList) {
res.addAll(accountQuery.query(ctx, params));
}
switch (res.size()) {
case 0:
throw new AccountException("No such user:" + username);
case 1:
return res.get(0);
default:
throw new AccountException("Duplicate users: " + username);
}
}
private LdapSchema getSchema(DirContext ctx) {
if (ldapSchema == null) {
synchronized (this) {
if (ldapSchema == null) {
ldapSchema = new LdapSchema(ctx);
try {
ctx.close();
} catch (NamingException e) {
log.warn("Cannot close LDAP query handle", e);
}
}
}
return ldapSchema;
}
private class LdapSchema {
final LdapType type;
final ParamertizedString accountFullName;
final ParamertizedString accountEmailAddress;
final ParamertizedString accountSshUserName;
final String accountMemberField;
final List<LdapQuery> accountQueryList;
boolean groupNeedsAccount;
final List<String> groupBases;
final SearchScope groupScope;
final ParamertizedString groupPattern;
final List<LdapQuery> groupMemberQueryList;
LdapSchema(final DirContext ctx) {
type = discoverLdapType(ctx);
groupMemberQueryList = new ArrayList<LdapQuery>();
accountQueryList = new ArrayList<LdapQuery>();
final Set<String> accountAtts = new HashSet<String>();
// Group query
//
groupBases = optionalList(config, "groupBase");
groupScope = scope(config, "groupScope");
groupPattern = paramString(config, "groupPattern", type.groupPattern());
final String groupMemberPattern =
optdef(config, "groupMemberPattern", type.groupMemberPattern());
for (String groupBase : groupBases) {
if (groupMemberPattern != null) {
final LdapQuery groupMemberQuery =
new LdapQuery(groupBase, groupScope, new ParamertizedString(
groupMemberPattern), Collections.<String> emptySet());
if (groupMemberQuery.getParameters().isEmpty()) {
throw new IllegalArgumentException(
"No variables in ldap.groupMemberPattern");
}
for (final String name : groupMemberQuery.getParameters()) {
if (!USERNAME.equals(name)) {
groupNeedsAccount = true;
accountAtts.add(name);
}
}
groupMemberQueryList.add(groupMemberQuery);
}
}
// Account query
//
accountFullName =
paramString(config, "accountFullName", type.accountFullName());
if (accountFullName != null) {
accountAtts.addAll(accountFullName.getParameterNames());
}
accountEmailAddress =
paramString(config, "accountEmailAddress", type.accountEmailAddress());
if (accountEmailAddress != null) {
accountAtts.addAll(accountEmailAddress.getParameterNames());
}
accountSshUserName =
paramString(config, "accountSshUserName", type.accountSshUserName());
if (accountSshUserName != null) {
accountAtts.addAll(accountSshUserName.getParameterNames());
}
accountMemberField =
optdef(config, "accountMemberField", type.accountMemberField());
if (accountMemberField != null) {
accountAtts.add(accountMemberField);
}
final SearchScope accountScope = scope(config, "accountScope");
final String accountPattern =
reqdef(config, "accountPattern", type.accountPattern());
for (String accountBase : requiredList(config, "accountBase")) {
final LdapQuery accountQuery =
new LdapQuery(accountBase, accountScope, new ParamertizedString(
accountPattern), accountAtts);
if (accountQuery.getParameters().isEmpty()) {
throw new IllegalArgumentException(
"No variables in ldap.accountPattern");
}
accountQueryList.add(accountQuery);
}
}
LdapType discoverLdapType(DirContext ctx) {
try {
return LdapType.guessType(ctx);
} catch (NamingException e) {
log.warn("Cannot discover type of LDAP server at " + server
+ ", assuming the server is RFC 2307 compliant.", e);
return LdapType.RFC_2307;
}
@Override
public Set<AccountGroup.Id> missing(final String key) {
return Collections.emptySet();
}
}
}

View File

@ -33,6 +33,9 @@ public interface Cache<K, V> {
/** Remove any existing value from the cache, no-op if not present. */
public void remove(K key);
/** Remove all cached items. */
public void removeAll();
/**
* Get the time an element will survive in the cache.
*

View File

@ -16,7 +16,10 @@ package com.google.gerrit.server.cache;
import com.google.inject.AbstractModule;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scopes;
import com.google.inject.TypeLiteral;
import com.google.inject.internal.UniqueAnnotations;
import com.google.inject.name.Names;
import java.io.Serializable;
@ -35,7 +38,8 @@ public abstract class CacheModule extends AbstractModule {
* @return binding to describe the cache. Caller must set at least the name on
* the returned binding.
*/
protected <K, V> UnnamedCacheBinding core(final TypeLiteral<Cache<K, V>> type) {
protected <K, V> UnnamedCacheBinding<K, V> core(
final TypeLiteral<Cache<K, V>> type) {
return core(Key.get(type));
}
@ -49,15 +53,15 @@ public abstract class CacheModule extends AbstractModule {
* and with {@code @Named} annotations.
* @return binding to describe the cache.
*/
protected <K, V> NamedCacheBinding core(final TypeLiteral<Cache<K, V>> type,
final String name) {
protected <K, V> NamedCacheBinding<K, V> core(
final TypeLiteral<Cache<K, V>> type, final String name) {
return core(Key.get(type, Names.named(name))).name(name);
}
private <K, V> UnnamedCacheBinding core(final Key<Cache<K, V>> key) {
private <K, V> UnnamedCacheBinding<K, V> core(final Key<Cache<K, V>> key) {
final boolean disk = false;
final CacheProvider<K, V> b = new CacheProvider<K, V>(disk);
bind(key).toProvider(b);
final CacheProvider<K, V> b = new CacheProvider<K, V>(disk, this);
bind(key).toProvider(b).in(Scopes.SINGLETON);
return b;
}
@ -72,7 +76,7 @@ public abstract class CacheModule extends AbstractModule {
* @return binding to describe the cache. Caller must set at least the name on
* the returned binding.
*/
protected <K extends Serializable, V extends Serializable> UnnamedCacheBinding disk(
protected <K extends Serializable, V extends Serializable> UnnamedCacheBinding<K, V> disk(
final TypeLiteral<Cache<K, V>> type) {
return disk(Key.get(type));
}
@ -87,15 +91,31 @@ public abstract class CacheModule extends AbstractModule {
* and with {@code @Named} annotations.
* @return binding to describe the cache.
*/
protected <K extends Serializable, V extends Serializable> NamedCacheBinding disk(
protected <K extends Serializable, V extends Serializable> NamedCacheBinding<K, V> disk(
final TypeLiteral<Cache<K, V>> type, final String name) {
return disk(Key.get(type, Names.named(name))).name(name);
}
private <K, V> UnnamedCacheBinding disk(final Key<Cache<K, V>> key) {
private <K, V> UnnamedCacheBinding<K, V> disk(final Key<Cache<K, V>> key) {
final boolean disk = true;
final CacheProvider<K, V> b = new CacheProvider<K, V>(disk);
bind(key).toProvider(b);
final CacheProvider<K, V> b = new CacheProvider<K, V>(disk, this);
bind(key).toProvider(b).in(Scopes.SINGLETON);
return b;
}
<K, V> Provider<EntryCreator<K, V>> getEntryCreator(CacheProvider<K, V> cp,
Class<? extends EntryCreator<K, V>> type) {
Key<EntryCreator<K, V>> key = newKey();
bind(key).to(type).in(Scopes.SINGLETON);
return getProvider(key);
}
@SuppressWarnings("unchecked")
private static <K, V> Key<EntryCreator<K, V>> newKey() {
return (Key<EntryCreator<K, V>>) newKeyImpl();
}
private static Key<?> newKeyImpl() {
return Key.get(EntryCreator.class, UniqueAnnotations.create());
}
}

View File

@ -27,7 +27,8 @@ import net.sf.ehcache.Ehcache;
import java.util.concurrent.TimeUnit;
final class CacheProvider<K, V> implements Provider<Cache<K, V>>,
NamedCacheBinding, UnnamedCacheBinding {
NamedCacheBinding<K, V>, UnnamedCacheBinding<K, V> {
private final CacheModule module;
private final boolean disk;
private int memoryLimit;
private int diskLimit;
@ -35,9 +36,11 @@ final class CacheProvider<K, V> implements Provider<Cache<K, V>>,
private EvictionPolicy evictionPolicy;
private String cacheName;
private ProxyEhcache cache;
private Provider<EntryCreator<K, V>> entryCreator;
CacheProvider(final boolean disk) {
CacheProvider(final boolean disk, CacheModule module) {
this.disk = disk;
this.module = module;
memoryLimit(1024);
maxAge(90, DAYS);
@ -84,7 +87,7 @@ final class CacheProvider<K, V> implements Provider<Cache<K, V>>,
return evictionPolicy;
}
public NamedCacheBinding name(final String name) {
public NamedCacheBinding<K, V> name(final String name) {
if (cacheName != null) {
throw new IllegalStateException("Cache name already set");
}
@ -92,12 +95,12 @@ final class CacheProvider<K, V> implements Provider<Cache<K, V>>,
return this;
}
public NamedCacheBinding memoryLimit(final int objects) {
public NamedCacheBinding<K, V> memoryLimit(final int objects) {
memoryLimit = objects;
return this;
}
public NamedCacheBinding diskLimit(final int objects) {
public NamedCacheBinding<K, V> diskLimit(final int objects) {
if (!disk) {
// TODO This should really be a compile time type error, but I'm
// too lazy to create the mess of permutations required to setup
@ -109,21 +112,30 @@ final class CacheProvider<K, V> implements Provider<Cache<K, V>>,
return this;
}
public NamedCacheBinding maxAge(final long duration, final TimeUnit unit) {
public NamedCacheBinding<K, V> maxAge(final long duration, final TimeUnit unit) {
maxAge = SECONDS.convert(duration, unit);
return this;
}
@Override
public NamedCacheBinding evictionPolicy(final EvictionPolicy policy) {
public NamedCacheBinding<K, V> evictionPolicy(final EvictionPolicy policy) {
evictionPolicy = policy;
return this;
}
public NamedCacheBinding<K, V> populateWith(
Class<? extends EntryCreator<K, V>> creator) {
entryCreator = module.getEntryCreator(this, creator);
return this;
}
public Cache<K, V> get() {
if (cache == null) {
throw new ProvisionException("Cache \"" + cacheName + "\" not available");
}
if (entryCreator != null) {
return new PopulatingCache<K, V>(cache, entryCreator.get());
}
return new SimpleCache<K, V>(cache);
}
}

View File

@ -0,0 +1,40 @@
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.cache;
/**
 * Creates a cache entry on demand when it's not found.
 * <p>
 * Implementations are bound in a {@link CacheModule} so a cache can be
 * populated automatically on a miss.
 *
 * @param <K> type of the cache's key.
 * @param <V> type of the cache's value element.
 */
public abstract class EntryCreator<K, V> {
  /**
   * Invoked on a cache miss, to compute the cache entry.
   *
   * @param key entry whose content needs to be obtained.
   * @return new cache content. The caller will automatically put this object
   *         into the cache.
   * @throws Exception the cache content cannot be computed. No entry will be
   *         stored in the cache, and {@link #missing(Object)} will be invoked
   *         instead. Future requests for the same key will retry this method.
   */
  public abstract V createEntry(K key) throws Exception;

  /** Invoked when {@link #createEntry(Object)} fails, by default return null. */
  public V missing(K key) {
    return null;
  }
}

View File

@ -17,16 +17,19 @@ package com.google.gerrit.server.cache;
import java.util.concurrent.TimeUnit;
/** Configure a cache declared within a {@link CacheModule} instance. */
public interface NamedCacheBinding {
public interface NamedCacheBinding<K, V> {
/** Set the number of objects to cache in memory. */
public NamedCacheBinding memoryLimit(int objects);
public NamedCacheBinding<K, V> memoryLimit(int objects);
/** Set the number of objects to cache in memory. */
public NamedCacheBinding diskLimit(int objects);
public NamedCacheBinding<K, V> diskLimit(int objects);
/** Set the time an element lives before being expired. */
public NamedCacheBinding maxAge(long duration, TimeUnit durationUnits);
public NamedCacheBinding<K, V> maxAge(long duration, TimeUnit durationUnits);
/** Set the eviction policy for elements when the cache is full. */
public NamedCacheBinding evictionPolicy(EvictionPolicy policy);
public NamedCacheBinding<K, V> evictionPolicy(EvictionPolicy policy);
/** Populate the cache with items from the EntryCreator. */
public NamedCacheBinding<K, V> populateWith(Class<? extends EntryCreator<K, V>> creator);
}

View File

@ -29,61 +29,39 @@ import java.util.concurrent.TimeUnit;
/**
* A decorator for {@link Cache} which automatically constructs missing entries.
* <p>
* On a cache miss {@link #createEntry(Object)} is invoked, allowing the
* application specific subclass to compute the entry and return it for caching.
* During a miss the cache takes a lock related to the missing key, ensuring
* that at most one thread performs the creation work, and other threads wait
* for the result. Concurrent creations are possible if two different keys miss
* and hash to different locks in the internal lock table.
* On a cache miss {@link EntryCreator#createEntry(Object)} is invoked, allowing
* the application specific subclass to compute the entry and return it for
* caching. During a miss the cache takes a lock related to the missing key,
* ensuring that at most one thread performs the creation work, and other
* threads wait for the result. Concurrent creations are possible if two
* different keys miss and hash to different locks in the internal lock table.
*
* @param <K> type of key used to name cache entries.
* @param <V> type of value stored within a cache entry.
*/
public abstract class SelfPopulatingCache<K, V> implements Cache<K, V> {
class PopulatingCache<K, V> implements Cache<K, V> {
private static final Logger log =
LoggerFactory.getLogger(SelfPopulatingCache.class);
LoggerFactory.getLogger(PopulatingCache.class);
private final net.sf.ehcache.constructs.blocking.SelfPopulatingCache self;
private final EntryCreator<K, V> creator;
/**
* Create a new cache which uses another cache to store entries.
*
* @param backingStore cache which will store the entries for this cache.
*/
@SuppressWarnings("unchecked")
public SelfPopulatingCache(final Cache<K, V> backingStore) {
final Ehcache s = ((SimpleCache) backingStore).getEhcache();
PopulatingCache(Ehcache s, EntryCreator<K, V> entryCreator) {
creator = entryCreator;
final CacheEntryFactory f = new CacheEntryFactory() {
@SuppressWarnings("unchecked")
@Override
public Object createEntry(Object key) throws Exception {
return SelfPopulatingCache.this.createEntry((K) key);
return creator.createEntry((K) key);
}
};
self = new net.sf.ehcache.constructs.blocking.SelfPopulatingCache(s, f);
}
/**
* Invoked on a cache miss, to compute the cache entry.
*
* @param key entry whose content needs to be obtained.
* @return new cache content. The caller will automatically put this object
* into the cache.
* @throws Exception the cache content cannot be computed. No entry will be
* stored in the cache, and {@link #missing(Object)} will be invoked
* instead. Future requests for the same key will retry this method.
*/
protected abstract V createEntry(K key) throws Exception;
/** Invoked when {@link #createEntry(Object)} fails, by default return null. */
protected V missing(K key) {
return null;
}
/**
* Get the element from the cache, or {@link #missing(Object)} if not found.
* Get the element from the cache, or {@link EntryCreator#missing(Object)} if not found.
* <p>
* The {@link #missing(Object)} method is only invoked if:
* The {@link EntryCreator#missing(Object)} method is only invoked if:
* <ul>
* <li>{@code key == null}, in which case the application should return a
* suitable return value that callers can accept, or throw a RuntimeException.
@ -99,7 +77,7 @@ public abstract class SelfPopulatingCache<K, V> implements Cache<K, V> {
@SuppressWarnings("unchecked")
public V get(final K key) {
if (key == null) {
return missing(key);
return creator.missing(key);
}
final Element m;
@ -107,12 +85,12 @@ public abstract class SelfPopulatingCache<K, V> implements Cache<K, V> {
m = self.get(key);
} catch (IllegalStateException err) {
log.error("Cannot lookup " + key + " in \"" + self.getName() + "\"", err);
return missing(key);
return creator.missing(key);
} catch (CacheException err) {
log.error("Cannot lookup " + key + " in \"" + self.getName() + "\"", err);
return missing(key);
return creator.missing(key);
}
return m != null ? (V) m.getObjectValue() : missing(key);
return m != null ? (V) m.getObjectValue() : creator.missing(key);
}
public void remove(final K key) {
@ -122,7 +100,7 @@ public abstract class SelfPopulatingCache<K, V> implements Cache<K, V> {
}
/** Remove all cached items, forcing them to be created again on demand. */
public void removeAll(){
public void removeAll() {
self.removeAll();
}

View File

@ -72,6 +72,10 @@ final class SimpleCache<K, V> implements Cache<K, V> {
}
}
public void removeAll() {
self.removeAll();
}
@Override
public long getTimeToLive(final TimeUnit unit) {
final long maxAge = self.getCacheConfiguration().getTimeToLiveSeconds();

View File

@ -16,7 +16,7 @@ package com.google.gerrit.server.cache;
/** Configure a cache declared within a {@link CacheModule} instance. */
public interface UnnamedCacheBinding {
public interface UnnamedCacheBinding<K, V> {
/** Set the name of the cache. */
public NamedCacheBinding name(String cacheName);
public NamedCacheBinding<K, V> name(String cacheName);
}

View File

@ -21,8 +21,8 @@ import com.google.gerrit.reviewdb.Project;
import com.google.gerrit.reviewdb.AccountDiffPreference.Whitespace;
import com.google.gerrit.server.cache.Cache;
import com.google.gerrit.server.cache.CacheModule;
import com.google.gerrit.server.cache.EntryCreator;
import com.google.gerrit.server.cache.EvictionPolicy;
import com.google.gerrit.server.cache.SelfPopulatingCache;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.inject.Inject;
@ -34,8 +34,6 @@ import com.google.inject.name.Named;
import org.eclipse.jgit.diff.Edit;
import org.eclipse.jgit.diff.MyersDiff;
import org.eclipse.jgit.diff.ReplaceEdit;
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.Constants;
@ -62,7 +60,6 @@ import java.util.regex.Pattern;
@Singleton
public class PatchListCacheImpl implements PatchListCache {
private static final String CACHE_NAME = "diff";
private static final boolean dynamic = false;
private static final Pattern BLANK_LINE_RE =
Pattern.compile("^[ \\t]*(|[{}]|/\\*\\*?|\\*)[ \\t]*$");
@ -78,6 +75,7 @@ public class PatchListCacheImpl implements PatchListCache {
disk(type, CACHE_NAME) //
.memoryLimit(128) // very large items, cache only a few
.evictionPolicy(EvictionPolicy.LRU) // prefer most recent
.populateWith(Loader.class) //
;
bind(PatchListCacheImpl.class);
bind(PatchListCache.class).to(PatchListCacheImpl.class);
@ -85,33 +83,16 @@ public class PatchListCacheImpl implements PatchListCache {
};
}
private final GitRepositoryManager repoManager;
private final SelfPopulatingCache<PatchListKey, PatchList> self;
private final boolean computeIntraline;
private final Cache<PatchListKey, PatchList> cache;
@Inject
PatchListCacheImpl(final GitRepositoryManager grm,
@GerritServerConfig final Config config,
@Named(CACHE_NAME) final Cache<PatchListKey, PatchList> raw) {
repoManager = grm;
computeIntraline = config.getBoolean("cache", "diff", "intraline", true);
self = new SelfPopulatingCache<PatchListKey, PatchList>(raw) {
@Override
protected PatchList createEntry(final PatchListKey key) throws Exception {
return compute(key);
}
};
PatchListCacheImpl(
@Named(CACHE_NAME) final Cache<PatchListKey, PatchList> thecache) {
cache = thecache;
}
public PatchList get(final PatchListKey key) {
if (dynamic) {
try {
return compute(key);
} catch (Exception e) {
throw new RuntimeException("Cannot lookup " + key, e);
}
}
return self.get(key);
return cache.get(key);
}
public PatchList get(final Change change, final PatchSet patchSet) {
@ -126,417 +107,428 @@ public class PatchListCacheImpl implements PatchListCache {
return get(new PatchListKey(projectKey, a, b, whitespace));
}
private PatchList compute(final PatchListKey key)
throws MissingObjectException, IncorrectObjectTypeException, IOException {
final Repository repo = repoManager.openRepository(key.projectKey.get());
try {
return readPatchList(key, repo);
} finally {
repo.close();
}
}
static class Loader extends EntryCreator<PatchListKey, PatchList> {
private final GitRepositoryManager repoManager;
private final boolean computeIntraline;
private PatchList readPatchList(final PatchListKey key, final Repository repo)
throws IOException {
final RevWalk rw = new RevWalk(repo);
final RevCommit b = rw.parseCommit(key.getNewId());
final AnyObjectId a = aFor(key, repo, b);
final List<String> args = new ArrayList<String>();
args.add("git");
args.add("--git-dir=.");
args.add("diff-tree");
args.add("-M");
switch (key.getWhitespace()) {
case IGNORE_NONE:
break;
case IGNORE_SPACE_AT_EOL:
args.add("--ignore-space-at-eol");
break;
case IGNORE_SPACE_CHANGE:
args.add("--ignore-space-change");
break;
case IGNORE_ALL_SPACE:
args.add("--ignore-all-space");
break;
default:
throw new IOException("Unsupported whitespace " + key.getWhitespace());
}
if (a == null /* want combined diff */) {
args.add("--cc");
args.add(b.name());
} else {
args.add("--unified=1");
args.add(a.name());
args.add(b.name());
@Inject
Loader(GitRepositoryManager mgr, @GerritServerConfig Config config) {
repoManager = mgr;
computeIntraline = config.getBoolean("cache", "diff", "intraline", true);
}
final org.eclipse.jgit.patch.Patch p = new org.eclipse.jgit.patch.Patch();
final Process diffProcess = exec(repo, args);
try {
diffProcess.getOutputStream().close();
diffProcess.getErrorStream().close();
final InputStream in = diffProcess.getInputStream();
@Override
public PatchList createEntry(final PatchListKey key) throws Exception {
final Repository repo = repoManager.openRepository(key.projectKey.get());
try {
p.parse(in);
return readPatchList(key, repo);
} finally {
in.close();
repo.close();
}
} finally {
}
private PatchList readPatchList(final PatchListKey key,
final Repository repo) throws IOException {
final RevWalk rw = new RevWalk(repo);
final RevCommit b = rw.parseCommit(key.getNewId());
final AnyObjectId a = aFor(key, repo, b);
final List<String> args = new ArrayList<String>();
args.add("git");
args.add("--git-dir=.");
args.add("diff-tree");
args.add("-M");
switch (key.getWhitespace()) {
case IGNORE_NONE:
break;
case IGNORE_SPACE_AT_EOL:
args.add("--ignore-space-at-eol");
break;
case IGNORE_SPACE_CHANGE:
args.add("--ignore-space-change");
break;
case IGNORE_ALL_SPACE:
args.add("--ignore-all-space");
break;
default:
throw new IOException("Unsupported whitespace " + key.getWhitespace());
}
if (a == null /* want combined diff */) {
args.add("--cc");
args.add(b.name());
} else {
args.add("--unified=1");
args.add(a.name());
args.add(b.name());
}
final org.eclipse.jgit.patch.Patch p = new org.eclipse.jgit.patch.Patch();
final Process diffProcess = exec(repo, args);
try {
final int rc = diffProcess.waitFor();
if (rc != 0) {
throw new IOException("git diff-tree exited abnormally: " + rc);
diffProcess.getOutputStream().close();
diffProcess.getErrorStream().close();
final InputStream in = diffProcess.getInputStream();
try {
p.parse(in);
} finally {
in.close();
}
} finally {
try {
final int rc = diffProcess.waitFor();
if (rc != 0) {
throw new IOException("git diff-tree exited abnormally: " + rc);
}
} catch (InterruptedException ie) {
}
} catch (InterruptedException ie) {
}
RevTree aTree = a != null ? rw.parseTree(a) : null;
RevTree bTree = b.getTree();
final int cnt = p.getFiles().size();
final PatchListEntry[] entries = new PatchListEntry[cnt];
for (int i = 0; i < cnt; i++) {
entries[i] = newEntry(repo, aTree, bTree, p.getFiles().get(i));
}
return new PatchList(a, b, computeIntraline, entries);
}
RevTree aTree = a != null ? rw.parseTree(a) : null;
RevTree bTree = b.getTree();
private PatchListEntry newEntry(Repository repo, RevTree aTree,
RevTree bTree, FileHeader fileHeader) throws IOException {
final FileMode oldMode = fileHeader.getOldMode();
final FileMode newMode = fileHeader.getNewMode();
final int cnt = p.getFiles().size();
final PatchListEntry[] entries = new PatchListEntry[cnt];
for (int i = 0; i < cnt; i++) {
entries[i] = newEntry(repo, aTree, bTree, p.getFiles().get(i));
}
return new PatchList(a, b, computeIntraline, entries);
}
if (oldMode == FileMode.GITLINK || newMode == FileMode.GITLINK) {
return new PatchListEntry(fileHeader, Collections.<Edit> emptyList());
}
private PatchListEntry newEntry(Repository repo, RevTree aTree,
RevTree bTree, FileHeader fileHeader) throws IOException {
final FileMode oldMode = fileHeader.getOldMode();
final FileMode newMode = fileHeader.getNewMode();
if (aTree == null // want combined diff
|| fileHeader.getPatchType() != PatchType.UNIFIED
|| fileHeader.getHunks().isEmpty()) {
return new PatchListEntry(fileHeader, Collections.<Edit> emptyList());
}
if (oldMode == FileMode.GITLINK || newMode == FileMode.GITLINK) {
return new PatchListEntry(fileHeader, Collections.<Edit> emptyList());
}
List<Edit> edits = fileHeader.toEditList();
if (edits.isEmpty()) {
return new PatchListEntry(fileHeader, Collections.<Edit> emptyList());
}
if (!computeIntraline) {
return new PatchListEntry(fileHeader, edits);
}
if (aTree == null // want combined diff
|| fileHeader.getPatchType() != PatchType.UNIFIED
|| fileHeader.getHunks().isEmpty()) {
return new PatchListEntry(fileHeader, Collections.<Edit> emptyList());
}
switch (fileHeader.getChangeType()) {
case ADD:
case DELETE:
return new PatchListEntry(fileHeader, edits);
}
Text aContent = null;
Text bContent = null;
for (int i = 0; i < edits.size(); i++) {
Edit e = edits.get(i);
if (e.getType() == Edit.Type.REPLACE) {
if (aContent == null) {
edits = new ArrayList<Edit>(edits);
aContent = read(repo, fileHeader.getOldName(), aTree);
bContent = read(repo, fileHeader.getNewName(), bTree);
combineLineEdits(edits, aContent, bContent);
i = -1; // restart the entire scan after combining lines.
continue;
}
CharText a = new CharText(aContent, e.getBeginA(), e.getEndA());
CharText b = new CharText(bContent, e.getBeginB(), e.getEndB());
List<Edit> wordEdits = new MyersDiff(a, b).getEdits();
// Combine edits that are really close together. If they are
// just a few characters apart we tend to get better results
// by joining them together and taking the whole span.
//
for (int j = 0; j < wordEdits.size() - 1;) {
Edit c = wordEdits.get(j);
Edit n = wordEdits.get(j + 1);
if (n.getBeginA() - c.getEndA() <= 5
|| n.getBeginB() - c.getEndB() <= 5) {
int ab = c.getBeginA();
int ae = n.getEndA();
int bb = c.getBeginB();
int be = n.getEndB();
if (canCoalesce(a, c.getEndA(), n.getBeginA())
&& canCoalesce(b, c.getEndB(), n.getBeginB())) {
wordEdits.set(j, new Edit(ab, ae, bb, be));
wordEdits.remove(j + 1);
continue;
}
}
j++;
}
// Apply some simple rules to fix up some of the edits. Our
// logic above, along with our per-character difference tends
// to produce some crazy stuff.
//
for (int j = 0; j < wordEdits.size(); j++) {
Edit c = wordEdits.get(j);
int ab = c.getBeginA();
int ae = c.getEndA();
int bb = c.getBeginB();
int be = c.getEndB();
// Sometimes the diff generator produces an INSERT or DELETE
// right up against a REPLACE, but we only find this after
// we've also played some shifting games on the prior edit.
// If that happened to us, coalesce them together so we can
// correct this mess for the user. If we don't we wind up
// with silly stuff like "es" -> "es = Addresses".
//
if (1 < j) {
Edit p = wordEdits.get(j - 1);
if (p.getEndA() == ab || p.getEndB() == bb) {
if (p.getEndA() == ab && p.getBeginA() < p.getEndA()) {
ab = p.getBeginA();
}
if (p.getEndB() == bb && p.getBeginB() < p.getEndB()) {
bb = p.getBeginB();
}
wordEdits.remove(--j);
}
}
// We sometimes collapsed an edit together in a strange way,
// such that the edges of each text is identical. Fix by
// by dropping out that incorrectly replaced region.
//
while (ab < ae && bb < be && a.equals(ab, b, bb)) {
ab++;
bb++;
}
while (ab < ae && bb < be && a.equals(ae - 1, b, be - 1)) {
ae--;
be--;
}
// The leading part of an edit and its trailing part in the same
// text might be identical. Slide down that edit and use the tail
// rather than the leading bit. If however the edit is only on a
// whitespace block try to shift it to the left margin, assuming
// that it is an indentation change.
//
boolean aShift = true;
if (ab < ae && isOnlyWhitespace(a, ab, ae)) {
int lf = findLF(wordEdits, j, a, ab);
if (lf < ab && a.charAt(lf) == '\n') {
int nb = lf + 1;
int p = 0;
while (p < ae - ab) {
if (a.equals(ab + p, a, ab + p))
p++;
else
break;
}
if (p == ae - ab) {
ab = nb;
ae = nb + p;
aShift = false;
}
}
}
if (aShift) {
while (0 < ab && ab < ae && a.charAt(ab - 1) != '\n'
&& a.equals(ab - 1, a, ae - 1)) {
ab--;
ae--;
}
if (!a.isLineStart(ab) || !a.contains(ab, ae, '\n')) {
while (ab < ae && ae < a.size() && a.equals(ab, a, ae)) {
ab++;
ae++;
if (a.charAt(ae - 1) == '\n') {
break;
}
}
}
}
boolean bShift = true;
if (bb < be && isOnlyWhitespace(b, bb, be)) {
int lf = findLF(wordEdits, j, b, bb);
if (lf < bb && b.charAt(lf) == '\n') {
int nb = lf + 1;
int p = 0;
while (p < be - bb) {
if (b.equals(bb + p, b, bb + p))
p++;
else
break;
}
if (p == be - bb) {
bb = nb;
be = nb + p;
bShift = false;
}
}
}
if (bShift) {
while (0 < bb && bb < be && b.charAt(bb - 1) != '\n'
&& b.equals(bb - 1, b, be - 1)) {
bb--;
be--;
}
if (!b.isLineStart(bb) || !b.contains(bb, be, '\n')) {
while (bb < be && be < b.size() && b.equals(bb, b, be)) {
bb++;
be++;
if (b.charAt(be - 1) == '\n') {
break;
}
}
}
}
// If most of a line was modified except the LF was common, make
// the LF part of the modification region. This is easier to read.
//
if (ab < ae //
&& (ab == 0 || a.charAt(ab - 1) == '\n') //
&& ae < a.size() && a.charAt(ae) == '\n') {
ae++;
}
if (bb < be //
&& (bb == 0 || b.charAt(bb - 1) == '\n') //
&& be < b.size() && b.charAt(be) == '\n') {
be++;
}
wordEdits.set(j, new Edit(ab, ae, bb, be));
}
edits.set(i, new ReplaceEdit(e, wordEdits));
}
}
List<Edit> edits = fileHeader.toEditList();
if (edits.isEmpty()) {
return new PatchListEntry(fileHeader, Collections.<Edit> emptyList());
}
if (!computeIntraline) {
return new PatchListEntry(fileHeader, edits);
}
switch (fileHeader.getChangeType()) {
case ADD:
case DELETE:
return new PatchListEntry(fileHeader, edits);
}
private static void combineLineEdits(List<Edit> edits, Text a, Text b) {
for (int j = 0; j < edits.size() - 1;) {
Edit c = edits.get(j);
Edit n = edits.get(j + 1);
Text aContent = null;
Text bContent = null;
// Combine edits that are really close together. Right now our rule
// is, coalesce two line edits which are only one line apart if that
// common context line is either a "pointless line", or is identical
// on both sides and starts a new block of code. These are mostly
// block reindents to add or remove control flow operators.
//
final int ad = n.getBeginA() - c.getEndA();
final int bd = n.getBeginB() - c.getEndB();
if ((1 <= ad && isBlankLineGap(a, c.getEndA(), n.getBeginA()))
|| (1 <= bd && isBlankLineGap(b, c.getEndB(), n.getBeginB()))
|| (ad == 1 && bd == 1 && isControlBlockStart(a, c.getEndA()))) {
int ab = c.getBeginA();
int ae = n.getEndA();
for (int i = 0; i < edits.size(); i++) {
Edit e = edits.get(i);
int bb = c.getBeginB();
int be = n.getEndB();
if (e.getType() == Edit.Type.REPLACE) {
if (aContent == null) {
edits = new ArrayList<Edit>(edits);
aContent = read(repo, fileHeader.getOldName(), aTree);
bContent = read(repo, fileHeader.getNewName(), bTree);
combineLineEdits(edits, aContent, bContent);
i = -1; // restart the entire scan after combining lines.
edits.set(j, new Edit(ab, ae, bb, be));
edits.remove(j + 1);
continue;
}
CharText a = new CharText(aContent, e.getBeginA(), e.getEndA());
CharText b = new CharText(bContent, e.getBeginB(), e.getEndB());
j++;
}
}
List<Edit> wordEdits = new MyersDiff(a, b).getEdits();
// Combine edits that are really close together. If they are
// just a few characters apart we tend to get better results
// by joining them together and taking the whole span.
//
for (int j = 0; j < wordEdits.size() - 1;) {
Edit c = wordEdits.get(j);
Edit n = wordEdits.get(j + 1);
if (n.getBeginA() - c.getEndA() <= 5
|| n.getBeginB() - c.getEndB() <= 5) {
int ab = c.getBeginA();
int ae = n.getEndA();
int bb = c.getBeginB();
int be = n.getEndB();
if (canCoalesce(a, c.getEndA(), n.getBeginA())
&& canCoalesce(b, c.getEndB(), n.getBeginB())) {
wordEdits.set(j, new Edit(ab, ae, bb, be));
wordEdits.remove(j + 1);
continue;
}
}
j++;
private static boolean isBlankLineGap(Text a, int b, int e) {
for (; b < e; b++) {
if (!BLANK_LINE_RE.matcher(a.getLine(b)).matches()) {
return false;
}
}
return true;
}
// Apply some simple rules to fix up some of the edits. Our
// logic above, along with our per-character difference tends
// to produce some crazy stuff.
//
for (int j = 0; j < wordEdits.size(); j++) {
Edit c = wordEdits.get(j);
int ab = c.getBeginA();
int ae = c.getEndA();
private static boolean isControlBlockStart(Text a, int idx) {
final String l = a.getLine(idx);
return CONTROL_BLOCK_START_RE.matcher(l).find();
}
int bb = c.getBeginB();
int be = c.getEndB();
// Sometimes the diff generator produces an INSERT or DELETE
// right up against a REPLACE, but we only find this after
// we've also played some shifting games on the prior edit.
// If that happened to us, coalesce them together so we can
// correct this mess for the user. If we don't we wind up
// with silly stuff like "es" -> "es = Addresses".
//
if (1 < j) {
Edit p = wordEdits.get(j - 1);
if (p.getEndA() == ab || p.getEndB() == bb) {
if (p.getEndA() == ab && p.getBeginA() < p.getEndA()) {
ab = p.getBeginA();
}
if (p.getEndB() == bb && p.getBeginB() < p.getEndB()) {
bb = p.getBeginB();
}
wordEdits.remove(--j);
}
}
// We sometimes collapsed an edit together in a strange way,
// such that the edges of each text is identical. Fix by
// by dropping out that incorrectly replaced region.
//
while (ab < ae && bb < be && a.equals(ab, b, bb)) {
ab++;
bb++;
}
while (ab < ae && bb < be && a.equals(ae - 1, b, be - 1)) {
ae--;
be--;
}
// The leading part of an edit and its trailing part in the same
// text might be identical. Slide down that edit and use the tail
// rather than the leading bit. If however the edit is only on a
// whitespace block try to shift it to the left margin, assuming
// that it is an indentation change.
//
boolean aShift = true;
if (ab < ae && isOnlyWhitespace(a, ab, ae)) {
int lf = findLF(wordEdits, j, a, ab);
if (lf < ab && a.charAt(lf) == '\n') {
int nb = lf + 1;
int p = 0;
while (p < ae - ab) {
if (a.equals(ab + p, a, ab + p))
p++;
else
break;
}
if (p == ae - ab) {
ab = nb;
ae = nb + p;
aShift = false;
}
}
}
if (aShift) {
while (0 < ab && ab < ae && a.charAt(ab - 1) != '\n'
&& a.equals(ab - 1, a, ae - 1)) {
ab--;
ae--;
}
if (!a.isLineStart(ab) || !a.contains(ab, ae, '\n')) {
while (ab < ae && ae < a.size() && a.equals(ab, a, ae)) {
ab++;
ae++;
if (a.charAt(ae - 1) == '\n') {
break;
}
}
}
}
boolean bShift = true;
if (bb < be && isOnlyWhitespace(b, bb, be)) {
int lf = findLF(wordEdits, j, b, bb);
if (lf < bb && b.charAt(lf) == '\n') {
int nb = lf + 1;
int p = 0;
while (p < be - bb) {
if (b.equals(bb + p, b, bb + p))
p++;
else
break;
}
if (p == be - bb) {
bb = nb;
be = nb + p;
bShift = false;
}
}
}
if (bShift) {
while (0 < bb && bb < be && b.charAt(bb - 1) != '\n'
&& b.equals(bb - 1, b, be - 1)) {
bb--;
be--;
}
if (!b.isLineStart(bb) || !b.contains(bb, be, '\n')) {
while (bb < be && be < b.size() && b.equals(bb, b, be)) {
bb++;
be++;
if (b.charAt(be - 1) == '\n') {
break;
}
}
}
}
// If most of a line was modified except the LF was common, make
// the LF part of the modification region. This is easier to read.
//
if (ab < ae //
&& (ab == 0 || a.charAt(ab - 1) == '\n') //
&& ae < a.size() && a.charAt(ae) == '\n') {
ae++;
}
if (bb < be //
&& (bb == 0 || b.charAt(bb - 1) == '\n') //
&& be < b.size() && b.charAt(be) == '\n') {
be++;
}
wordEdits.set(j, new Edit(ab, ae, bb, be));
private static boolean canCoalesce(CharText a, int b, int e) {
while (b < e) {
if (a.charAt(b++) == '\n') {
return false;
}
}
return true;
}
edits.set(i, new ReplaceEdit(e, wordEdits));
private static int findLF(List<Edit> edits, int j, CharText t, int b) {
int lf = b;
int limit = 0 < j ? edits.get(j - 1).getEndB() : 0;
while (limit < lf && t.charAt(lf) != '\n') {
lf--;
}
return lf;
}
private static boolean isOnlyWhitespace(CharText t, final int b, final int e) {
for (int c = b; c < e; c++) {
if (!Character.isWhitespace(t.charAt(c))) {
return false;
}
}
return b < e;
}
private static Text read(Repository repo, String path, RevTree tree)
throws IOException {
TreeWalk tw = TreeWalk.forPath(repo, path, tree);
if (tw == null || tw.getFileMode(0).getObjectType() != Constants.OBJ_BLOB) {
return Text.EMPTY;
}
ObjectLoader ldr = repo.openObject(tw.getObjectId(0));
if (ldr == null) {
return Text.EMPTY;
}
return new Text(ldr.getCachedBytes());
}
private static AnyObjectId aFor(final PatchListKey key,
final Repository repo, final RevCommit b) throws IOException {
if (key.getOldId() != null) {
return key.getOldId();
}
switch (b.getParentCount()) {
case 0:
return emptyTree(repo);
case 1:
return b.getParent(0);
default:
// merge commit, return null to force combined diff behavior
return null;
}
}
return new PatchListEntry(fileHeader, edits);
}
private static void combineLineEdits(List<Edit> edits, Text a, Text b) {
for (int j = 0; j < edits.size() - 1;) {
Edit c = edits.get(j);
Edit n = edits.get(j + 1);
// Combine edits that are really close together. Right now our rule
// is, coalesce two line edits which are only one line apart if that
// common context line is either a "pointless line", or is identical
// on both sides and starts a new block of code. These are mostly
// block reindents to add or remove control flow operators.
//
final int ad = n.getBeginA() - c.getEndA();
final int bd = n.getBeginB() - c.getEndB();
if ((1 <= ad && isBlankLineGap(a, c.getEndA(), n.getBeginA()))
|| (1 <= bd && isBlankLineGap(b, c.getEndB(), n.getBeginB()))
|| (ad == 1 && bd == 1 && isControlBlockStart(a, c.getEndA()))) {
int ab = c.getBeginA();
int ae = n.getEndA();
int bb = c.getBeginB();
int be = n.getEndB();
edits.set(j, new Edit(ab, ae, bb, be));
edits.remove(j + 1);
continue;
}
j++;
}
}
private static boolean isBlankLineGap(Text a, int b, int e) {
for (; b < e; b++) {
if (!BLANK_LINE_RE.matcher(a.getLine(b)).matches()) {
return false;
}
}
return true;
}
private static boolean isControlBlockStart(Text a, int idx) {
final String l = a.getLine(idx);
return CONTROL_BLOCK_START_RE.matcher(l).find();
}
private static boolean canCoalesce(CharText a, int b, int e) {
while (b < e) {
if (a.charAt(b++) == '\n') {
return false;
}
}
return true;
}
private static int findLF(List<Edit> edits, int j, CharText t, int b) {
int lf = b;
int limit = 0 < j ? edits.get(j - 1).getEndB() : 0;
while (limit < lf && t.charAt(lf) != '\n') {
lf--;
}
return lf;
}
private static boolean isOnlyWhitespace(CharText t, final int b, final int e) {
for (int c = b; c < e; c++) {
if (!Character.isWhitespace(t.charAt(c))) {
return false;
}
}
return b < e;
}
private static Text read(Repository repo, String path, RevTree tree)
throws IOException {
TreeWalk tw = TreeWalk.forPath(repo, path, tree);
if (tw == null || tw.getFileMode(0).getObjectType() != Constants.OBJ_BLOB) {
return Text.EMPTY;
}
ObjectLoader ldr = repo.openObject(tw.getObjectId(0));
if (ldr == null) {
return Text.EMPTY;
}
return new Text(ldr.getCachedBytes());
}
private static AnyObjectId aFor(final PatchListKey key,
final Repository repo, final RevCommit b) throws IOException {
if (key.getOldId() != null) {
return key.getOldId();
private static Process exec(final Repository repo, final List<String> args)
throws IOException {
final String[] argv = args.toArray(new String[args.size()]);
return Runtime.getRuntime().exec(argv, null, repo.getDirectory());
}
switch (b.getParentCount()) {
case 0:
return emptyTree(repo);
case 1:
return b.getParent(0);
default:
// merge commit, return null to force combined diff behavior
return null;
private static ObjectId emptyTree(final Repository repo) throws IOException {
return new ObjectWriter(repo).writeCanonicalTree(new byte[0]);
}
}
private static Process exec(final Repository repo, final List<String> args)
throws IOException {
final String[] argv = args.toArray(new String[args.size()]);
return Runtime.getRuntime().exec(argv, null, repo.getDirectory());
}
private static ObjectId emptyTree(final Repository repo) throws IOException {
return new ObjectWriter(repo).writeCanonicalTree(new byte[0]);
}
}

View File

@ -19,9 +19,7 @@ import com.google.gerrit.reviewdb.RefRight;
import com.google.gerrit.reviewdb.ReviewDb;
import com.google.gerrit.server.cache.Cache;
import com.google.gerrit.server.cache.CacheModule;
import com.google.gerrit.server.cache.SelfPopulatingCache;
import com.google.gerrit.server.config.WildProjectName;
import com.google.gwtorm.client.OrmException;
import com.google.gerrit.server.cache.EntryCreator;
import com.google.gwtorm.client.SchemaFactory;
import com.google.inject.Inject;
import com.google.inject.Module;
@ -29,7 +27,6 @@ import com.google.inject.Singleton;
import com.google.inject.TypeLiteral;
import com.google.inject.name.Named;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@ -43,59 +40,20 @@ public class ProjectCacheImpl implements ProjectCache {
@Override
protected void configure() {
final TypeLiteral<Cache<Project.NameKey, ProjectState>> type =
new TypeLiteral<Cache<Project.NameKey, ProjectState>>() {};
core(type, CACHE_NAME);
new TypeLiteral<Cache<Project.NameKey, ProjectState>>() {};
core(type, CACHE_NAME).populateWith(Loader.class);
bind(ProjectCacheImpl.class);
bind(ProjectCache.class).to(ProjectCacheImpl.class);
}
};
}
private final ProjectState.Factory projectStateFactory;
private final SchemaFactory<ReviewDb> schema;
private final SelfPopulatingCache<Project.NameKey, ProjectState> byName;
private final Cache<Project.NameKey, ProjectState> byName;
@Inject
ProjectCacheImpl(final ProjectState.Factory psf,
final SchemaFactory<ReviewDb> sf,
@WildProjectName final Project.NameKey wp,
ProjectCacheImpl(
@Named(CACHE_NAME) final Cache<Project.NameKey, ProjectState> byName) {
projectStateFactory = psf;
schema = sf;
this.byName =
new SelfPopulatingCache<Project.NameKey, ProjectState>(byName) {
@Override
public ProjectState createEntry(final Project.NameKey key)
throws Exception {
return lookup(key);
}
};
}
/**
* Lookup for a state of a specified project on database
*
* @param key the project name key
* @return the project state
* @throws OrmException
*/
private ProjectState lookup(final Project.NameKey key) throws OrmException {
final ReviewDb db = schema.open();
try {
final Project p = db.projects().get(key);
if (p == null) {
return null;
}
final Collection<RefRight> rights =
Collections.unmodifiableCollection(db.refRights().byProject(
p.getNameKey()).toList());
return projectStateFactory.create(p, rights);
} finally {
db.close();
}
this.byName = byName;
}
/**
@ -119,4 +77,34 @@ public class ProjectCacheImpl implements ProjectCache {
public void evictAll() {
byName.removeAll();
}
static class Loader extends EntryCreator<Project.NameKey, ProjectState> {
private final ProjectState.Factory projectStateFactory;
private final SchemaFactory<ReviewDb> schema;
@Inject
Loader(ProjectState.Factory psf, SchemaFactory<ReviewDb> sf) {
projectStateFactory = psf;
schema = sf;
}
@Override
public ProjectState createEntry(Project.NameKey key) throws Exception {
final ReviewDb db = schema.open();
try {
final Project p = db.projects().get(key);
if (p == null) {
return null;
}
final Collection<RefRight> rights =
Collections.unmodifiableCollection(db.refRights().byProject(
p.getNameKey()).toList());
return projectStateFactory.create(p, rights);
} finally {
db.close();
}
}
}
}

View File

@ -22,7 +22,7 @@ import com.google.gerrit.reviewdb.AccountSshKey;
import com.google.gerrit.reviewdb.ReviewDb;
import com.google.gerrit.server.cache.Cache;
import com.google.gerrit.server.cache.CacheModule;
import com.google.gerrit.server.cache.SelfPopulatingCache;
import com.google.gerrit.server.cache.EntryCreator;
import com.google.gerrit.server.ssh.SshKeyCache;
import com.google.gwtorm.client.OrmException;
import com.google.gwtorm.client.SchemaFactory;
@ -59,7 +59,7 @@ public class SshKeyCacheImpl implements SshKeyCache {
protected void configure() {
final TypeLiteral<Cache<String, Iterable<SshKeyCacheEntry>>> type =
new TypeLiteral<Cache<String, Iterable<SshKeyCacheEntry>>>() {};
core(type, CACHE_NAME);
core(type, CACHE_NAME).populateWith(Loader.class);
bind(SshKeyCacheImpl.class);
bind(SshKeyCache.class).to(SshKeyCacheImpl.class);
}
@ -71,33 +71,20 @@ public class SshKeyCacheImpl implements SshKeyCache {
.asList(new SshKeyCacheEntry[0]));
}
private final SchemaFactory<ReviewDb> schema;
private final SelfPopulatingCache<String, Iterable<SshKeyCacheEntry>> self;
private final Cache<String, Iterable<SshKeyCacheEntry>> cache;
@Inject
SshKeyCacheImpl(final SchemaFactory<ReviewDb> schema,
@Named(CACHE_NAME) final Cache<String, Iterable<SshKeyCacheEntry>> raw) {
this.schema = schema;
self = new SelfPopulatingCache<String, Iterable<SshKeyCacheEntry>>(raw) {
@Override
protected Iterable<SshKeyCacheEntry> createEntry(final String username)
throws Exception {
return lookup(username);
}
@Override
protected Iterable<SshKeyCacheEntry> missing(final String username) {
return Collections.emptyList();
}
};
SshKeyCacheImpl(
@Named(CACHE_NAME) final Cache<String, Iterable<SshKeyCacheEntry>> cache) {
this.cache = cache;
}
public Iterable<SshKeyCacheEntry> get(String username) {
return self.get(username);
return cache.get(username);
}
public void evict(String username) {
self.remove(username);
cache.remove(username);
}
@Override
@ -120,52 +107,68 @@ public class SshKeyCacheImpl implements SshKeyCache {
}
}
private Iterable<SshKeyCacheEntry> lookup(final String username)
throws Exception {
final ReviewDb db = schema.open();
try {
final AccountExternalId.Key key =
new AccountExternalId.Key(SCHEME_USERNAME, username);
final AccountExternalId user = db.accountExternalIds().get(key);
if (user == null) {
return NO_SUCH_USER;
}
static class Loader extends EntryCreator<String, Iterable<SshKeyCacheEntry>> {
private final SchemaFactory<ReviewDb> schema;
final List<SshKeyCacheEntry> kl = new ArrayList<SshKeyCacheEntry>(4);
for (AccountSshKey k : db.accountSshKeys().byAccount(user.getAccountId())) {
if (k.isValid()) {
add(db, kl, k);
@Inject
Loader(SchemaFactory<ReviewDb> schema) {
this.schema = schema;
}
@Override
public Iterable<SshKeyCacheEntry> createEntry(String username)
throws Exception {
final ReviewDb db = schema.open();
try {
final AccountExternalId.Key key =
new AccountExternalId.Key(SCHEME_USERNAME, username);
final AccountExternalId user = db.accountExternalIds().get(key);
if (user == null) {
return NO_SUCH_USER;
}
}
if (kl.isEmpty()) {
return NO_KEYS;
}
return Collections.unmodifiableList(kl);
} finally {
db.close();
}
}
private void add(ReviewDb db, List<SshKeyCacheEntry> kl, AccountSshKey k) {
try {
kl.add(new SshKeyCacheEntry(k.getKey(), SshUtil.parse(k)));
} catch (OutOfMemoryError e) {
// This is the only case where we assume the problem has nothing
// to do with the key object, and instead we must abort this load.
//
throw e;
} catch (Throwable e) {
markInvalid(db, k);
final List<SshKeyCacheEntry> kl = new ArrayList<SshKeyCacheEntry>(4);
for (AccountSshKey k : db.accountSshKeys().byAccount(
user.getAccountId())) {
if (k.isValid()) {
add(db, kl, k);
}
}
if (kl.isEmpty()) {
return NO_KEYS;
}
return Collections.unmodifiableList(kl);
} finally {
db.close();
}
}
}
private void markInvalid(final ReviewDb db, final AccountSshKey k) {
try {
log.info("Flagging SSH key " + k.getKey() + " invalid");
k.setInvalid();
db.accountSshKeys().update(Collections.singleton(k));
} catch (OrmException e) {
log.error("Failed to mark SSH key" + k.getKey() + " invalid", e);
@Override
public Iterable<SshKeyCacheEntry> missing(String username) {
return Collections.emptyList();
}
private void add(ReviewDb db, List<SshKeyCacheEntry> kl, AccountSshKey k) {
try {
kl.add(new SshKeyCacheEntry(k.getKey(), SshUtil.parse(k)));
} catch (OutOfMemoryError e) {
// This is the only case where we assume the problem has nothing
// to do with the key object, and instead we must abort this load.
//
throw e;
} catch (Throwable e) {
markInvalid(db, k);
}
}
private void markInvalid(final ReviewDb db, final AccountSshKey k) {
try {
log.info("Flagging SSH key " + k.getKey() + " invalid");
k.setInvalid();
db.accountSshKeys().update(Collections.singleton(k));
} catch (OrmException e) {
log.error("Failed to mark SSH key" + k.getKey() + " invalid", e);
}
}
}
}