Use try-with-resources statements
- instead of finally blocks
- in cases of missing try-finally

Change-Id: I94f481a33d8e6a3180c436245d6e95e4d525280c
commit 5e90c63dea
parent b17ceb8587
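
The change applies the standard Java 7 try-with-resources refactoring throughout: a resource that was opened before a try block and released in a finally block (or, in a few places, never released at all) is instead declared in the try statement itself, so the compiler generates the close call. A minimal, self-contained sketch of the before/after shape (the class, method, and path parameter below are illustrative, not taken from the Gerrit sources):

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

class TryWithResourcesSketch {
  // Before: the stream has to be released in an explicit finally block.
  static int firstByteOld(String path) throws IOException {
    InputStream in = new FileInputStream(path);
    try {
      return in.read();
    } finally {
      in.close();
    }
  }

  // After: try-with-resources closes the stream automatically when the
  // block exits, normally or via an exception; InputStream implements
  // AutoCloseable, which is what the construct requires.
  static int firstByteNew(String path) throws IOException {
    try (InputStream in = new FileInputStream(path)) {
      return in.read();
    }
  }
}

A single try statement may also declare several resources separated by semicolons, as some hunks below do (for example pairing a FileOutputStream with an InputStream); they are closed in reverse order of declaration, and exceptions thrown during close are attached to the primary exception as suppressed exceptions.
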
@@ -71,8 +71,7 @@ public class AccountCreator {
if (account != null) {
return account;
}
ReviewDb db = reviewDbProvider.open();
try {
try (ReviewDb db = reviewDbProvider.open()) {
Account.Id id = new Account.Id(db.nextAccountId());
KeyPair sshKey = genSshKey();
AccountSshKey key =
@@ -115,8 +114,6 @@ public class AccountCreator {
new TestAccount(id, username, email, fullName, sshKey, httpPass);
accounts.put(username, account);
return account;
} finally {
db.close();
}
}

@@ -58,8 +58,7 @@ public class GcAssert {

private String[] getPackFiles(Project.NameKey p)
throws RepositoryNotFoundException, IOException {
Repository repo = repoManager.openRepository(p);
try {
try (Repository repo = repoManager.openRepository(p)) {
File packDir = new File(repo.getDirectory(), "objects/pack");
return packDir.list(new FilenameFilter() {
@Override
@@ -67,8 +66,6 @@ public class GcAssert {
return name.endsWith(".pack");
}
});
} finally {
repo.close();
}
}
}

@@ -115,8 +115,7 @@ public class VisibleRefFilterIT extends AbstractDaemonTest {
c2 = br.getChange().getId();
r2 = changeRefPrefix(c2);

Repository repo = repoManager.openRepository(project);
try {
try (Repository repo = repoManager.openRepository(project)) {
// master-tag -> master
RefUpdate mtu = repo.updateRef("refs/tags/master-tag");
mtu.setExpectedOldObjectId(ObjectId.zeroId());
@@ -128,8 +127,6 @@ public class VisibleRefFilterIT extends AbstractDaemonTest {
btu.setExpectedOldObjectId(ObjectId.zeroId());
btu.setNewObjectId(repo.getRef("refs/heads/branch").getObjectId());
assertThat(btu.update()).isEqualTo(RefUpdate.Result.NEW);
} finally {
repo.close();
}
}

@@ -249,15 +249,12 @@ public abstract class AbstractSubmit extends AbstractDaemonTest {
ChangeInfo c = get(changeId, CURRENT_REVISION);
assertThat(c.currentRevision).isEqualTo(expectedId.name());
assertThat(c.revisions.get(expectedId.name())._number).isEqualTo(expectedNum);
Repository repo =
repoManager.openRepository(new Project.NameKey(c.project));
try {
try (Repository repo =
repoManager.openRepository(new Project.NameKey(c.project))) {
Ref ref = repo.getRef(
new PatchSet.Id(new Change.Id(c._number), expectedNum).toRefName());
assertThat(ref).isNotNull();
assertThat(ref.getObjectId()).isEqualTo(expectedId);
} finally {
repo.close();
}
}

@@ -263,13 +263,10 @@ public class CreateProjectIT extends AbstractDaemonTest {

private void assertHead(String projectName, String expectedRef)
throws RepositoryNotFoundException, IOException {
Repository repo =
repoManager.openRepository(new Project.NameKey(projectName));
try {
try (Repository repo =
repoManager.openRepository(new Project.NameKey(projectName))) {
assertThat(repo.getRef(Constants.HEAD).getTarget().getName())
.isEqualTo(expectedRef);
} finally {
repo.close();
}
}

@@ -330,12 +330,9 @@ public class LabelTypeIT extends AbstractDaemonTest {
private void merge(PushOneCommit.Result r) throws Exception {
revision(r).review(ReviewInput.approve());
revision(r).submit();
Repository repo = repoManager.openRepository(project);
try {
try (Repository repo = repoManager.openRepository(project)) {
assertThat(repo.getRef("refs/heads/master").getObjectId()).isEqualTo(
r.getCommitId());
} finally {
repo.close();
}
}

@@ -258,15 +258,10 @@ public class H2CacheImpl<K, V> extends AbstractLoadingCache<K, V> implements

@Override
public void funnel(K from, PrimitiveSink into) {
try {
ObjectOutputStream ser =
new ObjectOutputStream(new SinkOutputStream(into));
try {
ser.writeObject(from);
ser.flush();
} finally {
ser.close();
}
try (ObjectOutputStream ser =
new ObjectOutputStream(new SinkOutputStream(into))) {
ser.writeObject(from);
ser.flush();
} catch (IOException err) {
throw new RuntimeException("Cannot hash as Serializable", err);
}

@@ -77,19 +77,14 @@ public class CssLinker extends AbstractLinker {

private String name(final TreeLogger logger, final PublicResource r)
throws UnableToCompleteException {
final InputStream in = r.getContents(logger);
final ByteArrayOutputStream tmp = new ByteArrayOutputStream();
try {
try {
final byte[] buf = new byte[2048];
int n;
while ((n = in.read(buf)) >= 0) {
tmp.write(buf, 0, n);
}
tmp.close();
} finally {
in.close();
try (InputStream in = r.getContents(logger)) {
final byte[] buf = new byte[2048];
int n;
while ((n = in.read(buf)) >= 0) {
tmp.write(buf, 0, n);
}
tmp.close();
} catch (IOException e) {
final UnableToCompleteException ute = new UnableToCompleteException();
ute.initCause(e);

@ -104,11 +104,8 @@ class BecomeAnyAccountLoginServlet extends HttpServlet {
|
||||
rsp.setContentType("text/html");
|
||||
rsp.setCharacterEncoding(HtmlDomUtil.ENC.name());
|
||||
rsp.setContentLength(raw.length);
|
||||
final OutputStream out = rsp.getOutputStream();
|
||||
try {
|
||||
try (OutputStream out = rsp.getOutputStream()) {
|
||||
out.write(raw);
|
||||
} finally {
|
||||
out.close();
|
||||
}
|
||||
return;
|
||||
}
|
||||
@ -129,13 +126,13 @@ class BecomeAnyAccountLoginServlet extends HttpServlet {
|
||||
} else {
|
||||
rsp.setContentType("text/html");
|
||||
rsp.setCharacterEncoding(HtmlDomUtil.ENC.name());
|
||||
final Writer out = rsp.getWriter();
|
||||
out.write("<html>");
|
||||
out.write("<body>");
|
||||
out.write("<h1>Account Not Found</h1>");
|
||||
out.write("</body>");
|
||||
out.write("</html>");
|
||||
out.close();
|
||||
try (Writer out = rsp.getWriter()) {
|
||||
out.write("<html>");
|
||||
out.write("<body>");
|
||||
out.write("<h1>Account Not Found</h1>");
|
||||
out.write("</body>");
|
||||
out.write("</html>");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -147,8 +144,7 @@ class BecomeAnyAccountLoginServlet extends HttpServlet {
|
||||
}
|
||||
|
||||
Element userlistElement = HtmlDomUtil.find(doc, "userlist");
|
||||
ReviewDb db = schema.open();
|
||||
try {
|
||||
try (ReviewDb db = schema.open()) {
|
||||
ResultSet<Account> accounts = db.accounts().firstNById(100);
|
||||
for (Account a : accounts) {
|
||||
String displayName;
|
||||
@ -168,8 +164,6 @@ class BecomeAnyAccountLoginServlet extends HttpServlet {
|
||||
userlistElement.appendChild(linkElement);
|
||||
userlistElement.appendChild(doc.createElement("br"));
|
||||
}
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
|
||||
return HtmlDomUtil.toUTF8(doc);
|
||||
@ -190,15 +184,10 @@ class BecomeAnyAccountLoginServlet extends HttpServlet {
|
||||
}
|
||||
|
||||
private AuthResult byUserName(final String userName) {
|
||||
try {
|
||||
final ReviewDb db = schema.open();
|
||||
try {
|
||||
AccountExternalId.Key key =
|
||||
new AccountExternalId.Key(SCHEME_USERNAME, userName);
|
||||
return auth(db.accountExternalIds().get(key));
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
try (ReviewDb db = schema.open()) {
|
||||
AccountExternalId.Key key =
|
||||
new AccountExternalId.Key(SCHEME_USERNAME, userName);
|
||||
return auth(db.accountExternalIds().get(key));
|
||||
} catch (OrmException e) {
|
||||
getServletContext().log("cannot query database", e);
|
||||
return null;
|
||||
@ -206,14 +195,9 @@ class BecomeAnyAccountLoginServlet extends HttpServlet {
|
||||
}
|
||||
|
||||
private AuthResult byPreferredEmail(final String email) {
|
||||
try {
|
||||
final ReviewDb db = schema.open();
|
||||
try {
|
||||
List<Account> matches = db.accounts().byPreferredEmail(email).toList();
|
||||
return matches.size() == 1 ? auth(matches.get(0)) : null;
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
try (ReviewDb db = schema.open()) {
|
||||
List<Account> matches = db.accounts().byPreferredEmail(email).toList();
|
||||
return matches.size() == 1 ? auth(matches.get(0)) : null;
|
||||
} catch (OrmException e) {
|
||||
getServletContext().log("cannot query database", e);
|
||||
return null;
|
||||
@ -227,13 +211,8 @@ class BecomeAnyAccountLoginServlet extends HttpServlet {
|
||||
} catch (NumberFormatException nfe) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
final ReviewDb db = schema.open();
|
||||
try {
|
||||
return auth(db.accounts().get(id));
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
try (ReviewDb db = schema.open()) {
|
||||
return auth(db.accounts().get(id));
|
||||
} catch (OrmException e) {
|
||||
getServletContext().log("cannot query database", e);
|
||||
return null;
|
||||
|
@ -111,11 +111,8 @@ class HttpAuthFilter implements Filter {
|
||||
rsp.setContentType("text/html");
|
||||
rsp.setCharacterEncoding(HtmlDomUtil.ENC.name());
|
||||
rsp.setContentLength(tosend.length);
|
||||
final OutputStream out = rsp.getOutputStream();
|
||||
try {
|
||||
try (OutputStream out = rsp.getOutputStream()) {
|
||||
out.write(tosend);
|
||||
} finally {
|
||||
out.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -104,12 +104,8 @@ class HttpLoginServlet extends HttpServlet {
|
||||
rsp.setContentType("text/html");
|
||||
rsp.setCharacterEncoding("UTF-8");
|
||||
rsp.setContentLength(bin.length);
|
||||
final ServletOutputStream out = rsp.getOutputStream();
|
||||
try {
|
||||
try (ServletOutputStream out = rsp.getOutputStream()) {
|
||||
out.write(bin);
|
||||
} finally {
|
||||
out.flush();
|
||||
out.close();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
@ -92,11 +92,8 @@ class LdapLoginServlet extends HttpServlet {
|
||||
res.setContentType("text/html");
|
||||
res.setCharacterEncoding("UTF-8");
|
||||
res.setContentLength(bin.length);
|
||||
ServletOutputStream out = res.getOutputStream();
|
||||
try {
|
||||
try (ServletOutputStream out = res.getOutputStream()) {
|
||||
out.write(bin);
|
||||
} finally {
|
||||
out.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -72,11 +72,8 @@ class GitLogoServlet extends HttpServlet {
|
||||
rsp.setDateHeader("Last-Modified", modified);
|
||||
CacheHeaders.setCacheable(req, rsp, 5, TimeUnit.MINUTES);
|
||||
|
||||
final ServletOutputStream os = rsp.getOutputStream();
|
||||
try {
|
||||
try (ServletOutputStream os = rsp.getOutputStream()) {
|
||||
os.write(raw);
|
||||
} finally {
|
||||
os.close();
|
||||
}
|
||||
} else {
|
||||
CacheHeaders.setNotCacheable(rsp);
|
||||
|
@ -101,11 +101,8 @@ abstract class GitwebCssServlet extends HttpServlet {
|
||||
rsp.setDateHeader("Last-Modified", modified);
|
||||
CacheHeaders.setCacheable(req, rsp, 5, TimeUnit.MINUTES);
|
||||
|
||||
final ServletOutputStream os = rsp.getOutputStream();
|
||||
try {
|
||||
try (ServletOutputStream os = rsp.getOutputStream()) {
|
||||
os.write(toSend);
|
||||
} finally {
|
||||
os.close();
|
||||
}
|
||||
} else {
|
||||
CacheHeaders.setNotCacheable(rsp);
|
||||
|
@ -72,11 +72,8 @@ class GitwebJavaScriptServlet extends HttpServlet {
|
||||
rsp.setDateHeader("Last-Modified", modified);
|
||||
CacheHeaders.setCacheable(req, rsp, 5, TimeUnit.MINUTES);
|
||||
|
||||
final ServletOutputStream os = rsp.getOutputStream();
|
||||
try {
|
||||
try (ServletOutputStream os = rsp.getOutputStream()) {
|
||||
os.write(raw);
|
||||
} finally {
|
||||
os.close();
|
||||
}
|
||||
} else {
|
||||
CacheHeaders.setNotCacheable(rsp);
|
||||
|
@ -414,19 +414,14 @@ class GitwebServlet extends HttpServlet {
|
||||
return;
|
||||
}
|
||||
|
||||
final Repository repo;
|
||||
try {
|
||||
repo = repoManager.openRepository(nameKey);
|
||||
|
||||
try (@SuppressWarnings("UnusedDeclaration") // only open for existence-check
|
||||
Repository repo = repoManager.openRepository(nameKey)) {
|
||||
CacheHeaders.setNotCacheable(rsp);
|
||||
exec(req, rsp, project);
|
||||
} catch (RepositoryNotFoundException e) {
|
||||
getServletContext().log("Cannot open repository", e);
|
||||
rsp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
|
||||
return;
|
||||
}
|
||||
try {
|
||||
CacheHeaders.setNotCacheable(rsp);
|
||||
exec(req, rsp, project);
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
}
|
||||
|
||||
@ -476,25 +471,15 @@ class GitwebServlet extends HttpServlet {
|
||||
proc.getOutputStream().close();
|
||||
}
|
||||
|
||||
try {
|
||||
final InputStream in;
|
||||
try (InputStream in = new BufferedInputStream(proc.getInputStream(), bufferSize)) {
|
||||
readCgiHeaders(rsp, in);
|
||||
|
||||
in = new BufferedInputStream(proc.getInputStream(), bufferSize);
|
||||
try {
|
||||
readCgiHeaders(rsp, in);
|
||||
|
||||
final OutputStream out = rsp.getOutputStream();
|
||||
try {
|
||||
final byte[] buf = new byte[bufferSize];
|
||||
int n;
|
||||
while ((n = in.read(buf)) > 0) {
|
||||
out.write(buf, 0, n);
|
||||
}
|
||||
} finally {
|
||||
out.close();
|
||||
try (OutputStream out = rsp.getOutputStream()) {
|
||||
final byte[] buf = new byte[bufferSize];
|
||||
int n;
|
||||
while ((n = in.read(buf)) > 0) {
|
||||
out.write(buf, 0, n);
|
||||
}
|
||||
} finally {
|
||||
in.close();
|
||||
}
|
||||
} catch (IOException e) {
|
||||
// The browser has probably closed its input stream. We don't
|
||||
@ -651,16 +636,11 @@ class GitwebServlet extends HttpServlet {
|
||||
new Thread(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
final BufferedReader br =
|
||||
new BufferedReader(new InputStreamReader(in, "ISO-8859-1"));
|
||||
try {
|
||||
String line;
|
||||
while ((line = br.readLine()) != null) {
|
||||
log.error("CGI: " + line);
|
||||
}
|
||||
} finally {
|
||||
br.close();
|
||||
try (BufferedReader br =
|
||||
new BufferedReader(new InputStreamReader(in, "ISO-8859-1"))) {
|
||||
String line;
|
||||
while ((line = br.readLine()) != null) {
|
||||
log.error("CGI: " + line);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
log.debug("Unexpected error copying stderr from CGI", e);
|
||||
|
@ -411,18 +411,18 @@ class HttpPluginServlet extends HttpServlet
|
||||
|
||||
if (about != null) {
|
||||
InputStreamReader isr = new InputStreamReader(scanner.getInputStream(about));
|
||||
BufferedReader reader = new BufferedReader(isr);
|
||||
StringBuilder aboutContent = new StringBuilder();
|
||||
String line;
|
||||
while ((line = reader.readLine()) != null) {
|
||||
line = line.trim();
|
||||
if (line.isEmpty()) {
|
||||
aboutContent.append("\n");
|
||||
} else {
|
||||
aboutContent.append(line).append("\n");
|
||||
try (BufferedReader reader = new BufferedReader(isr)) {
|
||||
String line;
|
||||
while ((line = reader.readLine()) != null) {
|
||||
line = line.trim();
|
||||
if (line.isEmpty()) {
|
||||
aboutContent.append("\n");
|
||||
} else {
|
||||
aboutContent.append(line).append("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
reader.close();
|
||||
|
||||
// Only append the About section if there was anything in it
|
||||
if (aboutContent.toString().trim().length() > 0) {
|
||||
@ -641,11 +641,8 @@ class HttpPluginServlet extends HttpServlet
|
||||
private static byte[] readWholeEntry(PluginContentScanner scanner, PluginEntry entry)
|
||||
throws IOException {
|
||||
byte[] data = new byte[entry.getSize().get().intValue()];
|
||||
InputStream in = scanner.getInputStream(entry);
|
||||
try {
|
||||
try (InputStream in = scanner.getInputStream(entry)) {
|
||||
IO.readFully(in, data, 0, data.length);
|
||||
} finally {
|
||||
in.close();
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
@ -128,25 +128,20 @@ public class HostPageServlet extends HttpServlet {
|
||||
}
|
||||
|
||||
String src = "gerrit_ui/gerrit_ui.nocache.js";
|
||||
InputStream in = servletContext.getResourceAsStream("/" + src);
|
||||
if (in != null) {
|
||||
Hasher md = Hashing.md5().newHasher();
|
||||
try {
|
||||
try {
|
||||
final byte[] buf = new byte[1024];
|
||||
int n;
|
||||
while ((n = in.read(buf)) > 0) {
|
||||
md.putBytes(buf, 0, n);
|
||||
}
|
||||
} finally {
|
||||
in.close();
|
||||
try (InputStream in = servletContext.getResourceAsStream("/" + src)) {
|
||||
if (in != null) {
|
||||
Hasher md = Hashing.md5().newHasher();
|
||||
final byte[] buf = new byte[1024];
|
||||
int n;
|
||||
while ((n = in.read(buf)) > 0) {
|
||||
md.putBytes(buf, 0, n);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new IOException("Failed reading " + src, e);
|
||||
src += "?content=" + md.hash().toString();
|
||||
} else {
|
||||
log.debug("No " + src + " in webapp root; keeping noncache.js URL");
|
||||
}
|
||||
src += "?content=" + md.hash().toString();
|
||||
} else {
|
||||
log.debug("No " + src + " in webapp root; keeping noncache.js URL");
|
||||
} catch (IOException e) {
|
||||
throw new IOException("Failed reading " + src, e);
|
||||
}
|
||||
|
||||
noCacheName = src;
|
||||
@ -224,11 +219,8 @@ public class HostPageServlet extends HttpServlet {
|
||||
rsp.setContentType("text/html");
|
||||
rsp.setCharacterEncoding(HtmlDomUtil.ENC.name());
|
||||
rsp.setContentLength(tosend.length);
|
||||
final OutputStream out = rsp.getOutputStream();
|
||||
try {
|
||||
try (OutputStream out = rsp.getOutputStream()) {
|
||||
out.write(tosend);
|
||||
} finally {
|
||||
out.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -70,11 +70,8 @@ public class LegacyGerritServlet extends HttpServlet {
|
||||
rsp.setContentType("text/html");
|
||||
rsp.setCharacterEncoding(HtmlDomUtil.ENC.name());
|
||||
rsp.setContentLength(tosend.length);
|
||||
final OutputStream out = rsp.getOutputStream();
|
||||
try {
|
||||
try (OutputStream out = rsp.getOutputStream()) {
|
||||
out.write(tosend);
|
||||
} finally {
|
||||
out.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -90,11 +90,8 @@ public class SshInfoServlet extends HttpServlet {
|
||||
CacheHeaders.setNotCacheable(rsp);
|
||||
rsp.setCharacterEncoding("UTF-8");
|
||||
rsp.setContentType("text/plain");
|
||||
final PrintWriter w = rsp.getWriter();
|
||||
try {
|
||||
try (PrintWriter w = rsp.getWriter()) {
|
||||
w.write(out);
|
||||
} finally {
|
||||
w.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -199,11 +199,8 @@ public class StaticServlet extends HttpServlet {
|
||||
rsp.setHeader(ETAG, r.etag);
|
||||
rsp.setContentType(r.contentType);
|
||||
rsp.setContentLength(tosend.length);
|
||||
final OutputStream out = rsp.getOutputStream();
|
||||
try {
|
||||
try (OutputStream out = rsp.getOutputStream()) {
|
||||
out.write(tosend);
|
||||
} finally {
|
||||
out.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -82,11 +82,8 @@ public class ToolServlet extends HttpServlet {
|
||||
rsp.setHeader(HDR_CACHE_CONTROL, "no-cache, must-revalidate");
|
||||
rsp.setContentType("application/octet-stream");
|
||||
rsp.setContentLength(tosend.length);
|
||||
final OutputStream out = rsp.getOutputStream();
|
||||
try {
|
||||
try (OutputStream out = rsp.getOutputStream()) {
|
||||
out.write(tosend);
|
||||
} finally {
|
||||
out.close();
|
||||
}
|
||||
}
|
||||
|
||||
@ -148,11 +145,8 @@ public class ToolServlet extends HttpServlet {
|
||||
rsp.setContentType("text/html");
|
||||
rsp.setCharacterEncoding("UTF-8");
|
||||
rsp.setContentLength(tosend.length);
|
||||
final OutputStream out = rsp.getOutputStream();
|
||||
try {
|
||||
try (OutputStream out = rsp.getOutputStream()) {
|
||||
out.write(tosend);
|
||||
} finally {
|
||||
out.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -522,8 +522,7 @@ public class RestApiServlet extends HttpServlet {
|
||||
IllegalArgumentException, NoSuchMethodException, IllegalAccessException,
|
||||
InstantiationException, InvocationTargetException, MethodNotAllowedException {
|
||||
if (isType(JSON_TYPE, req.getContentType())) {
|
||||
BufferedReader br = req.getReader();
|
||||
try {
|
||||
try (BufferedReader br = req.getReader()) {
|
||||
JsonReader json = new JsonReader(br);
|
||||
json.setLenient(true);
|
||||
|
||||
@ -537,8 +536,6 @@ public class RestApiServlet extends HttpServlet {
|
||||
return parseString(json.nextString(), type);
|
||||
}
|
||||
return OutputFormat.JSON.newGson().fromJson(json, type);
|
||||
} finally {
|
||||
br.close();
|
||||
}
|
||||
} else if (("PUT".equals(req.getMethod()) || "POST".equals(req.getMethod()))
|
||||
&& acceptsRawInput(type)) {
|
||||
@ -548,8 +545,7 @@ public class RestApiServlet extends HttpServlet {
|
||||
} else if (hasNoBody(req)) {
|
||||
return createInstance(type);
|
||||
} else if (isType("text/plain", req.getContentType())) {
|
||||
BufferedReader br = req.getReader();
|
||||
try {
|
||||
try (BufferedReader br = req.getReader()) {
|
||||
char[] tmp = new char[256];
|
||||
StringBuilder sb = new StringBuilder();
|
||||
int n;
|
||||
@ -557,8 +553,6 @@ public class RestApiServlet extends HttpServlet {
|
||||
sb.append(tmp, 0, n);
|
||||
}
|
||||
return parseString(sb.toString(), type);
|
||||
} finally {
|
||||
br.close();
|
||||
}
|
||||
} else if ("POST".equals(req.getMethod())
|
||||
&& isType(FORM_TYPE, req.getContentType())) {
|
||||
@ -772,11 +766,8 @@ public class RestApiServlet extends HttpServlet {
|
||||
}
|
||||
|
||||
if (req == null || !"HEAD".equals(req.getMethod())) {
|
||||
OutputStream dst = res.getOutputStream();
|
||||
try {
|
||||
try (OutputStream dst = res.getOutputStream()) {
|
||||
bin.writeTo(dst);
|
||||
} finally {
|
||||
dst.close();
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
@ -1078,9 +1069,9 @@ public class RestApiServlet extends HttpServlet {
|
||||
private static BinaryResult compress(BinaryResult bin)
|
||||
throws IOException {
|
||||
TemporaryBuffer.Heap buf = heap(HEAP_EST_SIZE, 20 << 20);
|
||||
GZIPOutputStream gz = new GZIPOutputStream(buf);
|
||||
bin.writeTo(gz);
|
||||
gz.close();
|
||||
try (GZIPOutputStream gz = new GZIPOutputStream(buf)) {
|
||||
bin.writeTo(gz);
|
||||
}
|
||||
return asBinaryResult(buf).setContentType(bin.getContentType());
|
||||
}
|
||||
|
||||
|
@ -102,16 +102,11 @@ public final class GerritLauncher {
|
||||
return "";
|
||||
}
|
||||
|
||||
try {
|
||||
final JarFile jar = new JarFile(me);
|
||||
try {
|
||||
Manifest mf = jar.getManifest();
|
||||
Attributes att = mf.getMainAttributes();
|
||||
String val = att.getValue(Attributes.Name.IMPLEMENTATION_VERSION);
|
||||
return val != null ? val : "";
|
||||
} finally {
|
||||
jar.close();
|
||||
}
|
||||
try (JarFile jar = new JarFile(me)) {
|
||||
Manifest mf = jar.getManifest();
|
||||
Attributes att = mf.getMainAttributes();
|
||||
String val = att.getValue(Attributes.Name.IMPLEMENTATION_VERSION);
|
||||
return val != null ? val : "";
|
||||
} catch (IOException e) {
|
||||
return "";
|
||||
}
|
||||
@ -202,28 +197,23 @@ public final class GerritLauncher {
|
||||
}
|
||||
|
||||
final SortedMap<String, URL> jars = new TreeMap<>();
|
||||
try {
|
||||
final ZipFile zf = new ZipFile(path);
|
||||
try {
|
||||
final Enumeration<? extends ZipEntry> e = zf.entries();
|
||||
while (e.hasMoreElements()) {
|
||||
final ZipEntry ze = e.nextElement();
|
||||
if (ze.isDirectory()) {
|
||||
continue;
|
||||
}
|
||||
try (ZipFile zf = new ZipFile(path)) {
|
||||
final Enumeration<? extends ZipEntry> e = zf.entries();
|
||||
while (e.hasMoreElements()) {
|
||||
final ZipEntry ze = e.nextElement();
|
||||
if (ze.isDirectory()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
String name = ze.getName();
|
||||
if (name.startsWith("WEB-INF/lib/")) {
|
||||
String name = ze.getName();
|
||||
if (name.startsWith("WEB-INF/lib/")) {
|
||||
extractJar(zf, ze, jars);
|
||||
} else if (name.startsWith("WEB-INF/pgm-lib/")) {
|
||||
// Some Prolog tools are restricted.
|
||||
if (prologCompiler || !name.startsWith("WEB-INF/pgm-lib/prolog-")) {
|
||||
extractJar(zf, ze, jars);
|
||||
} else if (name.startsWith("WEB-INF/pgm-lib/")) {
|
||||
// Some Prolog tools are restricted.
|
||||
if (prologCompiler || !name.startsWith("WEB-INF/pgm-lib/prolog-")) {
|
||||
extractJar(zf, ze, jars);
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
zf.close();
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new IOException("Cannot obtain libraries from " + path, e);
|
||||
@ -257,20 +247,13 @@ public final class GerritLauncher {
|
||||
private static void extractJar(ZipFile zf, ZipEntry ze,
|
||||
SortedMap<String, URL> jars) throws IOException {
|
||||
File tmp = createTempFile(safeName(ze), ".jar");
|
||||
FileOutputStream out = new FileOutputStream(tmp);
|
||||
try {
|
||||
InputStream in = zf.getInputStream(ze);
|
||||
try {
|
||||
byte[] buf = new byte[4096];
|
||||
int n;
|
||||
while ((n = in.read(buf, 0, buf.length)) > 0) {
|
||||
out.write(buf, 0, n);
|
||||
}
|
||||
} finally {
|
||||
in.close();
|
||||
try (FileOutputStream out = new FileOutputStream(tmp);
|
||||
InputStream in = zf.getInputStream(ze)) {
|
||||
byte[] buf = new byte[4096];
|
||||
int n;
|
||||
while ((n = in.read(buf, 0, buf.length)) > 0) {
|
||||
out.write(buf, 0, n);
|
||||
}
|
||||
} finally {
|
||||
out.close();
|
||||
}
|
||||
|
||||
String name = ze.getName();
|
||||
@ -363,24 +346,16 @@ public final class GerritLauncher {
|
||||
final CodeSource src =
|
||||
GerritLauncher.class.getProtectionDomain().getCodeSource();
|
||||
if (src != null) {
|
||||
try {
|
||||
final InputStream in = src.getLocation().openStream();
|
||||
try {
|
||||
final File tmp = createTempFile("gerrit_", ".zip");
|
||||
final FileOutputStream out = new FileOutputStream(tmp);
|
||||
try {
|
||||
final byte[] buf = new byte[4096];
|
||||
int n;
|
||||
while ((n = in.read(buf, 0, buf.length)) > 0) {
|
||||
out.write(buf, 0, n);
|
||||
}
|
||||
} finally {
|
||||
out.close();
|
||||
try (InputStream in = src.getLocation().openStream()) {
|
||||
final File tmp = createTempFile("gerrit_", ".zip");
|
||||
try (FileOutputStream out = new FileOutputStream(tmp)) {
|
||||
final byte[] buf = new byte[4096];
|
||||
int n;
|
||||
while ((n = in.read(buf, 0, buf.length)) > 0) {
|
||||
out.write(buf, 0, n);
|
||||
}
|
||||
return tmp;
|
||||
} finally {
|
||||
in.close();
|
||||
}
|
||||
return tmp;
|
||||
} catch (IOException e) {
|
||||
// Nope, that didn't work.
|
||||
//
|
||||
|
@ -319,11 +319,8 @@ class LoginForm extends HttpServlet {
|
||||
res.setContentType("text/html");
|
||||
res.setCharacterEncoding("UTF-8");
|
||||
res.setContentLength(bin.length);
|
||||
ServletOutputStream out = res.getOutputStream();
|
||||
try {
|
||||
try (ServletOutputStream out = res.getOutputStream()) {
|
||||
out.write(bin);
|
||||
} finally {
|
||||
out.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -63,11 +63,8 @@ class XrdsServlet extends HttpServlet {
|
||||
rsp.setContentType("application/xrds+xml");
|
||||
rsp.setCharacterEncoding(ENC);
|
||||
|
||||
final ServletOutputStream out = rsp.getOutputStream();
|
||||
try {
|
||||
try (ServletOutputStream out = rsp.getOutputStream()) {
|
||||
out.write(raw);
|
||||
} finally {
|
||||
out.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -40,13 +40,12 @@ public class Cat extends AbstractProgram {
|
||||
name = "WEB-INF/" + fileName;
|
||||
}
|
||||
|
||||
final InputStream in = open(name);
|
||||
if (in == null) {
|
||||
System.err.println("error: no such file " + fileName);
|
||||
return 1;
|
||||
}
|
||||
try (InputStream in = open(name)) {
|
||||
if (in == null) {
|
||||
System.err.println("error: no such file " + fileName);
|
||||
return 1;
|
||||
}
|
||||
|
||||
try {
|
||||
try {
|
||||
final byte[] buf = new byte[4096];
|
||||
int n;
|
||||
@ -56,8 +55,6 @@ public class Cat extends AbstractProgram {
|
||||
} finally {
|
||||
System.out.flush();
|
||||
}
|
||||
} finally {
|
||||
in.close();
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
@ -60,14 +60,11 @@ public class LocalUsernamesToLowerCase extends SiteProgram {
|
||||
manager.start();
|
||||
dbInjector.injectMembers(this);
|
||||
|
||||
final ReviewDb db = database.open();
|
||||
try {
|
||||
try (ReviewDb db = database.open()) {
|
||||
todo = db.accountExternalIds().all().toList();
|
||||
synchronized (monitor) {
|
||||
monitor.beginTask("Converting local username", todo.size());
|
||||
}
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
|
||||
final List<Worker> workers = new ArrayList<>(threads);
|
||||
|
@ -26,8 +26,7 @@ import java.util.zip.ZipFile;
|
||||
public class Ls extends AbstractProgram {
|
||||
@Override
|
||||
public int run() throws IOException {
|
||||
final ZipFile zf = new ZipFile(GerritLauncher.getDistributionArchive());
|
||||
try {
|
||||
try (ZipFile zf = new ZipFile(GerritLauncher.getDistributionArchive())) {
|
||||
final Enumeration<? extends ZipEntry> e = zf.entries();
|
||||
while (e.hasMoreElements()) {
|
||||
final ZipEntry ze = e.nextElement();
|
||||
@ -48,8 +47,6 @@ public class Ls extends AbstractProgram {
|
||||
System.out.println(name);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
zf.close();
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
@ -47,14 +47,11 @@ public class ProtoGen extends AbstractProgram {
|
||||
PrintWriter out = new PrintWriter(
|
||||
new BufferedWriter(new OutputStreamWriter(o, "UTF-8")))) {
|
||||
String header;
|
||||
InputStream in = getClass().getResourceAsStream("ProtoGenHeader.txt");
|
||||
try {
|
||||
try (InputStream in = getClass().getResourceAsStream("ProtoGenHeader.txt")) {
|
||||
ByteBuffer buf = IO.readWholeStream(in, 1024);
|
||||
int ptr = buf.arrayOffset() + buf.position();
|
||||
int len = buf.remaining();
|
||||
header = new String(buf.array(), ptr, len, "UTF-8");
|
||||
} finally {
|
||||
in.close();
|
||||
}
|
||||
|
||||
String version = com.google.gerrit.common.Version.getVersion();
|
||||
|
@ -92,11 +92,9 @@ public class ProtobufImport extends SiteProgram {
|
||||
});
|
||||
dbInjector.injectMembers(this);
|
||||
|
||||
ReviewDb db = schemaFactory.open();
|
||||
|
||||
ProgressMonitor progress = new TextProgressMonitor();
|
||||
progress.beginTask("Importing entities", ProgressMonitor.UNKNOWN);
|
||||
try {
|
||||
try (ReviewDb db = schemaFactory.open()) {
|
||||
for (RelationModel model
|
||||
: new JavaSchemaModel(ReviewDb.class).getRelations()) {
|
||||
relations.put(model.getRelationID(), Relation.create(model, db));
|
||||
@ -119,8 +117,6 @@ public class ProtobufImport extends SiteProgram {
|
||||
}
|
||||
}
|
||||
progress.endTask();
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
@ -113,13 +113,11 @@ public class RebuildNotedb extends SiteProgram {
|
||||
sysInjector.getInstance(GitRepositoryManager.class);
|
||||
final Project.NameKey allUsersName =
|
||||
sysInjector.getInstance(AllUsersName.class);
|
||||
final Repository allUsersRepo =
|
||||
repoManager.openMetadataRepository(allUsersName);
|
||||
try {
|
||||
try (Repository allUsersRepo =
|
||||
repoManager.openMetadataRepository(allUsersName)) {
|
||||
deleteDraftRefs(allUsersRepo);
|
||||
for (final Project.NameKey project : changesByProject.keySet()) {
|
||||
final Repository repo = repoManager.openMetadataRepository(project);
|
||||
try {
|
||||
try (Repository repo = repoManager.openMetadataRepository(project)) {
|
||||
final BatchRefUpdate bru = repo.getRefDatabase().newBatchUpdate();
|
||||
final BatchRefUpdate bruForDrafts =
|
||||
allUsersRepo.getRefDatabase().newBatchUpdate();
|
||||
@ -158,12 +156,8 @@ public class RebuildNotedb extends SiteProgram {
|
||||
log.error("Error rebuilding notedb", e);
|
||||
ok.set(false);
|
||||
break;
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
allUsersRepo.close();
|
||||
}
|
||||
|
||||
double t = sw.elapsed(TimeUnit.MILLISECONDS) / 1000d;
|
||||
@ -231,16 +225,13 @@ public class RebuildNotedb extends SiteProgram {
|
||||
// rebuilder threads to use the full connection pool.
|
||||
SchemaFactory<ReviewDb> schemaFactory = sysInjector.getInstance(Key.get(
|
||||
new TypeLiteral<SchemaFactory<ReviewDb>>() {}));
|
||||
ReviewDb db = schemaFactory.open();
|
||||
Multimap<Project.NameKey, Change> changesByProject =
|
||||
ArrayListMultimap.create();
|
||||
try {
|
||||
try (ReviewDb db = schemaFactory.open()) {
|
||||
for (Change c : db.changes().all()) {
|
||||
changesByProject.put(c.getProject(), c);
|
||||
}
|
||||
return changesByProject;
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -149,21 +149,18 @@ public class Reindex extends SiteProgram {
|
||||
}
|
||||
|
||||
private int indexAll() throws Exception {
|
||||
ReviewDb db = sysInjector.getInstance(ReviewDb.class);
|
||||
ProgressMonitor pm = new TextProgressMonitor();
|
||||
pm.start(1);
|
||||
pm.beginTask("Collecting projects", ProgressMonitor.UNKNOWN);
|
||||
Set<Project.NameKey> projects = Sets.newTreeSet();
|
||||
int changeCount = 0;
|
||||
try {
|
||||
try (ReviewDb db = sysInjector.getInstance(ReviewDb.class)) {
|
||||
for (Change change : db.changes().all()) {
|
||||
changeCount++;
|
||||
if (projects.add(change.getProject())) {
|
||||
pm.update(1);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
pm.endTask();
|
||||
|
||||
|
@ -81,8 +81,7 @@ public class Rulec extends SiteProgram {
|
||||
|
||||
boolean error = false;
|
||||
for (Project.NameKey project : names) {
|
||||
Repository git = gitManager.openRepository(project);
|
||||
try {
|
||||
try (Repository git = gitManager.openRepository(project)) {
|
||||
switch (jarFactory.create(git).call()) {
|
||||
case NO_RULES:
|
||||
if (!all || projectNames.contains(project.get())) {
|
||||
@ -105,8 +104,6 @@ public class Rulec extends SiteProgram {
|
||||
System.err.println("fatal: " + err.getMessage());
|
||||
}
|
||||
error = true;
|
||||
} finally {
|
||||
git.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -56,11 +56,8 @@ class HiddenErrorHandler extends ErrorHandler {
|
||||
try {
|
||||
CacheHeaders.setNotCacheable(res);
|
||||
} finally {
|
||||
ServletOutputStream out = res.getOutputStream();
|
||||
try {
|
||||
try (ServletOutputStream out = res.getOutputStream()) {
|
||||
out.write(msg);
|
||||
} finally {
|
||||
out.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -520,8 +520,7 @@ public class JettyServer {
|
||||
}
|
||||
|
||||
private static void unpack(File srcwar, File dstwar) throws IOException {
|
||||
final ZipFile zf = new ZipFile(srcwar);
|
||||
try {
|
||||
try (ZipFile zf = new ZipFile(srcwar)) {
|
||||
final Enumeration<? extends ZipEntry> e = zf.entries();
|
||||
while (e.hasMoreElements()) {
|
||||
final ZipEntry ze = e.nextElement();
|
||||
@ -539,24 +538,15 @@ public class JettyServer {
|
||||
mkdir(rawtmp.getParentFile());
|
||||
rawtmp.deleteOnExit();
|
||||
|
||||
final FileOutputStream rawout = new FileOutputStream(rawtmp);
|
||||
try {
|
||||
final InputStream in = zf.getInputStream(ze);
|
||||
try {
|
||||
final byte[] buf = new byte[4096];
|
||||
int n;
|
||||
while ((n = in.read(buf, 0, buf.length)) > 0) {
|
||||
rawout.write(buf, 0, n);
|
||||
}
|
||||
} finally {
|
||||
in.close();
|
||||
try (FileOutputStream rawout = new FileOutputStream(rawtmp);
|
||||
InputStream in = zf.getInputStream(ze)) {
|
||||
final byte[] buf = new byte[4096];
|
||||
int n;
|
||||
while ((n = in.read(buf, 0, buf.length)) > 0) {
|
||||
rawout.write(buf, 0, n);
|
||||
}
|
||||
} finally {
|
||||
rawout.close();
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
zf.close();
|
||||
}
|
||||
}
|
||||
|
||||
@ -632,14 +622,14 @@ public class JettyServer {
|
||||
CacheHeaders.setNotCacheable(res);
|
||||
|
||||
Escaper html = HtmlEscapers.htmlEscaper();
|
||||
PrintWriter w = res.getWriter();
|
||||
w.write("<html><title>BUILD FAILED</title><body>");
|
||||
w.format("<h1>%s FAILED</h1>", html.escape(rule));
|
||||
w.write("<pre>");
|
||||
w.write(html.escape(RawParseUtils.decode(why)));
|
||||
w.write("</pre>");
|
||||
w.write("</body></html>");
|
||||
w.close();
|
||||
try (PrintWriter w = res.getWriter()) {
|
||||
w.write("<html><title>BUILD FAILED</title><body>");
|
||||
w.format("<h1>%s FAILED</h1>", html.escape(rule));
|
||||
w.write("<pre>");
|
||||
w.write(html.escape(RawParseUtils.decode(why)));
|
||||
w.write("</pre>");
|
||||
w.write("</body></html>");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -667,12 +657,10 @@ public class JettyServer {
|
||||
long start = TimeUtil.nowMs();
|
||||
Process rebuild = proc.start();
|
||||
byte[] out;
|
||||
InputStream in = rebuild.getInputStream();
|
||||
try {
|
||||
try (InputStream in = rebuild.getInputStream()) {
|
||||
out = ByteStreams.toByteArray(in);
|
||||
} finally {
|
||||
rebuild.getOutputStream().close();
|
||||
in.close();
|
||||
}
|
||||
|
||||
int status;
|
||||
@ -692,12 +680,9 @@ public class JettyServer {
|
||||
private static Properties loadBuckProperties(File gen)
|
||||
throws FileNotFoundException, IOException {
|
||||
Properties properties = new Properties();
|
||||
InputStream in = new FileInputStream(
|
||||
new File(new File(gen, "tools"), "buck.properties"));
|
||||
try {
|
||||
try (InputStream in = new FileInputStream(
|
||||
new File(new File(gen, "tools"), "buck.properties"))) {
|
||||
properties.load(in);
|
||||
} finally {
|
||||
in.close();
|
||||
}
|
||||
return properties;
|
||||
}
|
||||
|
@ -68,8 +68,7 @@ public class InitAdminUser implements InitStep {
|
||||
return;
|
||||
}
|
||||
|
||||
ReviewDb db = dbFactory.open();
|
||||
try {
|
||||
try (ReviewDb db = dbFactory.open()) {
|
||||
if (db.accounts().anyAccounts().toList().isEmpty()) {
|
||||
ui.header("Gerrit Administrator");
|
||||
if (ui.yesno(true, "Create administrator user")) {
|
||||
@ -111,8 +110,6 @@ public class InitAdminUser implements InitStep {
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -171,13 +171,10 @@ public class InitPlugins implements InitStep {
|
||||
}
|
||||
|
||||
private static String getVersion(Path plugin) throws IOException {
|
||||
JarFile jarFile = new JarFile(plugin.toFile());
|
||||
try {
|
||||
try (JarFile jarFile = new JarFile(plugin.toFile())) {
|
||||
Manifest manifest = jarFile.getManifest();
|
||||
Attributes main = manifest.getMainAttributes();
|
||||
return main.getValue(Attributes.Name.IMPLEMENTATION_VERSION);
|
||||
} finally {
|
||||
jarFile.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -79,11 +79,8 @@ public class AllProjectsConfig extends VersionedMetaData {
|
||||
public AllProjectsConfig load() throws IOException, ConfigInvalidException {
|
||||
File path = getPath();
|
||||
if (path != null) {
|
||||
Repository repo = new FileRepository(path);
|
||||
try {
|
||||
try (Repository repo = new FileRepository(path)) {
|
||||
load(repo);
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
}
|
||||
return this;
|
||||
|
@ -139,11 +139,8 @@ public class PrologCompiler implements Callable<PrologCompiler.Status> {
|
||||
// Any leak of tmp caused by this method failing will be cleaned
|
||||
// up by our caller when tempDir is recursively deleted.
|
||||
File tmp = File.createTempFile("rules", ".pl", tempDir);
|
||||
FileOutputStream out = new FileOutputStream(tmp);
|
||||
try {
|
||||
try (FileOutputStream out = new FileOutputStream(tmp)) {
|
||||
git.open(blobId).copyTo(out);
|
||||
} finally {
|
||||
out.close();
|
||||
}
|
||||
return tmp;
|
||||
}
|
||||
@ -157,9 +154,8 @@ public class PrologCompiler implements Callable<PrologCompiler.Status> {
|
||||
|
||||
DiagnosticCollector<JavaFileObject> diagnostics =
|
||||
new DiagnosticCollector<>();
|
||||
StandardJavaFileManager fileManager =
|
||||
compiler.getStandardFileManager(diagnostics, null, null);
|
||||
try {
|
||||
try (StandardJavaFileManager fileManager =
|
||||
compiler.getStandardFileManager(diagnostics, null, null)) {
|
||||
Iterable<? extends JavaFileObject> compilationUnits = fileManager
|
||||
.getJavaFileObjectsFromFiles(getAllFiles(tempDir, ".java"));
|
||||
ArrayList<String> options = new ArrayList<>();
|
||||
@ -195,8 +191,6 @@ public class PrologCompiler implements Callable<PrologCompiler.Status> {
|
||||
}
|
||||
throw new CompileException(msg.toString());
|
||||
}
|
||||
} finally {
|
||||
fileManager.close();
|
||||
}
|
||||
}
|
||||
|
||||
@ -247,8 +241,7 @@ public class PrologCompiler implements Callable<PrologCompiler.Status> {
|
||||
jarAdd.setTime(now);
|
||||
out.putNextEntry(jarAdd);
|
||||
if (f.isFile()) {
|
||||
FileInputStream in = new FileInputStream(f);
|
||||
try {
|
||||
try (FileInputStream in = new FileInputStream(f)) {
|
||||
while (true) {
|
||||
int nRead = in.read(buffer, 0, buffer.length);
|
||||
if (nRead <= 0) {
|
||||
@ -256,8 +249,6 @@ public class PrologCompiler implements Callable<PrologCompiler.Status> {
|
||||
}
|
||||
out.write(buffer, 0, nRead);
|
||||
}
|
||||
} finally {
|
||||
in.close();
|
||||
}
|
||||
}
|
||||
out.closeEntry();
|
||||
|
@ -943,15 +943,10 @@ public class ChangeHookRunner implements ChangeHooks, EventDispatcher,
|
||||
|
||||
ps = pb.start();
|
||||
ps.getOutputStream().close();
|
||||
InputStream is = ps.getInputStream();
|
||||
String output = null;
|
||||
try {
|
||||
try (InputStream is = ps.getInputStream()) {
|
||||
output = readOutput(is);
|
||||
} finally {
|
||||
try {
|
||||
is.close();
|
||||
} catch (IOException closeErr) {
|
||||
}
|
||||
ps.waitFor();
|
||||
result = new HookResult(ps.exitValue(), output);
|
||||
}
|
||||
|
@ -265,27 +265,19 @@ public class RulesCache {
|
||||
|
||||
private String read(Project.NameKey project, ObjectId rulesId)
|
||||
throws CompileException {
|
||||
Repository git;
|
||||
try {
|
||||
git = gitMgr.openRepository(project);
|
||||
} catch (RepositoryNotFoundException e) {
|
||||
throw new CompileException("Cannot open repository " + project, e);
|
||||
try (Repository git = gitMgr.openRepository(project)) {
|
||||
try {
|
||||
ObjectLoader ldr = git.open(rulesId, Constants.OBJ_BLOB);
|
||||
byte[] raw = ldr.getCachedBytes(SRC_LIMIT);
|
||||
return RawParseUtils.decode(raw);
|
||||
} catch (LargeObjectException e) {
|
||||
throw new CompileException("rules of " + project + " are too large", e);
|
||||
} catch (RuntimeException | IOException e) {
|
||||
throw new CompileException("Cannot load rules of " + project, e);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new CompileException("Cannot open repository " + project, e);
|
||||
}
|
||||
try {
|
||||
ObjectLoader ldr = git.open(rulesId, Constants.OBJ_BLOB);
|
||||
byte[] raw = ldr.getCachedBytes(SRC_LIMIT);
|
||||
return RawParseUtils.decode(raw);
|
||||
} catch (LargeObjectException e) {
|
||||
throw new CompileException("rules of " + project + " are too large", e);
|
||||
} catch (RuntimeException e) {
|
||||
throw new CompileException("Cannot load rules of " + project, e);
|
||||
} catch (IOException e) {
|
||||
throw new CompileException("Cannot load rules of " + project, e);
|
||||
} finally {
|
||||
git.close();
|
||||
}
|
||||
}
|
||||
|
||||
private BufferingPrologControl newEmptyMachine(ClassLoader cl) {
|
||||
|
@ -106,9 +106,8 @@ public class ApprovalCopier {
|
||||
TreeMap<Integer, PatchSet> patchSets = getPatchSets(cd);
|
||||
NavigableSet<Integer> allPsIds = patchSets.navigableKeySet();
|
||||
|
||||
Repository repo =
|
||||
repoManager.openRepository(project.getProject().getNameKey());
|
||||
try {
|
||||
try (Repository repo =
|
||||
repoManager.openRepository(project.getProject().getNameKey())) {
|
||||
// Walk patch sets strictly less than current in descending order.
|
||||
Collection<PatchSet> allPrior = patchSets.descendingMap()
|
||||
.tailMap(ps.getId().get(), false)
|
||||
@ -132,8 +131,6 @@ public class ApprovalCopier {
|
||||
}
|
||||
}
|
||||
return labelNormalizer.normalize(ctl, byUser.values()).getNormalized();
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new OrmException(e);
|
||||
|
@ -382,8 +382,7 @@ public class ChangeUtil {
|
||||
throw new NoSuchChangeException(patchSetId.getParentKey());
|
||||
}
|
||||
|
||||
Repository repo = gitManager.openRepository(change.getProject());
|
||||
try {
|
||||
try (Repository repo = gitManager.openRepository(change.getProject())) {
|
||||
RefUpdate update = repo.updateRef(patch.getRefName());
|
||||
update.setForceUpdate(true);
|
||||
update.disableRefLog();
|
||||
@ -399,8 +398,6 @@ public class ChangeUtil {
|
||||
" in " + repo.getDirectory() + ": " + update.getResult());
|
||||
}
|
||||
gitRefUpdated.fire(change.getProject(), update, ReceiveCommand.Type.DELETE);
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
|
||||
ReviewDb db = this.db.get();
|
||||
|
@ -364,19 +364,11 @@ public class PatchLineCommentsUtil {
|
||||
}
|
||||
|
||||
private Set<String> getRefNamesAllUsers(String prefix) throws OrmException {
|
||||
Repository repo;
|
||||
try {
|
||||
repo = repoManager.openRepository(allUsers);
|
||||
} catch (IOException e) {
|
||||
throw new OrmException(e);
|
||||
}
|
||||
try {
|
||||
try (Repository repo = repoManager.openRepository(allUsers)) {
|
||||
RefDatabase refDb = repo.getRefDatabase();
|
||||
return refDb.getRefs(prefix).keySet();
|
||||
} catch (IOException e) {
|
||||
throw new OrmException(e);
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -39,15 +39,12 @@ public class ProjectUtil {
|
||||
public static boolean branchExists(final GitRepositoryManager repoManager,
|
||||
final Branch.NameKey branch) throws RepositoryNotFoundException,
|
||||
IOException {
|
||||
final Repository repo = repoManager.openRepository(branch.getParentKey());
|
||||
try {
|
||||
try (Repository repo = repoManager.openRepository(branch.getParentKey())) {
|
||||
boolean exists = repo.getRefDatabase().exactRef(branch.get()) != null;
|
||||
if (!exists) {
|
||||
exists = repo.getFullBranch().equals(branch.get());
|
||||
}
|
||||
return exists;
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -92,8 +92,7 @@ public class AccountByEmailCacheImpl implements AccountByEmailCache {
|
||||
|
||||
@Override
|
||||
public Set<Account.Id> load(String email) throws Exception {
|
||||
final ReviewDb db = schema.open();
|
||||
try {
|
||||
try (ReviewDb db = schema.open()) {
|
||||
Set<Account.Id> r = Sets.newHashSet();
|
||||
for (Account a : db.accounts().byPreferredEmail(email)) {
|
||||
r.add(a.getId());
|
||||
@ -103,8 +102,6 @@ public class AccountByEmailCacheImpl implements AccountByEmailCache {
|
||||
r.add(a.getAccountId());
|
||||
}
|
||||
return ImmutableSet.copyOf(r);
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -142,16 +142,13 @@ public class AccountCacheImpl implements AccountCache {
|
||||
|
||||
@Override
|
||||
public AccountState load(Account.Id key) throws Exception {
|
||||
final ReviewDb db = schema.open();
|
||||
try {
|
||||
try (ReviewDb db = schema.open()) {
|
||||
final AccountState state = load(db, key);
|
||||
String user = state.getUserName();
|
||||
if (user != null) {
|
||||
byName.put(user, Optional.of(state.getAccount().getId()));
|
||||
}
|
||||
return state;
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
}
|
||||
|
||||
@ -192,8 +189,7 @@ public class AccountCacheImpl implements AccountCache {
|
||||
|
||||
@Override
|
||||
public Optional<Account.Id> load(String username) throws Exception {
|
||||
final ReviewDb db = schema.open();
|
||||
try {
|
||||
try (ReviewDb db = schema.open()) {
|
||||
final AccountExternalId.Key key = new AccountExternalId.Key( //
|
||||
AccountExternalId.SCHEME_USERNAME, //
|
||||
username);
|
||||
@ -202,8 +198,6 @@ public class AccountCacheImpl implements AccountCache {
|
||||
return Optional.of(id.getAccountId());
|
||||
}
|
||||
return Optional.absent();
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -83,13 +83,10 @@ public class AccountManager {
|
||||
*/
|
||||
public Account.Id lookup(String externalId) throws AccountException {
|
||||
try {
|
||||
ReviewDb db = schema.open();
|
||||
try {
|
||||
try (ReviewDb db = schema.open()) {
|
||||
AccountExternalId ext =
|
||||
db.accountExternalIds().get(new AccountExternalId.Key(externalId));
|
||||
return ext != null ? ext.getAccountId() : null;
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
} catch (OrmException e) {
|
||||
throw new AccountException("Cannot lookup account " + externalId, e);
|
||||
@ -107,8 +104,7 @@ public class AccountManager {
|
||||
public AuthResult authenticate(AuthRequest who) throws AccountException {
|
||||
who = realm.authenticate(who);
|
||||
try {
|
||||
ReviewDb db = schema.open();
|
||||
try {
|
||||
try (ReviewDb db = schema.open()) {
|
||||
AccountExternalId.Key key = id(who);
|
||||
AccountExternalId id = db.accountExternalIds().get(key);
|
||||
if (id == null) {
|
||||
@ -128,8 +124,6 @@ public class AccountManager {
|
||||
return new AuthResult(id.getAccountId(), key, false);
|
||||
}
|
||||
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
} catch (OrmException e) {
|
||||
throw new AccountException("Authentication error", e);
|
||||
@ -324,8 +318,7 @@ public class AccountManager {
|
||||
*/
|
||||
public AuthResult link(Account.Id to, AuthRequest who)
|
||||
throws AccountException, OrmException {
|
||||
ReviewDb db = schema.open();
|
||||
try {
|
||||
try (ReviewDb db = schema.open()) {
|
||||
who = realm.link(db, to, who);
|
||||
|
||||
AccountExternalId.Key key = id(who);
|
||||
@ -357,8 +350,6 @@ public class AccountManager {
|
||||
|
||||
return new AuthResult(to, key, false);
|
||||
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
}
|
||||
|
||||
@ -377,8 +368,7 @@ public class AccountManager {
|
||||
*/
|
||||
public AuthResult updateLink(Account.Id to, AuthRequest who) throws OrmException,
|
||||
AccountException {
|
||||
ReviewDb db = schema.open();
|
||||
try {
|
||||
try (ReviewDb db = schema.open()) {
|
||||
AccountExternalId.Key key = id(who);
|
||||
List<AccountExternalId.Key> filteredKeysByScheme =
|
||||
filterKeysByScheme(key.getScheme(), db.accountExternalIds()
|
||||
@ -390,8 +380,6 @@ public class AccountManager {
|
||||
}
|
||||
byIdCache.evict(to);
|
||||
return link(to, who);
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
}
|
||||
|
||||
@ -417,8 +405,7 @@ public class AccountManager {
|
||||
*/
|
||||
public AuthResult unlink(Account.Id from, AuthRequest who)
|
||||
throws AccountException, OrmException {
|
||||
ReviewDb db = schema.open();
|
||||
try {
|
||||
try (ReviewDb db = schema.open()) {
|
||||
who = realm.unlink(db, from, who);
|
||||
|
||||
AccountExternalId.Key key = id(who);
|
||||
@ -446,8 +433,6 @@ public class AccountManager {
|
||||
|
||||
return new AuthResult(from, key, false);
|
||||
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -86,14 +86,11 @@ public class GetPreferences implements RestReadView<AccountResource> {
|
||||
throw new ResourceNotFoundException();
|
||||
}
|
||||
|
||||
Repository git = gitMgr.openRepository(allUsersName);
|
||||
try {
|
||||
try (Repository git = gitMgr.openRepository(allUsersName)) {
|
||||
VersionedAccountPreferences p =
|
||||
VersionedAccountPreferences.forUser(rsrc.getUser().getAccountId());
|
||||
p.load(git);
|
||||
return new PreferenceInfo(a.getGeneralPreferences(), p, git);
|
||||
} finally {
|
||||
git.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -152,13 +152,8 @@ public class GroupCacheImpl implements GroupCache {
|
||||
|
||||
@Override
|
||||
public Iterable<AccountGroup> all() {
|
||||
try {
|
||||
ReviewDb db = schema.open();
|
||||
try {
|
||||
return Collections.unmodifiableList(db.accountGroups().all().toList());
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
try (ReviewDb db = schema.open()) {
|
||||
return Collections.unmodifiableList(db.accountGroups().all().toList());
|
||||
} catch (OrmException e) {
|
||||
log.warn("Cannot list internal groups", e);
|
||||
return Collections.emptyList();
|
||||
@ -187,11 +182,8 @@ public class GroupCacheImpl implements GroupCache {
|
||||
@Override
|
||||
public Optional<AccountGroup> load(final AccountGroup.Id key)
|
||||
throws Exception {
|
||||
final ReviewDb db = schema.open();
|
||||
try {
|
||||
try (ReviewDb db = schema.open()) {
|
||||
return Optional.fromNullable(db.accountGroups().get(key));
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -207,16 +199,13 @@ public class GroupCacheImpl implements GroupCache {
|
||||
@Override
|
||||
public Optional<AccountGroup> load(String name)
|
||||
throws Exception {
|
||||
final ReviewDb db = schema.open();
|
||||
try {
|
||||
try (ReviewDb db = schema.open()) {
|
||||
AccountGroup.NameKey key = new AccountGroup.NameKey(name);
|
||||
AccountGroupName r = db.accountGroupNames().get(key);
|
||||
if (r != null) {
|
||||
return Optional.fromNullable(db.accountGroups().get(r.getId()));
|
||||
}
|
||||
return Optional.absent();
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -232,8 +221,7 @@ public class GroupCacheImpl implements GroupCache {
|
||||
@Override
|
||||
public Optional<AccountGroup> load(String uuid)
|
||||
throws Exception {
|
||||
final ReviewDb db = schema.open();
|
||||
try {
|
||||
try (ReviewDb db = schema.open()) {
|
||||
List<AccountGroup> r;
|
||||
|
||||
r = db.accountGroups().byUUID(new AccountGroup.UUID(uuid)).toList();
|
||||
@ -244,8 +232,6 @@ public class GroupCacheImpl implements GroupCache {
|
||||
} else {
|
||||
throw new OrmDuplicateKeyException("Duplicate group UUID " + uuid);
|
||||
}
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -144,8 +144,7 @@ public class GroupIncludeCacheImpl implements GroupIncludeCache {
|
||||
|
||||
@Override
|
||||
public Set<AccountGroup.UUID> load(AccountGroup.UUID key) throws Exception {
|
||||
final ReviewDb db = schema.open();
|
||||
try {
|
||||
try (ReviewDb db = schema.open()) {
|
||||
List<AccountGroup> group = db.accountGroups().byUUID(key).toList();
|
||||
if (group.size() != 1) {
|
||||
return Collections.emptySet();
|
||||
@ -157,8 +156,6 @@ public class GroupIncludeCacheImpl implements GroupIncludeCache {
|
||||
ids.add(agi.getIncludeUUID());
|
||||
}
|
||||
return ImmutableSet.copyOf(ids);
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -174,8 +171,7 @@ public class GroupIncludeCacheImpl implements GroupIncludeCache {
|
||||
|
||||
@Override
|
||||
public Set<AccountGroup.UUID> load(AccountGroup.UUID key) throws Exception {
|
||||
final ReviewDb db = schema.open();
|
||||
try {
|
||||
try (ReviewDb db = schema.open()) {
|
||||
Set<AccountGroup.Id> ids = Sets.newHashSet();
|
||||
for (AccountGroupById agi : db.accountGroupById()
|
||||
.byIncludeUUID(key)) {
|
||||
@ -187,8 +183,6 @@ public class GroupIncludeCacheImpl implements GroupIncludeCache {
|
||||
groupArray.add(g.getGroupUUID());
|
||||
}
|
||||
return ImmutableSet.copyOf(groupArray);
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -204,8 +198,7 @@ public class GroupIncludeCacheImpl implements GroupIncludeCache {
|
||||
|
||||
@Override
|
||||
public Set<AccountGroup.UUID> load(String key) throws Exception {
|
||||
final ReviewDb db = schema.open();
|
||||
try {
|
||||
try (ReviewDb db = schema.open()) {
|
||||
Set<AccountGroup.UUID> ids = Sets.newHashSet();
|
||||
for (AccountGroupById agi : db.accountGroupById().all()) {
|
||||
if (!AccountGroup.isInternalGroup(agi.getIncludeUUID())) {
|
||||
@ -213,8 +206,6 @@ public class GroupIncludeCacheImpl implements GroupIncludeCache {
|
||||
}
|
||||
}
|
||||
return ImmutableSet.copyOf(ids);
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -307,8 +307,7 @@ public class LdapRealm extends AbstractRealm {
|
||||
|
||||
@Override
|
||||
public Optional<Account.Id> load(String username) throws Exception {
|
||||
final ReviewDb db = schema.open();
|
||||
try {
|
||||
try (ReviewDb db = schema.open()) {
|
||||
final AccountExternalId extId =
|
||||
db.accountExternalIds().get(
|
||||
new AccountExternalId.Key(SCHEME_GERRIT, username));
|
||||
@ -316,8 +315,6 @@ public class LdapRealm extends AbstractRealm {
|
||||
return Optional.of(extId.getAccountId());
|
||||
}
|
||||
return Optional.absent();
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -109,8 +109,7 @@ public class Mergeable implements RestReadView<RevisionResource> {
}
result.submitType = rec.type;

Repository git = gitManager.openRepository(change.getProject());
try {
try (Repository git = gitManager.openRepository(change.getProject())) {
ObjectId commit = toId(ps);
if (commit == null) {
result.mergeable = false;
@ -150,8 +149,6 @@ public class Mergeable implements RestReadView<RevisionResource> {
}
}
}
} finally {
git.close();
}
return result;
}

@ -356,25 +356,19 @@ public class RebaseChange {

public boolean canRebase(Project.NameKey project, PatchSet.Id patchSetId,
Branch.NameKey branch) {
Repository git;
try {
git = gitManager.openRepository(project);
} catch (RepositoryNotFoundException err) {
return false;
try (Repository git = gitManager.openRepository(project)) {
try (RevWalk rw = new RevWalk(git)) {
findBaseRevision(patchSetId, db.get(), branch, git, rw);
return true;
} catch (InvalidChangeOperationException e) {
return false;
} catch (OrmException | IOException e) {
log.warn("Error checking if patch set " + patchSetId + " on " + branch
+ " can be rebased", e);
return false;
}
} catch (IOException err) {
return false;
}
try (RevWalk rw = new RevWalk(git)) {
findBaseRevision(patchSetId, db.get(), branch, git, rw);
return true;
} catch (InvalidChangeOperationException e) {
return false;
} catch (OrmException | IOException e) {
log.warn("Error checking if patch set " + patchSetId + " on " + branch
+ " can be rebased", e);
return false;
} finally {
git.close();
}
}
}

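A side note on the pattern above (a sketch, not part of the commit): the rewritten canRebase() nests two try-with-resources blocks so that a failure to open the repository stays separate from the rebase check itself. The two resources could also be declared in a single header, since resources are closed in reverse declaration order (the RevWalk before the Repository). A minimal sketch reusing the names from the hunk, ignoring the slightly different exception handling of the original:

// Sketch only: both resources in one header, closed in reverse order.
try (Repository git = gitManager.openRepository(project);
    RevWalk rw = new RevWalk(git)) {
  findBaseRevision(patchSetId, db.get(), branch, git, rw);
  return true;
} catch (InvalidChangeOperationException e) {
  return false;
} catch (OrmException | IOException e) {
  return false;
}
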
@ -41,14 +41,11 @@ public class GetPreferences implements RestReadView<ConfigResource> {
@Override
public PreferenceInfo apply(ConfigResource rsrc)
throws IOException, ConfigInvalidException {
Repository git = gitMgr.openRepository(allUsersName);
try {
try (Repository git = gitMgr.openRepository(allUsersName)) {
VersionedAccountPreferences p =
VersionedAccountPreferences.forDefault();
p.load(git);
return new PreferenceInfo(null, p, git);
} finally {
git.close();
}
}
}

@ -176,11 +176,10 @@ class EncryptedContactStore implements ContactStore {
final byte[] zText = compress(name, date, rawText);

final ByteArrayOutputStream buf = new ByteArrayOutputStream();
final ArmoredOutputStream aout = new ArmoredOutputStream(buf);
final OutputStream cout = cpk().open(aout, zText.length);
cout.write(zText);
cout.close();
aout.close();
try (ArmoredOutputStream aout = new ArmoredOutputStream(buf);
OutputStream cout = cpk().open(aout, zText.length)) {
cout.write(zText);
}

return buf.toByteArray();
}
@ -195,12 +194,13 @@ class EncryptedContactStore implements ContactStore {
}

comdg = new PGPCompressedDataGenerator(PGPCompressedData.ZIP);
final OutputStream out =
try (OutputStream out =
new PGPLiteralDataGenerator().open(comdg.open(buf),
PGPLiteralData.BINARY, fileName, len, fileDate);
out.write(plainText);
out.close();
comdg.close();
PGPLiteralData.BINARY, fileName, len, fileDate)) {
out.write(plainText);
} finally {
comdg.close(); // PGPCompressedDataGenerator doesn't implement Closable
}
return buf.toByteArray();
}

@ -220,30 +220,25 @@ class EncryptedContactStore implements ContactStore {
field(b, "Full-Name", account.getFullName());
field(b, "Preferred-Email", account.getPreferredEmail());

try {
final ReviewDb db = schema.open();
try {
for (final AccountExternalId e : db.accountExternalIds().byAccount(
account.getId())) {
final StringBuilder oistr = new StringBuilder();
if (e.getEmailAddress() != null && e.getEmailAddress().length() > 0) {
if (oistr.length() > 0) {
oistr.append(' ');
}
oistr.append(e.getEmailAddress());
try (ReviewDb db = schema.open()) {
for (final AccountExternalId e : db.accountExternalIds().byAccount(
account.getId())) {
final StringBuilder oistr = new StringBuilder();
if (e.getEmailAddress() != null && e.getEmailAddress().length() > 0) {
if (oistr.length() > 0) {
oistr.append(' ');
}
if (e.isScheme(AccountExternalId.SCHEME_MAILTO)) {
if (oistr.length() > 0) {
oistr.append(' ');
}
oistr.append('<');
oistr.append(e.getExternalId());
oistr.append('>');
}
field(b, "Identity", oistr.toString());
oistr.append(e.getEmailAddress());
}
} finally {
db.close();
if (e.isScheme(AccountExternalId.SCHEME_MAILTO)) {
if (oistr.length() > 0) {
oistr.append(' ');
}
oistr.append('<');
oistr.append(e.getExternalId());
oistr.append('>');
}
field(b, "Identity", oistr.toString());
}
} catch (OrmException e) {
throw new ContactInformationStoreException(e);

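Two try-with-resources details carry the hunks above (a general note with a hypothetical sketch, not commit code): resources declared in one header are closed in reverse order, so cout is flushed into aout before aout itself is closed, and an exception thrown by close() is recorded as a suppressed exception rather than replacing the primary one. Objects that are not AutoCloseable, such as PGPCompressedDataGenerator here, still need an explicit finally:

// openOuter(), wrap(), data and generator are placeholders for illustration.
try (OutputStream outer = openOuter();
    OutputStream inner = wrap(outer)) {
  inner.write(data); // inner is closed first, then outer
} finally {
  generator.close(); // not AutoCloseable, so closed by hand
}
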
@ -55,11 +55,8 @@ public class HttpContactStoreConnection implements ContactStoreConnection {
throw new IOException("Connection failed: " + conn.getResponseCode());
}
final byte[] dst = new byte[2];
final InputStream in = conn.getInputStream();
try {
try (InputStream in = conn.getInputStream()) {
IO.readFully(in, dst, 0, 2);
} finally {
in.close();
}
if (dst[0] != 'O' || dst[1] != 'K') {
throw new IOException("Store failed: " + dst[0] + dst[1]);

@ -156,17 +156,10 @@ public class MarkdownFormatter {
throw new FileNotFoundException("Resource " + name);
}
file.set("file".equals(url.getProtocol()));
InputStream in = url.openStream();
try {
TemporaryBuffer.Heap tmp = new TemporaryBuffer.Heap(128 * 1024);
try {
tmp.copy(in);
return new String(tmp.toByteArray(), "UTF-8");
} finally {
tmp.close();
}
} finally {
in.close();
try (InputStream in = url.openStream();
TemporaryBuffer.Heap tmp = new TemporaryBuffer.Heap(128 * 1024)) {
tmp.copy(in);
return new String(tmp.toByteArray(), "UTF-8");
}
}
}

@ -110,19 +110,16 @@ public class QueryDocumentationExecutor {
return null;
}

ZipInputStream zip = new ZipInputStream(index);
try {
try (ZipInputStream zip = new ZipInputStream(index)) {
ZipEntry entry;
while ((entry = zip.getNextEntry()) != null) {
IndexOutput out = dir.createOutput(entry.getName(), null);
int count;
while ((count = zip.read(buffer)) != -1) {
out.writeBytes(buffer, count);
try (IndexOutput out = dir.createOutput(entry.getName(), null)) {
int count;
while ((count = zip.read(buffer)) != -1) {
out.writeBytes(buffer, count);
}
}
out.close();
}
} finally {
zip.close();
}
// We must NOT call dir.close() here, as DirectoryReader.open() expects an opened directory.
return dir;

@ -180,11 +180,8 @@ public class ChangeEditUtil {
public void delete(ChangeEdit edit)
throws IOException {
Change change = edit.getChange();
Repository repo = gitManager.openRepository(change.getProject());
try {
try (Repository repo = gitManager.openRepository(change.getProject())) {
deleteRef(repo, edit);
} finally {
repo.close();
}
indexer.index(db.get(), change);
}

@ -229,39 +229,34 @@ public class EventFactory {
public void addDependencies(ChangeAttribute ca, Change change) {
ca.dependsOn = new ArrayList<>();
ca.neededBy = new ArrayList<>();
try {
final ReviewDb db = schema.open();
try {
final PatchSet.Id psId = change.currentPatchSetId();
for (PatchSetAncestor a : db.patchSetAncestors().ancestorsOf(psId)) {
for (PatchSet p :
db.patchSets().byRevision(a.getAncestorRevision())) {
Change c = db.changes().get(p.getId().getParentKey());
ca.dependsOn.add(newDependsOn(c, p));
}
try (ReviewDb db = schema.open()) {
final PatchSet.Id psId = change.currentPatchSetId();
for (PatchSetAncestor a : db.patchSetAncestors().ancestorsOf(psId)) {
for (PatchSet p :
db.patchSets().byRevision(a.getAncestorRevision())) {
Change c = db.changes().get(p.getId().getParentKey());
ca.dependsOn.add(newDependsOn(c, p));
}
}

final PatchSet ps = db.patchSets().get(psId);
if (ps == null) {
log.error("Error while generating the list of descendants for"
+ " PatchSet " + psId + ": Cannot find PatchSet entry in"
+ " database.");
} else {
final RevId revId = ps.getRevision();
for (PatchSetAncestor a : db.patchSetAncestors().descendantsOf(revId)) {
final PatchSet p = db.patchSets().get(a.getPatchSet());
if (p == null) {
log.error("Error while generating the list of descendants for"
+ " revision " + revId.get() + ": Cannot find PatchSet entry in"
+ " database for " + a.getPatchSet());
continue;
}
final Change c = db.changes().get(p.getId().getParentKey());
ca.neededBy.add(newNeededBy(c, p));
final PatchSet ps = db.patchSets().get(psId);
if (ps == null) {
log.error("Error while generating the list of descendants for"
+ " PatchSet " + psId + ": Cannot find PatchSet entry in"
+ " database.");
} else {
final RevId revId = ps.getRevision();
for (PatchSetAncestor a : db.patchSetAncestors().descendantsOf(revId)) {
final PatchSet p = db.patchSets().get(a.getPatchSet());
if (p == null) {
log.error("Error while generating the list of descendants for"
+ " revision " + revId.get() + ": Cannot find PatchSet entry in"
+ " database for " + a.getPatchSet());
continue;
}
final Change c = db.changes().get(p.getId().getParentKey());
ca.neededBy.add(newNeededBy(c, p));
}
} finally {
db.close();
}
} catch (OrmException e) {
// Squash DB exceptions and leave dependency lists partially filled.
@ -401,38 +396,33 @@ public class EventFactory {
p.createdOn = patchSet.getCreatedOn().getTime() / 1000L;
p.isDraft = patchSet.isDraft();
final PatchSet.Id pId = patchSet.getId();
try {
final ReviewDb db = schema.open();
try {
p.parents = new ArrayList<>();
for (PatchSetAncestor a : db.patchSetAncestors().ancestorsOf(
patchSet.getId())) {
p.parents.add(a.getAncestorRevision().get());
}

UserIdentity author = psInfoFactory.get(db, pId).getAuthor();
if (author.getAccount() == null) {
p.author = new AccountAttribute();
p.author.email = author.getEmail();
p.author.name = author.getName();
p.author.username = "";
} else {
p.author = asAccountAttribute(author.getAccount());
}

Change change = db.changes().get(pId.getParentKey());
List<Patch> list =
patchListCache.get(change, patchSet).toPatchList(pId);
for (Patch pe : list) {
if (!Patch.COMMIT_MSG.equals(pe.getFileName())) {
p.sizeDeletions -= pe.getDeletions();
p.sizeInsertions += pe.getInsertions();
}
}
p.kind = changeKindCache.getChangeKind(db, change, patchSet);
} finally {
db.close();
try (ReviewDb db = schema.open()) {
p.parents = new ArrayList<>();
for (PatchSetAncestor a : db.patchSetAncestors().ancestorsOf(
patchSet.getId())) {
p.parents.add(a.getAncestorRevision().get());
}

UserIdentity author = psInfoFactory.get(db, pId).getAuthor();
if (author.getAccount() == null) {
p.author = new AccountAttribute();
p.author.email = author.getEmail();
p.author.name = author.getName();
p.author.username = "";
} else {
p.author = asAccountAttribute(author.getAccount());
}

Change change = db.changes().get(pId.getParentKey());
List<Patch> list =
patchListCache.get(change, patchSet).toPatchList(pId);
for (Patch pe : list) {
if (!Patch.COMMIT_MSG.equals(pe.getFileName())) {
p.sizeDeletions -= pe.getDeletions();
p.sizeInsertions += pe.getInsertions();
}
}
p.kind = changeKindCache.getChangeKind(db, change, patchSet);
} catch (OrmException e) {
log.error("Cannot load patch set data for " + patchSet.getId(), e);
} catch (PatchSetInfoNotAvailableException e) {

@ -86,9 +86,7 @@ public class GarbageCollection {
GarbageCollectionResult.Error.Type.GC_ALREADY_SCHEDULED, projectName));
}
for (Project.NameKey p : projectsToGc) {
Repository repo = null;
try {
repo = repoManager.openRepository(p);
try (Repository repo = repoManager.openRepository(p)) {
logGcConfiguration(p, repo, aggressive);
print(writer, "collecting garbage for \"" + p + "\":\n");
GarbageCollectCommand gc = Git.wrap(repo).gc();
@ -110,9 +108,6 @@ public class GarbageCollection {
result.addError(new GarbageCollectionResult.Error(
GarbageCollectionResult.Error.Type.GC_FAILED, p));
} finally {
if (repo != null) {
repo.close();
}
gcQueue.gcFinished(p);
}
}

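As the GarbageCollection hunk above shows, a try-with-resources statement can keep its own catch and finally clauses; the resource is closed before those clauses run, so gcQueue.gcFinished(p) executes after the repository handle has already been released. A reduced sketch of that shape (reusing the names from the hunk, not additional commit code):

try (Repository repo = repoManager.openRepository(p)) {
  // ... run gc against repo ...
} catch (IOException e) {
  result.addError(new GarbageCollectionResult.Error(
      GarbageCollectionResult.Error.Type.GC_FAILED, p));
} finally {
  gcQueue.gcFinished(p); // runs after repo has been closed
}
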
@ -302,11 +302,8 @@ public class LocalDiskRepositoryManager implements GitRepositoryManager {
@Override
public String getProjectDescription(final Project.NameKey name)
throws RepositoryNotFoundException, IOException {
final Repository e = openRepository(name);
try {
try (Repository e = openRepository(name)) {
return getProjectDescription(e);
} finally {
e.close();
}
}

@ -337,31 +334,26 @@ public class LocalDiskRepositoryManager implements GitRepositoryManager {
final String description) {
// Update git's description file, in case gitweb is being used
//
try {
final Repository e = openRepository(name);
try {
final String old = getProjectDescription(e);
if ((old == null && description == null)
|| (old != null && old.equals(description))) {
return;
}
try (Repository e = openRepository(name)) {
final String old = getProjectDescription(e);
if ((old == null && description == null)
|| (old != null && old.equals(description))) {
return;
}

final LockFile f = new LockFile(new File(e.getDirectory(), "description"), FS.DETECTED);
if (f.lock()) {
String d = description;
if (d != null) {
d = d.trim();
if (d.length() > 0) {
d += "\n";
}
} else {
d = "";
final LockFile f = new LockFile(new File(e.getDirectory(), "description"), FS.DETECTED);
if (f.lock()) {
String d = description;
if (d != null) {
d = d.trim();
if (d.length() > 0) {
d += "\n";
}
f.write(Constants.encode(d));
f.commit();
} else {
d = "";
}
} finally {
e.close();
f.write(Constants.encode(d));
f.commit();
}
} catch (RepositoryNotFoundException e) {
log.error("Cannot update description for " + name, e);

@ -1031,13 +1031,8 @@ public class MergeOp {
@Override
public void run() {
PatchSet patchSet;
try {
ReviewDb reviewDb = schemaFactory.open();
try {
patchSet = reviewDb.patchSets().get(c.currentPatchSetId());
} finally {
reviewDb.close();
}
try (ReviewDb reviewDb = schemaFactory.open()) {
patchSet = reviewDb.patchSets().get(c.currentPatchSetId());
} catch (Exception e) {
logError("Cannot send email for submitted patch set " + c.getId(), e);
return;
@ -1193,13 +1188,8 @@ public class MergeOp {
@Override
public void run() {
PatchSet patchSet;
try {
ReviewDb reviewDb = schemaFactory.open();
try {
patchSet = reviewDb.patchSets().get(c.currentPatchSetId());
} finally {
reviewDb.close();
}
try (ReviewDb reviewDb = schemaFactory.open()) {
patchSet = reviewDb.patchSets().get(c.currentPatchSetId());
} catch (Exception e) {
logError("Cannot send email notifications about merge failure", e);
return;

@ -1719,11 +1719,8 @@ public class ReceiveCommits {
if (caller == Thread.currentThread()) {
insertChange(db);
} else {
ReviewDb db = schemaFactory.open();
try {
try (ReviewDb db = schemaFactory.open()) {
insertChange(db);
} finally {
db.close();
}
}
synchronized (newProgress) {
@ -2093,11 +2090,8 @@ public class ReceiveCommits {
} else if (caller == Thread.currentThread()) {
return insertPatchSet(db);
} else {
ReviewDb db = schemaFactory.open();
try {
try (ReviewDb db = schemaFactory.open()) {
return insertPatchSet(db);
} finally {
db.close();
}
}
} finally {
@ -2347,11 +2341,8 @@ public class ReceiveCommits {
if (caller == Thread.currentThread()) {
updateGroups(db);
} else {
ReviewDb db = schemaFactory.open();
try {
try (ReviewDb db = schemaFactory.open()) {
updateGroups(db);
} finally {
db.close();
}
}
return null;

@ -95,8 +95,8 @@ public class ScanningChangeCacheImpl implements ChangeCache {

@Override
public List<Change> load(Project.NameKey key) throws Exception {
Repository repo = repoManager.openRepository(key);
try (ManualRequestContext ctx = requestContext.open()) {
try (Repository repo = repoManager.openRepository(key);
ManualRequestContext ctx = requestContext.open()) {
ReviewDb db = ctx.getReviewDbProvider().get();
Map<String, Ref> refs =
repo.getRefDatabase().getRefs(RefNames.REFS_CHANGES);
@ -114,8 +114,6 @@ public class ScanningChangeCacheImpl implements ChangeCache {
Iterables.addAll(changes, db.changes().get(batch));
}
return changes;
} finally {
repo.close();
}
}

@ -67,13 +67,8 @@ class DbGroupMemberAuditListener implements GroupMemberAuditListener {
|
||||
new AccountGroupMemberAudit(m, me, TimeUtil.nowTs());
|
||||
auditInserts.add(audit);
|
||||
}
|
||||
try {
|
||||
ReviewDb db = schema.open();
|
||||
try {
|
||||
db.accountGroupMembersAudit().insert(auditInserts);
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
try (ReviewDb db = schema.open()) {
|
||||
db.accountGroupMembersAudit().insert(auditInserts);
|
||||
} catch (OrmException e) {
|
||||
logOrmExceptionForAccounts(
|
||||
"Cannot log add accounts to group event performed by user", me,
|
||||
@ -86,33 +81,28 @@ class DbGroupMemberAuditListener implements GroupMemberAuditListener {
|
||||
Collection<AccountGroupMember> removed) {
|
||||
List<AccountGroupMemberAudit> auditInserts = Lists.newLinkedList();
|
||||
List<AccountGroupMemberAudit> auditUpdates = Lists.newLinkedList();
|
||||
try {
|
||||
ReviewDb db = schema.open();
|
||||
try {
|
||||
for (AccountGroupMember m : removed) {
|
||||
AccountGroupMemberAudit audit = null;
|
||||
for (AccountGroupMemberAudit a : db.accountGroupMembersAudit()
|
||||
.byGroupAccount(m.getAccountGroupId(), m.getAccountId())) {
|
||||
if (a.isActive()) {
|
||||
audit = a;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (audit != null) {
|
||||
audit.removed(me, TimeUtil.nowTs());
|
||||
auditUpdates.add(audit);
|
||||
} else {
|
||||
audit = new AccountGroupMemberAudit(m, me, TimeUtil.nowTs());
|
||||
audit.removedLegacy();
|
||||
auditInserts.add(audit);
|
||||
try (ReviewDb db = schema.open()) {
|
||||
for (AccountGroupMember m : removed) {
|
||||
AccountGroupMemberAudit audit = null;
|
||||
for (AccountGroupMemberAudit a : db.accountGroupMembersAudit()
|
||||
.byGroupAccount(m.getAccountGroupId(), m.getAccountId())) {
|
||||
if (a.isActive()) {
|
||||
audit = a;
|
||||
break;
|
||||
}
|
||||
}
|
||||
db.accountGroupMembersAudit().update(auditUpdates);
|
||||
db.accountGroupMembersAudit().insert(auditInserts);
|
||||
} finally {
|
||||
db.close();
|
||||
|
||||
if (audit != null) {
|
||||
audit.removed(me, TimeUtil.nowTs());
|
||||
auditUpdates.add(audit);
|
||||
} else {
|
||||
audit = new AccountGroupMemberAudit(m, me, TimeUtil.nowTs());
|
||||
audit.removedLegacy();
|
||||
auditInserts.add(audit);
|
||||
}
|
||||
}
|
||||
db.accountGroupMembersAudit().update(auditUpdates);
|
||||
db.accountGroupMembersAudit().insert(auditInserts);
|
||||
} catch (OrmException e) {
|
||||
logOrmExceptionForAccounts(
|
||||
"Cannot log delete accounts from group event performed by user", me,
|
||||
@ -129,13 +119,8 @@ class DbGroupMemberAuditListener implements GroupMemberAuditListener {
|
||||
new AccountGroupByIdAud(groupInclude, me, TimeUtil.nowTs());
|
||||
includesAudit.add(audit);
|
||||
}
|
||||
try {
|
||||
ReviewDb db = schema.open();
|
||||
try {
|
||||
db.accountGroupByIdAud().insert(includesAudit);
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
try (ReviewDb db = schema.open()) {
|
||||
db.accountGroupByIdAud().insert(includesAudit);
|
||||
} catch (OrmException e) {
|
||||
logOrmExceptionForGroups(
|
||||
"Cannot log add groups to group event performed by user", me, added,
|
||||
@ -147,28 +132,23 @@ class DbGroupMemberAuditListener implements GroupMemberAuditListener {
|
||||
public void onDeleteGroupsFromGroup(Account.Id me,
|
||||
Collection<AccountGroupById> removed) {
|
||||
final List<AccountGroupByIdAud> auditUpdates = Lists.newLinkedList();
|
||||
try {
|
||||
ReviewDb db = schema.open();
|
||||
try {
|
||||
for (final AccountGroupById g : removed) {
|
||||
AccountGroupByIdAud audit = null;
|
||||
for (AccountGroupByIdAud a : db.accountGroupByIdAud()
|
||||
.byGroupInclude(g.getGroupId(), g.getIncludeUUID())) {
|
||||
if (a.isActive()) {
|
||||
audit = a;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (audit != null) {
|
||||
audit.removed(me, TimeUtil.nowTs());
|
||||
auditUpdates.add(audit);
|
||||
try (ReviewDb db = schema.open()) {
|
||||
for (final AccountGroupById g : removed) {
|
||||
AccountGroupByIdAud audit = null;
|
||||
for (AccountGroupByIdAud a : db.accountGroupByIdAud()
|
||||
.byGroupInclude(g.getGroupId(), g.getIncludeUUID())) {
|
||||
if (a.isActive()) {
|
||||
audit = a;
|
||||
break;
|
||||
}
|
||||
}
|
||||
db.accountGroupByIdAud().update(auditUpdates);
|
||||
} finally {
|
||||
db.close();
|
||||
|
||||
if (audit != null) {
|
||||
audit.removed(me, TimeUtil.nowTs());
|
||||
auditUpdates.add(audit);
|
||||
}
|
||||
}
|
||||
db.accountGroupByIdAud().update(auditUpdates);
|
||||
} catch (OrmException e) {
|
||||
logOrmExceptionForGroups(
|
||||
"Cannot log delete groups from group event performed by user", me,
|
||||
|
@ -235,12 +235,12 @@ public class SiteIndexer {
|
||||
@Override
|
||||
public Void call() throws Exception {
|
||||
Multimap<ObjectId, ChangeData> byId = ArrayListMultimap.create();
|
||||
Repository repo = null;
|
||||
ReviewDb db = null;
|
||||
try {
|
||||
repo = repoManager.openRepository(project);
|
||||
// TODO(dborowitz): Opening all repositories in a live server may be
|
||||
// wasteful; see if we can determine which ones it is safe to close
|
||||
// with RepositoryCache.close(repo).
|
||||
try (Repository repo = repoManager.openRepository(project);
|
||||
ReviewDb db = schemaFactory.open()) {
|
||||
Map<String, Ref> refs = repo.getRefDatabase().getRefs(ALL);
|
||||
db = schemaFactory.open();
|
||||
for (Change c : changeCache.get(project)) {
|
||||
Ref r = refs.get(c.currentPatchSetId().toRefName());
|
||||
if (r != null) {
|
||||
@ -256,16 +256,6 @@ public class SiteIndexer {
|
||||
verboseWriter).call();
|
||||
} catch (RepositoryNotFoundException rnfe) {
|
||||
log.error(rnfe.getMessage());
|
||||
} finally {
|
||||
if (db != null) {
|
||||
db.close();
|
||||
}
|
||||
if (repo != null) {
|
||||
repo.close();
|
||||
}
|
||||
// TODO(dborowitz): Opening all repositories in a live server may be
|
||||
// wasteful; see if we can determine which ones it is safe to close
|
||||
// with RepositoryCache.close(repo).
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
@ -406,27 +406,23 @@ public abstract class ChangeEmail extends NotificationEmail {
|
||||
TemporaryBuffer.Heap buf =
|
||||
new TemporaryBuffer.Heap(Math.min(HEAP_EST_SIZE, maxSize), maxSize);
|
||||
try (DiffFormatter fmt = new DiffFormatter(buf)) {
|
||||
Repository git;
|
||||
try {
|
||||
git = args.server.openRepository(change.getProject());
|
||||
try (Repository git = args.server.openRepository(change.getProject())) {
|
||||
try {
|
||||
fmt.setRepository(git);
|
||||
fmt.setDetectRenames(true);
|
||||
fmt.format(patchList.getOldId(), patchList.getNewId());
|
||||
return RawParseUtils.decode(buf.toByteArray());
|
||||
} catch (IOException e) {
|
||||
if (JGitText.get().inMemoryBufferLimitExceeded.equals(e.getMessage())) {
|
||||
return "";
|
||||
}
|
||||
log.error("Cannot format patch", e);
|
||||
return "";
|
||||
}
|
||||
} catch (IOException e) {
|
||||
log.error("Cannot open repository to format patch", e);
|
||||
return "";
|
||||
}
|
||||
try {
|
||||
fmt.setRepository(git);
|
||||
fmt.setDetectRenames(true);
|
||||
fmt.format(patchList.getOldId(), patchList.getNewId());
|
||||
return RawParseUtils.decode(buf.toByteArray());
|
||||
} catch (IOException e) {
|
||||
if (JGitText.get().inMemoryBufferLimitExceeded.equals(e.getMessage())) {
|
||||
return "";
|
||||
}
|
||||
log.error("Cannot format patch", e);
|
||||
return "";
|
||||
} finally {
|
||||
git.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -116,8 +116,7 @@ public class CommentSender extends ReplyToChangeSender {
|
||||
|
||||
public String getInlineComments(int lines) {
|
||||
StringBuilder cmts = new StringBuilder();
|
||||
final Repository repo = getRepository();
|
||||
try {
|
||||
try (Repository repo = getRepository()) {
|
||||
PatchList patchList = null;
|
||||
if (repo != null) {
|
||||
try {
|
||||
@ -164,10 +163,6 @@ public class CommentSender extends ReplyToChangeSender {
|
||||
}
|
||||
cmts.append("\n\n");
|
||||
}
|
||||
} finally {
|
||||
if (repo != null) {
|
||||
repo.close();
|
||||
}
|
||||
}
|
||||
return cmts.toString();
|
||||
}
|
||||
|
@ -192,8 +192,8 @@ public class SmtpEmailSender implements EmailSender {
|
||||
}
|
||||
}
|
||||
|
||||
Writer w = client.sendMessageData();
|
||||
if (w == null) {
|
||||
Writer messageDataWriter = client.sendMessageData();
|
||||
if (messageDataWriter == null) {
|
||||
/* Include rejected recipient error messages here to not lose that
|
||||
* information. That piece of the puzzle is vital if zero recipients
|
||||
* are accepted and the server consequently rejects the DATA command.
|
||||
@ -201,21 +201,20 @@ public class SmtpEmailSender implements EmailSender {
|
||||
throw new EmailException(rejected + "Server " + smtpHost
|
||||
+ " rejected DATA command: " + client.getReplyString());
|
||||
}
|
||||
w = new BufferedWriter(w);
|
||||
|
||||
for (Map.Entry<String, EmailHeader> h : hdrs.entrySet()) {
|
||||
if (!h.getValue().isEmpty()) {
|
||||
w.write(h.getKey());
|
||||
w.write(": ");
|
||||
h.getValue().write(w);
|
||||
w.write("\r\n");
|
||||
try (Writer w = new BufferedWriter(messageDataWriter)) {
|
||||
for (Map.Entry<String, EmailHeader> h : hdrs.entrySet()) {
|
||||
if (!h.getValue().isEmpty()) {
|
||||
w.write(h.getKey());
|
||||
w.write(": ");
|
||||
h.getValue().write(w);
|
||||
w.write("\r\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
w.write("\r\n");
|
||||
w.write(body);
|
||||
w.flush();
|
||||
w.close();
|
||||
w.write("\r\n");
|
||||
w.write(body);
|
||||
w.flush();
|
||||
}
|
||||
|
||||
if (!client.completePendingCommand()) {
|
||||
throw new EmailException("Server " + smtpHost
|
||||
|
@ -54,19 +54,11 @@ public abstract class AbstractChangeNotes<T> extends VersionedMetaData {
|
||||
loadDefaults();
|
||||
return self();
|
||||
}
|
||||
Repository repo;
|
||||
try {
|
||||
repo = repoManager.openMetadataRepository(getProjectName());
|
||||
} catch (IOException e) {
|
||||
throw new OrmException(e);
|
||||
}
|
||||
try {
|
||||
try (Repository repo = repoManager.openMetadataRepository(getProjectName())) {
|
||||
load(repo);
|
||||
loaded = true;
|
||||
} catch (ConfigInvalidException | IOException e) {
|
||||
throw new OrmException(e);
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
return self();
|
||||
}
|
||||
@ -77,15 +69,9 @@ public abstract class AbstractChangeNotes<T> extends VersionedMetaData {
|
||||
} else if (!migration.enabled()) {
|
||||
return null;
|
||||
}
|
||||
Repository repo;
|
||||
try {
|
||||
repo = repoManager.openMetadataRepository(getProjectName());
|
||||
try {
|
||||
Ref ref = repo.getRefDatabase().exactRef(getRefName());
|
||||
return ref != null ? ref.getObjectId() : null;
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
try (Repository repo = repoManager.openMetadataRepository(getProjectName())) {
|
||||
Ref ref = repo.getRefDatabase().exactRef(getRefName());
|
||||
return ref != null ? ref.getObjectId() : null;
|
||||
} catch (IOException e) {
|
||||
throw new OrmException(e);
|
||||
}
|
||||
|
@ -93,13 +93,10 @@ public abstract class AbstractChangeUpdate extends VersionedMetaData {
|
||||
|
||||
private void load() throws IOException {
|
||||
if (migration.writeChanges() && getRevision() == null) {
|
||||
Repository repo = repoManager.openMetadataRepository(getProjectName());
|
||||
try {
|
||||
try (Repository repo = repoManager.openMetadataRepository(getProjectName())) {
|
||||
load(repo);
|
||||
} catch (ConfigInvalidException e) {
|
||||
throw new IOException(e);
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -439,82 +439,82 @@ public class CommentsInNotesUtil {
|
||||
public byte[] buildNote(List<PatchLineComment> comments) {
|
||||
ByteArrayOutputStream buf = new ByteArrayOutputStream();
|
||||
OutputStreamWriter streamWriter = new OutputStreamWriter(buf, UTF_8);
|
||||
PrintWriter writer = new PrintWriter(streamWriter);
|
||||
PatchLineComment first = comments.get(0);
|
||||
try (PrintWriter writer = new PrintWriter(streamWriter)) {
|
||||
PatchLineComment first = comments.get(0);
|
||||
|
||||
short side = first.getSide();
|
||||
PatchSet.Id psId = PatchLineCommentsUtil.getCommentPsId(first);
|
||||
appendHeaderField(writer, side == 0
|
||||
? BASE_PATCH_SET
|
||||
: PATCH_SET,
|
||||
Integer.toString(psId.get()));
|
||||
appendHeaderField(writer, REVISION, first.getRevId().get());
|
||||
short side = first.getSide();
|
||||
PatchSet.Id psId = PatchLineCommentsUtil.getCommentPsId(first);
|
||||
appendHeaderField(writer, side == 0
|
||||
? BASE_PATCH_SET
|
||||
: PATCH_SET,
|
||||
Integer.toString(psId.get()));
|
||||
appendHeaderField(writer, REVISION, first.getRevId().get());
|
||||
|
||||
String currentFilename = null;
|
||||
String currentFilename = null;
|
||||
|
||||
for (PatchLineComment c : comments) {
|
||||
PatchSet.Id currentPsId = PatchLineCommentsUtil.getCommentPsId(c);
|
||||
checkArgument(psId.equals(currentPsId),
|
||||
"All comments being added must all have the same PatchSet.Id. The"
|
||||
+ "comment below does not have the same PatchSet.Id as the others "
|
||||
+ "(%s).\n%s", psId.toString(), c.toString());
|
||||
checkArgument(side == c.getSide(),
|
||||
"All comments being added must all have the same side. The"
|
||||
+ "comment below does not have the same side as the others "
|
||||
+ "(%s).\n%s", side, c.toString());
|
||||
String commentFilename =
|
||||
QuotedString.GIT_PATH.quote(c.getKey().getParentKey().getFileName());
|
||||
for (PatchLineComment c : comments) {
|
||||
PatchSet.Id currentPsId = PatchLineCommentsUtil.getCommentPsId(c);
|
||||
checkArgument(psId.equals(currentPsId),
|
||||
"All comments being added must all have the same PatchSet.Id. The"
|
||||
+ "comment below does not have the same PatchSet.Id as the others "
|
||||
+ "(%s).\n%s", psId.toString(), c.toString());
|
||||
checkArgument(side == c.getSide(),
|
||||
"All comments being added must all have the same side. The"
|
||||
+ "comment below does not have the same side as the others "
|
||||
+ "(%s).\n%s", side, c.toString());
|
||||
String commentFilename =
|
||||
QuotedString.GIT_PATH.quote(c.getKey().getParentKey().getFileName());
|
||||
|
||||
if (!commentFilename.equals(currentFilename)) {
|
||||
currentFilename = commentFilename;
|
||||
writer.print("File: ");
|
||||
writer.print(commentFilename);
|
||||
if (!commentFilename.equals(currentFilename)) {
|
||||
currentFilename = commentFilename;
|
||||
writer.print("File: ");
|
||||
writer.print(commentFilename);
|
||||
writer.print("\n\n");
|
||||
}
|
||||
|
||||
// The CommentRange field for a comment is allowed to be null.
|
||||
// If it is indeed null, then in the first line, we simply use the line
|
||||
// number field for a comment instead. If it isn't null, we write the
|
||||
// comment range itself.
|
||||
CommentRange range = c.getRange();
|
||||
if (range != null) {
|
||||
writer.print(range.getStartLine());
|
||||
writer.print(':');
|
||||
writer.print(range.getStartCharacter());
|
||||
writer.print('-');
|
||||
writer.print(range.getEndLine());
|
||||
writer.print(':');
|
||||
writer.print(range.getEndCharacter());
|
||||
} else {
|
||||
writer.print(c.getLine());
|
||||
}
|
||||
writer.print("\n");
|
||||
|
||||
writer.print(formatTime(serverIdent, c.getWrittenOn()));
|
||||
writer.print("\n");
|
||||
|
||||
PersonIdent ident =
|
||||
newIdent(accountCache.get(c.getAuthor()).getAccount(),
|
||||
c.getWrittenOn());
|
||||
String nameString = ident.getName() + " <" + ident.getEmailAddress()
|
||||
+ ">";
|
||||
appendHeaderField(writer, AUTHOR, nameString);
|
||||
|
||||
String parent = c.getParentUuid();
|
||||
if (parent != null) {
|
||||
appendHeaderField(writer, PARENT, parent);
|
||||
}
|
||||
|
||||
appendHeaderField(writer, UUID, c.getKey().get());
|
||||
|
||||
byte[] messageBytes = c.getMessage().getBytes(UTF_8);
|
||||
appendHeaderField(writer, LENGTH,
|
||||
Integer.toString(messageBytes.length));
|
||||
|
||||
writer.print(c.getMessage());
|
||||
writer.print("\n\n");
|
||||
}
|
||||
|
||||
// The CommentRange field for a comment is allowed to be null.
|
||||
// If it is indeed null, then in the first line, we simply use the line
|
||||
// number field for a comment instead. If it isn't null, we write the
|
||||
// comment range itself.
|
||||
CommentRange range = c.getRange();
|
||||
if (range != null) {
|
||||
writer.print(range.getStartLine());
|
||||
writer.print(':');
|
||||
writer.print(range.getStartCharacter());
|
||||
writer.print('-');
|
||||
writer.print(range.getEndLine());
|
||||
writer.print(':');
|
||||
writer.print(range.getEndCharacter());
|
||||
} else {
|
||||
writer.print(c.getLine());
|
||||
}
|
||||
writer.print("\n");
|
||||
|
||||
writer.print(formatTime(serverIdent, c.getWrittenOn()));
|
||||
writer.print("\n");
|
||||
|
||||
PersonIdent ident =
|
||||
newIdent(accountCache.get(c.getAuthor()).getAccount(),
|
||||
c.getWrittenOn());
|
||||
String nameString = ident.getName() + " <" + ident.getEmailAddress()
|
||||
+ ">";
|
||||
appendHeaderField(writer, AUTHOR, nameString);
|
||||
|
||||
String parent = c.getParentUuid();
|
||||
if (parent != null) {
|
||||
appendHeaderField(writer, PARENT, parent);
|
||||
}
|
||||
|
||||
appendHeaderField(writer, UUID, c.getKey().get());
|
||||
|
||||
byte[] messageBytes = c.getMessage().getBytes(UTF_8);
|
||||
appendHeaderField(writer, LENGTH,
|
||||
Integer.toString(messageBytes.length));
|
||||
|
||||
writer.print(c.getMessage());
|
||||
writer.print("\n\n");
|
||||
}
|
||||
writer.close();
|
||||
return buf.toByteArray();
|
||||
}
|
||||
|
||||
|
@ -163,8 +163,7 @@ public class PatchList implements Serializable {
|
||||
|
||||
private void writeObject(final ObjectOutputStream output) throws IOException {
|
||||
final ByteArrayOutputStream buf = new ByteArrayOutputStream();
|
||||
final DeflaterOutputStream out = new DeflaterOutputStream(buf);
|
||||
try {
|
||||
try (DeflaterOutputStream out = new DeflaterOutputStream(buf)) {
|
||||
writeCanBeNull(out, oldId);
|
||||
writeNotNull(out, newId);
|
||||
writeVarInt32(out, againstParent ? 1 : 0);
|
||||
@ -174,16 +173,13 @@ public class PatchList implements Serializable {
|
||||
for (PatchListEntry p : patches) {
|
||||
p.writeTo(out);
|
||||
}
|
||||
} finally {
|
||||
out.close();
|
||||
}
|
||||
writeBytes(output, buf.toByteArray());
|
||||
}
|
||||
|
||||
private void readObject(final ObjectInputStream input) throws IOException {
|
||||
final ByteArrayInputStream buf = new ByteArrayInputStream(readBytes(input));
|
||||
final InflaterInputStream in = new InflaterInputStream(buf);
|
||||
try {
|
||||
try (InflaterInputStream in = new InflaterInputStream(buf)) {
|
||||
oldId = readCanBeNull(in);
|
||||
newId = readNotNull(in);
|
||||
againstParent = readVarInt32(in) != 0;
|
||||
@ -195,8 +191,6 @@ public class PatchList implements Serializable {
|
||||
all[i] = PatchListEntry.readFrom(in);
|
||||
}
|
||||
patches = all;
|
||||
} finally {
|
||||
in.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -106,11 +106,8 @@ public class PatchListLoader extends CacheLoader<PatchListKey, PatchList> {
|
||||
@Override
|
||||
public PatchList load(final PatchListKey key) throws IOException,
|
||||
PatchListNotAvailableException {
|
||||
final Repository repo = repoManager.openRepository(key.projectKey);
|
||||
try {
|
||||
try (Repository repo = repoManager.openRepository(key.projectKey)) {
|
||||
return readPatchList(key, repo);
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
}
|
||||
|
||||
@ -384,11 +381,8 @@ public class PatchListLoader extends CacheLoader<PatchListKey, PatchList> {
|
||||
fmt.formatMerge(buf, p, "BASE", oursName, theirsName, "UTF-8");
|
||||
buf.close();
|
||||
|
||||
InputStream in = buf.openInputStream();
|
||||
try {
|
||||
try (InputStream in = buf.openInputStream()) {
|
||||
resolved.put(entry.getKey(), ins.insert(Constants.OBJ_BLOB, buf.length(), in));
|
||||
} finally {
|
||||
in.close();
|
||||
}
|
||||
} finally {
|
||||
buf.destroy();
|
||||
|
@ -155,9 +155,25 @@ public class PatchScriptFactory implements Callable<PatchScript> {
|
||||
throw new NoSuchChangeException(changeId);
|
||||
}
|
||||
|
||||
final Repository git;
|
||||
try {
|
||||
git = repoManager.openRepository(projectKey);
|
||||
try (Repository git = repoManager.openRepository(projectKey)) {
|
||||
try {
|
||||
final PatchList list = listFor(keyFor(diffPrefs.getIgnoreWhitespace()));
|
||||
final PatchScriptBuilder b = newBuilder(list, git);
|
||||
final PatchListEntry content = list.get(fileName);
|
||||
|
||||
loadCommentsAndHistory(content.getChangeType(), //
|
||||
content.getOldName(), //
|
||||
content.getNewName());
|
||||
|
||||
return b.toPatchScript(content, comments, history);
|
||||
} catch (PatchListNotAvailableException e) {
|
||||
throw new NoSuchChangeException(changeId, e);
|
||||
} catch (IOException e) {
|
||||
log.error("File content unavailable", e);
|
||||
throw new NoSuchChangeException(changeId, e);
|
||||
} catch (org.eclipse.jgit.errors.LargeObjectException err) {
|
||||
throw new LargeObjectException("File content is too large", err);
|
||||
}
|
||||
} catch (RepositoryNotFoundException e) {
|
||||
log.error("Repository " + projectKey + " not found", e);
|
||||
throw new NoSuchChangeException(changeId, e);
|
||||
@ -165,26 +181,6 @@ public class PatchScriptFactory implements Callable<PatchScript> {
|
||||
log.error("Cannot open repository " + projectKey, e);
|
||||
throw new NoSuchChangeException(changeId, e);
|
||||
}
|
||||
try {
|
||||
final PatchList list = listFor(keyFor(diffPrefs.getIgnoreWhitespace()));
|
||||
final PatchScriptBuilder b = newBuilder(list, git);
|
||||
final PatchListEntry content = list.get(fileName);
|
||||
|
||||
loadCommentsAndHistory(content.getChangeType(), //
|
||||
content.getOldName(), //
|
||||
content.getNewName());
|
||||
|
||||
return b.toPatchScript(content, comments, history);
|
||||
} catch (PatchListNotAvailableException e) {
|
||||
throw new NoSuchChangeException(changeId, e);
|
||||
} catch (IOException e) {
|
||||
log.error("File content unavailable", e);
|
||||
throw new NoSuchChangeException(changeId, e);
|
||||
} catch (org.eclipse.jgit.errors.LargeObjectException err) {
|
||||
throw new LargeObjectException("File content is too large", err);
|
||||
} finally {
|
||||
git.close();
|
||||
}
|
||||
}
|
||||
|
||||
private PatchListKey keyFor(final Whitespace whitespace) {
|
||||
|
@ -79,13 +79,8 @@ public class PatchSetInfoFactory {
|
||||
|
||||
public PatchSetInfo get(Change change, PatchSet patchSet)
|
||||
throws PatchSetInfoNotAvailableException {
|
||||
Repository repo;
|
||||
try {
|
||||
repo = repoManager.openRepository(change.getProject());
|
||||
} catch (IOException e) {
|
||||
throw new PatchSetInfoNotAvailableException(e);
|
||||
}
|
||||
try (RevWalk rw = new RevWalk(repo)) {
|
||||
try (Repository repo = repoManager.openRepository(change.getProject());
|
||||
RevWalk rw = new RevWalk(repo)) {
|
||||
final RevCommit src =
|
||||
rw.parseCommit(ObjectId.fromString(patchSet.getRevision().get()));
|
||||
PatchSetInfo info = get(src, patchSet.getId());
|
||||
@ -93,8 +88,6 @@ public class PatchSetInfoFactory {
|
||||
return info;
|
||||
} catch (IOException e) {
|
||||
throw new PatchSetInfoNotAvailableException(e);
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -185,11 +185,8 @@ public class JarScanner implements PluginContentScanner {
|
||||
private static byte[] read(JarFile jarFile, JarEntry entry)
|
||||
throws IOException {
|
||||
byte[] data = new byte[(int) entry.getSize()];
|
||||
InputStream in = jarFile.getInputStream(entry);
|
||||
try {
|
||||
try (InputStream in = jarFile.getInputStream(entry)) {
|
||||
IO.readFully(in, data, 0, data.length);
|
||||
} finally {
|
||||
in.close();
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
@ -117,8 +117,7 @@ public class CreateBranch implements RestModifyView<ProjectResource, Input> {
|
||||
|
||||
final Branch.NameKey name = new Branch.NameKey(rsrc.getNameKey(), ref);
|
||||
final RefControl refControl = rsrc.getControl().controlForRef(name);
|
||||
final Repository repo = repoManager.openRepository(rsrc.getNameKey());
|
||||
try {
|
||||
try (Repository repo = repoManager.openRepository(rsrc.getNameKey())) {
|
||||
final ObjectId revid = parseBaseRevision(repo, rsrc.getNameKey(), input.revision);
|
||||
final RevWalk rw = verifyConnected(repo, revid);
|
||||
RevObject object = rw.parseAny(revid);
|
||||
@ -184,8 +183,6 @@ public class CreateBranch implements RestModifyView<ProjectResource, Input> {
|
||||
}
|
||||
} catch (InvalidRevisionException e) {
|
||||
throw new BadRequestException("invalid revision \"" + input.revision + "\"");
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -229,8 +229,7 @@ public class CreateProject implements RestModifyView<TopLevelResource, ProjectIn
|
||||
final String head =
|
||||
args.permissionsOnly ? RefNames.REFS_CONFIG
|
||||
: args.branch.get(0);
|
||||
Repository repo = repoManager.createRepository(nameKey);
|
||||
try {
|
||||
try (Repository repo = repoManager.createRepository(nameKey)) {
|
||||
NewProjectCreatedListener.Event event = new NewProjectCreatedListener.Event() {
|
||||
@Override
|
||||
public String getProjectName() {
|
||||
@ -262,8 +261,6 @@ public class CreateProject implements RestModifyView<TopLevelResource, ProjectIn
|
||||
}
|
||||
|
||||
return projectCache.get(nameKey).getProject();
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
} catch (RepositoryCaseMismatchException e) {
|
||||
throw new ResourceConflictException("Cannot create " + nameKey.get()
|
||||
@ -273,16 +270,11 @@ public class CreateProject implements RestModifyView<TopLevelResource, ProjectIn
|
||||
} catch (RepositoryNotFoundException badName) {
|
||||
throw new BadRequestException("invalid project name: " + nameKey);
|
||||
} catch (IllegalStateException err) {
|
||||
try {
|
||||
Repository repo = repoManager.openRepository(nameKey);
|
||||
try {
|
||||
if (repo.getObjectDatabase().exists()) {
|
||||
throw new ResourceConflictException("project \"" + nameKey + "\" exists");
|
||||
}
|
||||
throw err;
|
||||
} finally {
|
||||
repo.close();
|
||||
try (Repository repo = repoManager.openRepository(nameKey)) {
|
||||
if (repo.getObjectDatabase().exists()) {
|
||||
throw new ResourceConflictException("project \"" + nameKey + "\" exists");
|
||||
}
|
||||
throw err;
|
||||
} catch (IOException ioErr) {
|
||||
String msg = "Cannot create " + nameKey;
|
||||
log.error(msg, err);
|
||||
|
@ -132,21 +132,15 @@ class DashboardsCollection implements
|
||||
throw new ResourceNotFoundException(id);
|
||||
}
|
||||
|
||||
Repository git;
|
||||
try {
|
||||
git = gitManager.openRepository(ctl.getProject().getNameKey());
|
||||
} catch (RepositoryNotFoundException e) {
|
||||
throw new ResourceNotFoundException(id);
|
||||
}
|
||||
try {
|
||||
try (Repository git = gitManager.openRepository(ctl.getProject().getNameKey())) {
|
||||
ObjectId objId = git.resolve(ref + ":" + path);
|
||||
if (objId == null) {
|
||||
throw new ResourceNotFoundException(id);
|
||||
}
|
||||
BlobBasedConfig cfg = new BlobBasedConfig(null, git, objId);
|
||||
return new DashboardResource(myCtl, ref, path, cfg, false);
|
||||
} finally {
|
||||
git.close();
|
||||
} catch (RepositoryNotFoundException e) {
|
||||
throw new ResourceNotFoundException(id);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -82,8 +82,7 @@ public class DeleteBranch implements RestModifyView<BranchResource, Input>{
|
||||
+ " has open changes");
|
||||
}
|
||||
|
||||
Repository r = repoManager.openRepository(rsrc.getNameKey());
|
||||
try {
|
||||
try (Repository r = repoManager.openRepository(rsrc.getNameKey())) {
|
||||
RefUpdate.Result result;
|
||||
RefUpdate u = r.updateRef(rsrc.getRef());
|
||||
u.setForceUpdate(true);
|
||||
@ -129,8 +128,6 @@ public class DeleteBranch implements RestModifyView<BranchResource, Input>{
|
||||
log.error("Cannot delete " + rsrc.getBranchKey() + ": " + result.name());
|
||||
throw new ResourceConflictException("cannot delete branch: " + result.name());
|
||||
}
|
||||
} finally {
|
||||
r.close();
|
||||
}
|
||||
return Response.none();
|
||||
}
|
||||
|
@ -93,8 +93,7 @@ class DeleteBranches implements RestModifyView<ProjectResource, Input> {
|
||||
public Response<?> apply(ProjectResource project, Input input)
|
||||
throws OrmException, IOException, ResourceConflictException {
|
||||
input = Input.init(input);
|
||||
Repository r = repoManager.openRepository(project.getNameKey());
|
||||
try {
|
||||
try (Repository r = repoManager.openRepository(project.getNameKey())) {
|
||||
BatchRefUpdate batchUpdate = r.getRefDatabase().newBatchUpdate();
|
||||
for (String branch : input.branches) {
|
||||
batchUpdate.addCommand(createDeleteCommand(project, r, branch));
|
||||
@ -113,8 +112,6 @@ class DeleteBranches implements RestModifyView<ProjectResource, Input> {
|
||||
if (errorMessages.length() > 0) {
|
||||
throw new ResourceConflictException(errorMessages.toString());
|
||||
}
|
||||
} finally {
|
||||
r.close();
|
||||
}
|
||||
return Response.none();
|
||||
}
|
||||
|
@ -78,8 +78,7 @@ public class GetReflog implements RestReadView<BranchResource> {
|
||||
throw new AuthException("not project owner");
|
||||
}
|
||||
|
||||
Repository repo = repoManager.openRepository(rsrc.getNameKey());
|
||||
try {
|
||||
try (Repository repo = repoManager.openRepository(rsrc.getNameKey())) {
|
||||
ReflogReader r = repo.getReflogReader(rsrc.getRef());
|
||||
if (r == null) {
|
||||
throw new ResourceNotFoundException(rsrc.getRef());
|
||||
@ -108,8 +107,6 @@ public class GetReflog implements RestReadView<BranchResource> {
|
||||
public ReflogEntryInfo apply(ReflogEntry e) {
|
||||
return new ReflogEntryInfo(e);
|
||||
}});
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -45,18 +45,13 @@ public class GetStatistics implements RestReadView<ProjectResource> {
|
||||
@Override
|
||||
public RepositoryStatistics apply(ProjectResource rsrc)
|
||||
throws ResourceNotFoundException, ResourceConflictException {
|
||||
try {
|
||||
Repository repo = repoManager.openRepository(rsrc.getNameKey());
|
||||
try {
|
||||
GarbageCollectCommand gc = Git.wrap(repo).gc();
|
||||
return new RepositoryStatistics(gc.getStatistics());
|
||||
} catch (GitAPIException e) {
|
||||
throw new ResourceConflictException(e.getMessage());
|
||||
} catch (JGitInternalException e) {
|
||||
throw new ResourceConflictException(e.getMessage());
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
try (Repository repo = repoManager.openRepository(rsrc.getNameKey())) {
|
||||
GarbageCollectCommand gc = Git.wrap(repo).gc();
|
||||
return new RepositoryStatistics(gc.getStatistics());
|
||||
} catch (GitAPIException e) {
|
||||
throw new ResourceConflictException(e.getMessage());
|
||||
} catch (JGitInternalException e) {
|
||||
throw new ResourceConflictException(e.getMessage());
|
||||
} catch (IOException e) {
|
||||
throw new ResourceNotFoundException(rsrc.getName());
|
||||
}
|
||||
|
@ -343,8 +343,7 @@ public class ListProjects implements RestReadView<TopLevelResource> {
|
||||
|
||||
try {
|
||||
if (!showBranch.isEmpty()) {
|
||||
Repository git = repoManager.openRepository(projectName);
|
||||
try {
|
||||
try (Repository git = repoManager.openRepository(projectName)) {
|
||||
if (!type.matches(git)) {
|
||||
continue;
|
||||
}
|
||||
@ -363,17 +362,12 @@ public class ListProjects implements RestReadView<TopLevelResource> {
|
||||
info.branches.put(showBranch.get(i), ref.getObjectId().name());
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
git.close();
|
||||
}
|
||||
} else if (!showTree && type != FilterType.ALL) {
|
||||
Repository git = repoManager.openRepository(projectName);
|
||||
try {
|
||||
try (Repository git = repoManager.openRepository(projectName)) {
|
||||
if (!type.matches(git)) {
|
||||
continue;
|
||||
}
|
||||
} finally {
|
||||
git.close();
|
||||
}
|
||||
}
|
||||
|
||||
@ -511,20 +505,15 @@ public class ListProjects implements RestReadView<TopLevelResource> {
|
||||
private List<Ref> getBranchRefs(Project.NameKey projectName,
|
||||
ProjectControl projectControl) {
|
||||
Ref[] result = new Ref[showBranch.size()];
|
||||
try {
|
||||
Repository git = repoManager.openRepository(projectName);
|
||||
try {
|
||||
for (int i = 0; i < showBranch.size(); i++) {
|
||||
Ref ref = git.getRef(showBranch.get(i));
|
||||
if (ref != null
|
||||
&& ref.getObjectId() != null
|
||||
&& (projectControl.controlForRef(ref.getLeaf().getName()).isVisible())
|
||||
|| (all && projectControl.isOwner())) {
|
||||
result[i] = ref;
|
||||
}
|
||||
try (Repository git = repoManager.openRepository(projectName)) {
|
||||
for (int i = 0; i < showBranch.size(); i++) {
|
||||
Ref ref = git.getRef(showBranch.get(i));
|
||||
if (ref != null
|
||||
&& ref.getObjectId() != null
|
||||
&& (projectControl.controlForRef(ref.getLeaf().getName()).isVisible())
|
||||
|| (all && projectControl.isOwner())) {
|
||||
result[i] = ref;
|
||||
}
|
||||
} finally {
|
||||
git.close();
|
||||
}
|
||||
} catch (IOException ioe) {
|
||||
// Fall through and return what is available.
|
||||
|
@ -69,9 +69,7 @@ public class ListTags implements RestReadView<ProjectResource> {
|
||||
ResourceNotFoundException {
|
||||
List<TagInfo> tags = Lists.newArrayList();
|
||||
|
||||
Repository repo = getRepository(resource.getNameKey());
|
||||
|
||||
try {
|
||||
try (Repository repo = getRepository(resource.getNameKey())) {
|
||||
RevWalk rw = new RevWalk(repo);
|
||||
try {
|
||||
Map<String, Ref> all = visibleTags(resource.getControl(), repo,
|
||||
@ -82,8 +80,6 @@ public class ListTags implements RestReadView<ProjectResource> {
|
||||
} finally {
|
||||
rw.dispose();
|
||||
}
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
|
||||
Collections.sort(tags, new Comparator<TagInfo>() {
|
||||
|
@ -288,13 +288,10 @@ public class ProjectCacheImpl implements ProjectCache {
|
||||
@Override
|
||||
public ProjectState load(String projectName) throws Exception {
|
||||
Project.NameKey key = new Project.NameKey(projectName);
|
||||
Repository git = mgr.openRepository(key);
|
||||
try {
|
||||
try (Repository git = mgr.openRepository(key)) {
|
||||
ProjectConfig cfg = new ProjectConfig(key);
|
||||
cfg.load(git);
|
||||
return projectStateFactory.create(cfg);
|
||||
} finally {
|
||||
git.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -535,14 +535,9 @@ public class ProjectControl {
|
||||
}
|
||||
|
||||
public boolean canReadCommit(ReviewDb db, RevWalk rw, RevCommit commit) {
|
||||
try {
|
||||
Repository repo = openRepository();
|
||||
try {
|
||||
return isMergedIntoVisibleRef(repo, db, rw, commit,
|
||||
repo.getAllRefs().values());
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
try (Repository repo = openRepository()) {
|
||||
return isMergedIntoVisibleRef(repo, db, rw, commit,
|
||||
repo.getAllRefs().values());
|
||||
} catch (IOException e) {
|
||||
String msg = String.format(
|
||||
"Cannot verify permissions to commit object %s in repository %s",
|
||||
|
@ -163,17 +163,12 @@ public class ProjectState {
|
||||
}
|
||||
|
||||
private boolean isRevisionOutOfDate() {
|
||||
try {
|
||||
Repository git = gitMgr.openRepository(getProject().getNameKey());
|
||||
try {
|
||||
Ref ref = git.getRefDatabase().exactRef(RefNames.REFS_CONFIG);
|
||||
if (ref == null || ref.getObjectId() == null) {
|
||||
return true;
|
||||
}
|
||||
return !ref.getObjectId().equals(config.getRevision());
|
||||
} finally {
|
||||
git.close();
|
||||
try (Repository git = gitMgr.openRepository(getProject().getNameKey())) {
|
||||
Ref ref = git.getRefDatabase().exactRef(RefNames.REFS_CONFIG);
|
||||
if (ref == null || ref.getObjectId() == null) {
|
||||
return true;
|
||||
}
|
||||
return !ref.getObjectId().equals(config.getRevision());
|
||||
} catch (IOException gone) {
|
||||
return true;
|
||||
}
|
||||
@ -228,13 +223,8 @@ public class ProjectState {
|
||||
}
|
||||
|
||||
ProjectLevelConfig cfg = new ProjectLevelConfig(fileName, this);
|
||||
try {
|
||||
Repository git = gitMgr.openRepository(getProject().getNameKey());
|
||||
try {
|
||||
cfg.load(git);
|
||||
} finally {
|
||||
git.close();
|
||||
}
|
||||
try (Repository git = gitMgr.openRepository(getProject().getNameKey())) {
|
||||
cfg.load(git);
|
||||
} catch (IOException e) {
|
||||
log.warn("Failed to load " + fileName + " for " + getProject().getName(), e);
|
||||
} catch (ConfigInvalidException e) {
|
||||
|
@ -328,17 +328,12 @@ public class RefControl {
|
||||
|
||||
private boolean isMergedIntoBranchOrTag(ReviewDb db, RevWalk rw,
|
||||
RevCommit commit) {
|
||||
try {
|
||||
Repository repo = projectControl.openRepository();
|
||||
try {
|
||||
List<Ref> refs = new ArrayList<>(
|
||||
repo.getRefDatabase().getRefs(Constants.R_HEADS).values());
|
||||
refs.addAll(repo.getRefDatabase().getRefs(Constants.R_TAGS).values());
|
||||
return projectControl.isMergedIntoVisibleRef(
|
||||
repo, db, rw, commit, refs);
|
||||
} finally {
|
||||
repo.close();
|
||||
}
|
||||
try (Repository repo = projectControl.openRepository()) {
|
||||
List<Ref> refs = new ArrayList<>(
|
||||
repo.getRefDatabase().getRefs(Constants.R_HEADS).values());
|
||||
refs.addAll(repo.getRefDatabase().getRefs(Constants.R_TAGS).values());
|
||||
return projectControl.isMergedIntoVisibleRef(
|
||||
repo, db, rw, commit, refs);
|
||||
} catch (IOException e) {
|
||||
String msg = String.format(
|
||||
"Cannot verify permissions to commit object %s in repository %s",
|
||||
|
@ -76,9 +76,7 @@ public class SetHead implements RestModifyView<ProjectResource, Input> {
}
String ref = RefNames.fullName(input.ref);

Repository repo = null;
try {
repo = repoManager.openRepository(rsrc.getNameKey());
try (Repository repo = repoManager.openRepository(rsrc.getNameKey())) {
Map<String, Ref> cur =
repo.getRefDatabase().exactRef(Constants.HEAD, ref);
if (!cur.containsKey(ref)) {
@ -129,10 +127,6 @@ public class SetHead implements RestModifyView<ProjectResource, Input> {
return ref;
} catch (RepositoryNotFoundException e) {
throw new ResourceNotFoundException(rsrc.getName());
} finally {
if (repo != null) {
repo.close();
}
}
}
}

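In the SetHead hunk the old code had to declare the repository as null outside the try so that the finally block could guard close() with a null check; try-with-resources removes that boilerplate because close() is only called on a resource that was actually initialized, and the existing catch clause stays attached to the same statement. A self-contained sketch of that shape (Handle, openOrThrow and NotFoundException are illustrative names, not Gerrit APIs):

public class NullGuardSketch {
  static class NotFoundException extends Exception {}

  static class Handle implements AutoCloseable {
    @Override
    public void close() {
      System.out.println("closed");
    }
  }

  // Hypothetical factory that may fail before a handle ever exists.
  static Handle openOrThrow(boolean exists) throws NotFoundException {
    if (!exists) {
      throw new NotFoundException();
    }
    return new Handle();
  }

  // Old shape: null declaration plus guarded close in finally.
  static String oldStyle(boolean exists) {
    Handle h = null;
    try {
      h = openOrThrow(exists);
      return "ok";
    } catch (NotFoundException e) {
      return "missing";
    } finally {
      if (h != null) {
        h.close();
      }
    }
  }

  // New shape: if openOrThrow() throws, no close() is attempted; the catch still runs.
  static String newStyle(boolean exists) {
    try (Handle h = openOrThrow(exists)) {
      return "ok";
    } catch (NotFoundException e) {
      return "missing";
    }
  }

  public static void main(String[] args) {
    System.out.println(oldStyle(true) + " / " + oldStyle(false));
    System.out.println(newStyle(true) + " / " + newStyle(false));
  }
}
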
@ -90,8 +90,7 @@ public class SchemaUpdater {
}

public void update(final UpdateUI ui) throws OrmException {
final ReviewDb db = schema.open();
try {
try (ReviewDb db = schema.open()) {
final SchemaVersion u = updater.get();
final CurrentSchemaVersion version = getSchemaVersion(db);
if (version == null) {
@ -112,8 +111,6 @@ public class SchemaUpdater {

updateSystemConfig(db);
}
} finally {
db.close();
}
}

@ -48,31 +48,26 @@ public class SchemaVersionCheck implements LifecycleListener {

@Override
public void start() {
try {
final ReviewDb db = schema.open();
try {
final CurrentSchemaVersion currentVer = getSchemaVersion(db);
final int expectedVer = SchemaVersion.getBinaryVersion();
try (ReviewDb db = schema.open()) {
final CurrentSchemaVersion currentVer = getSchemaVersion(db);
final int expectedVer = SchemaVersion.getBinaryVersion();

if (currentVer == null) {
throw new ProvisionException("Schema not yet initialized."
+ " Run init to initialize the schema:\n"
+ "$ java -jar gerrit.war init -d "
+ site.site_path.toAbsolutePath());
}
if (currentVer.versionNbr < expectedVer) {
throw new ProvisionException("Unsupported schema version "
+ currentVer.versionNbr + "; expected schema version " + expectedVer
+ ". Run init to upgrade:\n"
+ "$ java -jar " + site.gerrit_war.toAbsolutePath() + " init -d "
+ site.site_path.toAbsolutePath());
} else if (currentVer.versionNbr > expectedVer) {
throw new ProvisionException("Unsupported schema version "
+ currentVer.versionNbr + "; expected schema version " + expectedVer
+ ". Downgrade is not supported.");
}
} finally {
db.close();
if (currentVer == null) {
throw new ProvisionException("Schema not yet initialized."
+ " Run init to initialize the schema:\n"
+ "$ java -jar gerrit.war init -d "
+ site.site_path.toAbsolutePath());
}
if (currentVer.versionNbr < expectedVer) {
throw new ProvisionException("Unsupported schema version "
+ currentVer.versionNbr + "; expected schema version " + expectedVer
+ ". Run init to upgrade:\n"
+ "$ java -jar " + site.gerrit_war.toAbsolutePath() + " init -d "
+ site.site_path.toAbsolutePath());
} else if (currentVer.versionNbr > expectedVer) {
throw new ProvisionException("Unsupported schema version "
+ currentVer.versionNbr + "; expected schema version " + expectedVer
+ ". Downgrade is not supported.");
}
} catch (OrmException e) {
throw new ProvisionException("Cannot read schema_version", e);

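For hunks like SchemaUpdater and SchemaVersionCheck it is worth noting that a catch clause attached to a try-with-resources statement also sees exceptions thrown by the implicit close(), and when both the body and close() fail, the close() failure is recorded as a suppressed exception rather than replacing the primary one (with a plain finally block, an exception from close() would mask the original error). A self-contained sketch, using a made-up FailingDb rather than Gerrit's ReviewDb:

public class SuppressedCloseSketch {
  // Made-up resource whose close() always fails, standing in for a database handle.
  static class FailingDb implements AutoCloseable {
    void query() {
      throw new IllegalStateException("query failed");
    }

    @Override
    public void close() {
      throw new IllegalStateException("close failed");
    }
  }

  public static void main(String[] args) {
    try (FailingDb db = new FailingDb()) {
      db.query();
    } catch (IllegalStateException e) {
      // Prints "query failed"; the close() failure is attached as suppressed, not lost.
      System.out.println("primary: " + e.getMessage());
      for (Throwable s : e.getSuppressed()) {
        System.out.println("suppressed: " + s.getMessage());
      }
    }
  }
}
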
@ -61,38 +61,33 @@ public class Schema_106 extends SchemaVersion {
ui.message(String.format("creating reflog files for %s branches ...",
RefNames.REFS_CONFIG));
for (Project.NameKey project : repoList) {
try {
Repository repo = repoManager.openRepository(project);
try {
File metaConfigLog =
new File(repo.getDirectory(), "logs/" + RefNames.REFS_CONFIG);
if (metaConfigLog.exists()) {
continue;
}
try (Repository repo = repoManager.openRepository(project)) {
File metaConfigLog =
new File(repo.getDirectory(), "logs/" + RefNames.REFS_CONFIG);
if (metaConfigLog.exists()) {
continue;
}

if (!metaConfigLog.getParentFile().mkdirs()
|| !metaConfigLog.createNewFile()) {
throw new IOException(String.format(
"Failed to create reflog for %s in repository %s",
RefNames.REFS_CONFIG, project));
}
if (!metaConfigLog.getParentFile().mkdirs()
|| !metaConfigLog.createNewFile()) {
throw new IOException(String.format(
"Failed to create reflog for %s in repository %s",
RefNames.REFS_CONFIG, project));
}

ObjectId metaConfigId = repo.resolve(RefNames.REFS_CONFIG);
if (metaConfigId != null) {
try (PrintWriter writer =
new PrintWriter(metaConfigLog, UTF_8.name())) {
writer.print(ObjectId.zeroId().name());
writer.print(" ");
writer.print(metaConfigId.name());
writer.print(" ");
writer.print(serverUser.toExternalString());
writer.print("\t");
writer.print("create reflog");
writer.println();
}
ObjectId metaConfigId = repo.resolve(RefNames.REFS_CONFIG);
if (metaConfigId != null) {
try (PrintWriter writer =
new PrintWriter(metaConfigLog, UTF_8.name())) {
writer.print(ObjectId.zeroId().name());
writer.print(" ");
writer.print(metaConfigId.name());
writer.print(" ");
writer.print(serverUser.toExternalString());
writer.print("\t");
writer.print("create reflog");
writer.println();
}
} finally {
repo.close();
}
} catch (IOException e) {
ui.message(String.format("ERROR: Failed to create reflog file for the"

@ -31,11 +31,8 @@ public class Schema_107 extends SchemaVersion {

@Override
protected void migrateData(ReviewDb db, UpdateUI ui) throws SQLException {
Statement stmt = ((JdbcSchema) db).getConnection().createStatement();
try {
try (Statement stmt = ((JdbcSchema) db).getConnection().createStatement()) {
stmt.executeUpdate("UPDATE accounts set mute_common_path_prefixes = 'Y'");
} finally {
stmt.close();
}
}
}

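The Schema_107 hunk needs no wrapper type at all: java.sql.Connection and java.sql.Statement have implemented AutoCloseable since Java 7, and several resources can be declared in one try header, where they are closed in reverse order of declaration. A sketch of the same pattern against plain JDBC (the URL is a placeholder and the UPDATE statement is only illustrative; an actual run needs a JDBC driver on the classpath and a matching table):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class JdbcTryWithResources {
  public static void main(String[] args) throws SQLException {
    // Placeholder URL; point it at whatever JDBC driver is available on the classpath.
    String url = "jdbc:h2:mem:example";

    // Both resources are closed automatically: the Statement first, then the Connection.
    try (Connection conn = DriverManager.getConnection(url);
        Statement stmt = conn.createStatement()) {
      stmt.executeUpdate("UPDATE accounts SET mute_common_path_prefixes = 'Y'");
    }
  }
}
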
@ -73,8 +73,7 @@ class ScriptRunner {
}

private List<String> parse(final InputStream in) throws IOException {
BufferedReader br = new BufferedReader(new InputStreamReader(in, "UTF-8"));
try {
try (BufferedReader br = new BufferedReader(new InputStreamReader(in, "UTF-8"))) {
String delimiter = ";";
List<String> commands = new ArrayList<>();
StringBuilder buffer = new StringBuilder();
@ -107,8 +106,6 @@ class ScriptRunner {
commands.add(buffer.toString());
}
return commands;
} finally {
br.close();
}
}

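In ScriptRunner only the outermost BufferedReader needs to appear in the try header: closing it closes the wrapped InputStreamReader, which in turn closes the underlying InputStream. A runnable sketch of that chain reading from an in-memory stream (the SQL text is just sample input):

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public class ReaderChainSketch {
  public static void main(String[] args) throws IOException {
    byte[] script = "CREATE TABLE t (id INT);\nINSERT INTO t VALUES (1);\n"
        .getBytes(StandardCharsets.UTF_8);
    InputStream in = new ByteArrayInputStream(script);

    // Declaring only the outermost reader is enough: BufferedReader.close()
    // closes the InputStreamReader, which closes the underlying InputStream.
    try (BufferedReader br =
        new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
      String line;
      while ((line = br.readLine()) != null) {
        System.out.println(line);
      }
    }
  }
}
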
@ -109,21 +109,15 @@ public class ToolsCatalog {

private static byte[] read(String path) {
String name = "root/" + path;
InputStream in = ToolsCatalog.class.getResourceAsStream(name);
if (in == null) {
return null;
}

try {
try (InputStream in = ToolsCatalog.class.getResourceAsStream(name)) {
if (in == null) {
return null;
}
final ByteArrayOutputStream out = new ByteArrayOutputStream();
try {
final byte[] buf = new byte[8192];
int n;
while ((n = in.read(buf, 0, buf.length)) > 0) {
out.write(buf, 0, n);
}
} finally {
in.close();
final byte[] buf = new byte[8192];
int n;
while ((n = in.read(buf, 0, buf.length)) > 0) {
out.write(buf, 0, n);
}
return out.toByteArray();
} catch (Exception e) {

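The ToolsCatalog hunk moves the null check inside the try block. That is safe because getResourceAsStream() may return null and a try-with-resources statement only calls close() on resources that were initialized to a non-null value, so a missing resource simply falls through to the return rather than failing on close. A self-contained sketch mirroring that shape (NullResourceSketch is a made-up class, not part of Gerrit):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

public class NullResourceSketch {
  // Mirrors the ToolsCatalog.read() shape: the null check lives inside the try,
  // and close() is skipped entirely when the resource initialized to null.
  static byte[] read(String name) {
    try (InputStream in = NullResourceSketch.class.getResourceAsStream(name)) {
      if (in == null) {
        return null;
      }
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      byte[] buf = new byte[8192];
      int n;
      while ((n = in.read(buf, 0, buf.length)) > 0) {
        out.write(buf, 0, n);
      }
      return out.toByteArray();
    } catch (IOException e) {
      return null;
    }
  }

  public static void main(String[] args) {
    // The compiled class file of this class is on the classpath when run;
    // the second name does not exist, and neither case throws from the implicit close().
    byte[] self = read("NullResourceSketch.class");
    byte[] missing = read("does-not-exist.bin");
    System.out.println("self=" + (self == null ? "null" : self.length + " bytes"));
    System.out.println("missing=" + (missing == null ? "null" : missing.length + " bytes"));
  }
}
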
Some files were not shown because too many files have changed in this diff.