diff --git a/src/freenet/client/ArchiveManager.java b/src/freenet/client/ArchiveManager.java index 911c50f3db7..957f94cd6ad 100644 --- a/src/freenet/client/ArchiveManager.java +++ b/src/freenet/client/ArchiveManager.java @@ -30,11 +30,9 @@ import freenet.support.MutableBoolean; import freenet.support.api.Bucket; import freenet.support.api.BucketFactory; -import freenet.support.compress.CompressionOutputSizeException; import freenet.support.compress.Compressor; import freenet.support.compress.Compressor.COMPRESSOR_TYPE; import freenet.support.io.BucketTools; -import freenet.support.io.Closer; import freenet.support.io.SkipShieldingInputStream; import net.contrapunctus.lzma.LzmaInputStream; @@ -263,7 +261,7 @@ public void extractToCache(FreenetURI key, ARCHIVE_TYPE archiveType, COMPRESSOR_ ctx.removeAllCachedItems(this); // flush cache anyway final long expectedSize = ctx.getLastSize(); final long archiveSize = data.size(); - /** Set if we need to throw a RestartedException rather than returning success, + /* Set if we need to throw a RestartedException rather than returning success, * after we have unpacked everything. */ boolean throwAtExit = false; @@ -312,31 +310,21 @@ else if(logMINOR) PipedInputStream pis = new PipedInputStream(); PipedOutputStream pos = new PipedOutputStream(); pis.connect(pos); - final OutputStream os = new BufferedOutputStream(pos); + wrapper = new ExceptionWrapper(); context.mainExecutor.execute(new Runnable() { - @Override public void run() { - InputStream is = null; - try { - Compressor.COMPRESSOR_TYPE.LZMA_NEW.decompress(is = data.getInputStream(), os, data.size(), expectedSize); - } catch (CompressionOutputSizeException e) { - Logger.error(this, "Failed to decompress archive: "+e, e); - wrapper.set(e); + try ( + InputStream is = data.getInputStream(); + OutputStream os = new BufferedOutputStream(pos) + ){ + Compressor.COMPRESSOR_TYPE.LZMA_NEW.decompress(is, os, data.size(), expectedSize); } catch (IOException e) { Logger.error(this, "Failed to decompress archive: "+e, e); wrapper.set(e); - } finally { - try { - os.close(); - } catch (IOException e) { - Logger.error(this, "Failed to close PipedOutputStream: "+e, e); - } - Closer.close(is); } } - }); is = pis; } else if(ctype == COMPRESSOR_TYPE.LZMA) { @@ -347,55 +335,63 @@ public void run() { wrapper = null; } - if(ARCHIVE_TYPE.ZIP == archiveType) { - handleZIPArchive(ctx, key, is, element, callback, gotElement, throwAtExit, context); - } else if(ARCHIVE_TYPE.TAR == archiveType) { - // COMPRESS-449 workaround, see https://freenet.mantishub.io/view.php?id=6921 - handleTARArchive(ctx, key, new SkipShieldingInputStream(is), element, callback, gotElement, throwAtExit, context); - } else { - throw new ArchiveFailureException("Unknown or unsupported archive algorithm " + archiveType); + try (InputStream archiveInputStream = is) { + if (ARCHIVE_TYPE.ZIP == archiveType) { + handleZIPArchive(ctx, key, archiveInputStream, element, callback, gotElement, throwAtExit, context); + } else if (ARCHIVE_TYPE.TAR == archiveType) { + // COMPRESS-449 workaround, see https://freenet.mantishub.io/view.php?id=6921 + handleTARArchive(ctx, key, new SkipShieldingInputStream(archiveInputStream), element, callback, gotElement, throwAtExit, context); + } else { + throw new ArchiveFailureException("Unknown or unsupported archive algorithm " + archiveType); + } } - if(wrapper != null) { + if (wrapper != null) { Exception e = wrapper.get(); - if(e != null) throw new ArchiveFailureException("An exception occured decompressing: "+e.getMessage(), 
e); + if (e != null) { + throw new ArchiveFailureException("An exception occurred decompressing: " + e.getMessage(), e); + } } } catch (IOException ioe) { - throw new ArchiveFailureException("An IOE occured: "+ioe.getMessage(), ioe); - } finally { - Closer.close(is); - } + throw new ArchiveFailureException("An IOE occurred: "+ioe.getMessage(), ioe); + } } private void handleTARArchive(ArchiveStoreContext ctx, FreenetURI key, InputStream data, String element, ArchiveExtractCallback callback, MutableBoolean gotElement, boolean throwAtExit, ClientContext context) throws ArchiveFailureException, ArchiveRestartException { - if(logMINOR) Logger.minor(this, "Handling a TAR Archive"); - TarArchiveInputStream tarIS = null; - try { - tarIS = new TarArchiveInputStream(data); - + if(logMINOR) { + Logger.minor(this, "Handling a TAR Archive"); + } + try( + TarArchiveInputStream tarIS = new TarArchiveInputStream(data); + ){ // MINOR: Assumes the first entry in the tarball is a directory. ArchiveEntry entry; byte[] buf = new byte[32768]; - HashSet names = new HashSet(); + Set names = new HashSet<>(); boolean gotMetadata = false; outerTAR: while(true) { try { - entry = tarIS.getNextEntry(); + entry = tarIS.getNextEntry(); } catch (IllegalArgumentException e) { // Annoyingly, it can throw this on some corruptions... throw new ArchiveFailureException("Error reading archive: "+e.getMessage(), e); } - if(entry == null) break; - if(entry.isDirectory()) continue; + if(entry == null) { + break; + } + if(entry.isDirectory()) { + continue; + } String name = stripLeadingSlashes(entry.getName()); if(names.contains(name)) { Logger.error(this, "Duplicate key "+name+" in archive "+key); continue; } long size = entry.getSize(); - if(name.equals(".metadata")) + if(name.equals(".metadata")) { gotMetadata = true; + } if(size > maxArchivedFileSize && !name.equals(element)) { addErrorElement(ctx, key, name, "File too big: "+size+" greater than current archived file size limit "+maxArchivedFileSize, true); } else { @@ -417,9 +413,11 @@ private void handleTARArchive(ArchiveStoreContext ctx, FreenetURI key, InputStre continue outerTAR; } } - + } finally { - if(out != null) out.close(); + if(out != null) { + out.close(); + } } if(size <= maxArchivedFileSize) { addStoreElement(ctx, key, name, output, gotElement, element, callback, context); @@ -439,15 +437,15 @@ private void handleTARArchive(ArchiveStoreContext ctx, FreenetURI key, InputStre generateMetadata(ctx, key, names, gotElement, element, callback, context); trimStoredData(); } - if(throwAtExit) throw new ArchiveRestartException("Archive changed on re-fetch"); + if(throwAtExit) { + throw new ArchiveRestartException("Archive changed on re-fetch"); + } - if((!gotElement.value) && element != null) + if((!gotElement.value) && element != null) { callback.notInArchive(context); - + } } catch (IOException e) { throw new ArchiveFailureException("Error reading archive: "+e.getMessage(), e); - } finally { - Closer.close(tarIS); } } @@ -484,7 +482,7 @@ private void handleZIPArchive(ArchiveStoreContext ctx, FreenetURI key, InputStre Bucket output = tempBucketFactory.makeBucket(size); OutputStream out = output.getOutputStream(); try { - + int readBytes; while((readBytes = zis.read(buf)) > 0) { out.write(buf, 0, readBytes); @@ -497,7 +495,7 @@ private void handleZIPArchive(ArchiveStoreContext ctx, FreenetURI key, InputStre continue outerZIP; } } - + } finally { if(out != null) out.close(); } diff --git a/src/freenet/client/Metadata.java b/src/freenet/client/Metadata.java index 
2665bf7efdf..6075b53bf0f 100644 --- a/src/freenet/client/Metadata.java +++ b/src/freenet/client/Metadata.java @@ -37,7 +37,6 @@ import freenet.support.api.BucketFactory; import freenet.support.api.RandomAccessBucket; import freenet.support.compress.Compressor.COMPRESSOR_TYPE; -import freenet.support.io.Closer; import freenet.support.io.CountedOutputStream; import freenet.support.io.NullOutputStream; @@ -1228,16 +1227,13 @@ public byte[] writeToByteArray() throws MetadataUnresolvedException { } public long writtenLength() throws MetadataUnresolvedException { - CountedOutputStream cos = new CountedOutputStream(new NullOutputStream()); - DataOutputStream dos = null; - try { - dos = new DataOutputStream(cos); + CountedOutputStream cos = new CountedOutputStream(new NullOutputStream()); + try ( + DataOutputStream dos = new DataOutputStream(cos); + ){ writeTo(dos); } catch (IOException e) { - throw new Error("Could not write to CountedOutputStream: "+e, e); - } finally { - Closer.close(dos); - Closer.close(cos); + throw new RuntimeException("Could not write to CountedOutputStream: "+e, e); } return cos.written(); } @@ -1687,20 +1683,17 @@ public void resolve(String name) { public RandomAccessBucket toBucket(BucketFactory bf) throws MetadataUnresolvedException, IOException { RandomAccessBucket b = bf.makeBucket(-1); - DataOutputStream dos = null; boolean success = false; - try { - dos = new DataOutputStream(b.getOutputStream()); + try (DataOutputStream dos = new DataOutputStream(b.getOutputStream())) { writeTo(dos); - dos.close(); - dos = null; - b.setReadOnly(); // Must be after dos.close() success = true; - return b; } finally { - Closer.close(dos); - if(!success) b.free(); + if(!success) { + b.free(); + } } + b.setReadOnly(); + return b; } public boolean isResolved() { diff --git a/src/freenet/client/async/ClientGetWorkerThread.java b/src/freenet/client/async/ClientGetWorkerThread.java index e2156bab2ed..61a32a06aa0 100644 --- a/src/freenet/client/async/ClientGetWorkerThread.java +++ b/src/freenet/client/async/ClientGetWorkerThread.java @@ -4,11 +4,7 @@ package freenet.client.async; -import java.io.BufferedInputStream; -import java.io.EOFException; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; +import java.io.*; import java.net.URI; import java.net.URISyntaxException; @@ -26,7 +22,6 @@ import freenet.keys.FreenetURI; import freenet.support.Logger; import freenet.support.compress.CompressionOutputSizeException; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; /**A thread which does postprocessing of decompressed data, in particular, @@ -35,7 +30,7 @@ * the relevant constructor arguments.*/ public class ClientGetWorkerThread extends Thread { - private InputStream input; + private final InputStream input; final private String schemeHostAndPort; final private URI uri; final private HashResult[] hashes; @@ -48,7 +43,7 @@ public class ClientGetWorkerThread extends Thread { private final LinkFilterExceptionProvider linkFilterExceptionProvider; final private String mimeType; - private OutputStream output; + private final OutputStream output; private boolean finished = false; private Throwable error = null; private ClientMetadata clientMetadata = null; @@ -96,14 +91,21 @@ public ClientGetWorkerThread(InputStream input, OutputStream output, FreenetURI String mimeType, String schemeHostAndPort, HashResult[] hashes, boolean filterData, String charset, FoundURICallback prefetchHook, TagReplacerCallback tagReplacer, LinkFilterExceptionProvider 
linkFilterExceptionProvider) throws URISyntaxException { super("ClientGetWorkerThread-"+counter()); + if (input == null) { + throw new IllegalArgumentException("Input stream is missing"); + } + if (output == null) { + throw new IllegalArgumentException("Output stream is missing"); + } this.input = input; + this.output = output; if(uri != null) this.uri = uri.toURI("/"); else this.uri = null; if(mimeType != null && mimeType.compareTo("application/xhtml+xml") == 0) mimeType = "text/html"; this.mimeType = mimeType; this.schemeHostAndPort = schemeHostAndPort; this.hashes = hashes; - this.output = output; + this.filterData = filterData; this.charset = charset; this.prefetchHook = prefetchHook; @@ -114,21 +116,30 @@ public ClientGetWorkerThread(InputStream input, OutputStream output, FreenetURI @Override public void run() { - if(logMINOR) Logger.minor(this, "Starting worker thread for "+uri+" mime type "+mimeType+" filter data = "+filterData+" charset "+charset); - try { + if (logMINOR) { + Logger.minor(this, "Starting worker thread for "+uri+" mime type "+mimeType+" filter data = "+filterData+" charset "+charset); + } + try ( + OutputStream outputStream = this.output; + InputStream is = this.input + ){ //Validate the hash of the now decompressed data - input = new BufferedInputStream(input); + InputStream inputStream = new BufferedInputStream(is); MultiHashInputStream hashStream = null; if(hashes != null) { - hashStream = new MultiHashInputStream(input, HashResult.makeBitmask(hashes)); - input = hashStream; + hashStream = new MultiHashInputStream(inputStream, HashResult.makeBitmask(hashes)); + inputStream = hashStream; } //Filter the data, if we are supposed to if(filterData){ - if(logMINOR) Logger.minor(this, "Running content filter... Prefetch hook: "+prefetchHook+" tagReplacer: "+tagReplacer); - if(mimeType == null || uri == null || input == null || output == null) throw new IOException("Insufficient arguements to worker thread"); + if(logMINOR) { + Logger.minor(this, "Running content filter... Prefetch hook: "+prefetchHook+" tagReplacer: "+tagReplacer); + } + if(mimeType == null || uri == null) { + throw new IOException("Insufficient arguements to worker thread"); + } // Send XHTML as HTML because we can't use web-pushing on XHTML. - FilterStatus filterStatus = ContentFilter.filter(input, output, mimeType, uri, + FilterStatus filterStatus = ContentFilter.filter(inputStream, outputStream, mimeType, uri, schemeHostAndPort, prefetchHook, tagReplacer, charset, linkFilterExceptionProvider); String detectedMIMEType = filterStatus.mimeType.concat(filterStatus.charset == null ? "" : "; charset="+filterStatus.charset); @@ -138,7 +149,7 @@ public void run() { } else { if(logMINOR) Logger.minor(this, "Ignoring content filter. The final result has not been written. Writing now."); - FileUtil.copy(input, output, -1); + FileUtil.copy(inputStream, outputStream, -1); } // Dump the rest. try { @@ -147,14 +158,14 @@ public void run() { // Note this is only necessary because we might have an AEADInputStream? // FIXME get rid - they should check the end anyway? byte[] buf = new byte[4096]; - int r = input.read(buf); + int r = inputStream.read(buf); if(r < 0) break; } } catch (EOFException e) { // Okay. 
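The hunk above rewrites ClientGetWorkerThread.run() so that the input and output streams are owned by a try-with-resources header, the data is optionally wrapped in a hashing stream, and the hash check happens after the copy. A minimal sketch of that wrap-then-verify shape, using the standard java.security.DigestInputStream in place of Freenet's MultiHashInputStream (the class and method names below are illustrative, not taken from the patch):

import java.io.*;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;

// Sketch: copy a stream while hashing it, then verify the digest.
final class HashVerifyingCopy {
    static void copyAndVerify(InputStream rawInput, OutputStream rawOutput, byte[] expectedSha256)
            throws IOException, NoSuchAlgorithmException {
        MessageDigest md = MessageDigest.getInstance("SHA-256");
        try (DigestInputStream in = new DigestInputStream(new BufferedInputStream(rawInput), md);
             OutputStream out = rawOutput) {
            byte[] buf = new byte[4096];
            int r;
            while ((r = in.read(buf)) > 0) {
                out.write(buf, 0, r);
            }
        } // both streams are closed on every exit path, even if the copy throws
        if (!Arrays.equals(md.digest(), expectedSha256)) {
            throw new IOException("Hash of decompressed data does not match the expected value");
        }
    }
}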
} - input.close(); - output.close(); + inputStream.close(); + outputStream.close(); if(hashes != null) { HashResult[] results = hashStream.getResults(); if(!HashResult.strictEquals(results, hashes)) { @@ -170,9 +181,6 @@ public void run() { else if(logMINOR) Logger.minor(this, "Exception caught while processing fetch: "+t,t); setError(t); - } finally { - Closer.close(input); - Closer.close(output); } } diff --git a/src/freenet/client/async/ClientGetter.java b/src/freenet/client/async/ClientGetter.java index dcee4fde0e6..b63a3b09897 100644 --- a/src/freenet/client/async/ClientGetter.java +++ b/src/freenet/client/async/ClientGetter.java @@ -3,18 +3,7 @@ * http://www.gnu.org/ for further details of the GPL. */ package freenet.client.async; -import java.io.BufferedInputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.PipedInputStream; -import java.io.PipedOutputStream; -import java.io.RandomAccessFile; -import java.io.Serializable; +import java.io.*; import java.net.MalformedURLException; import java.net.URISyntaxException; import java.util.List; @@ -47,7 +36,6 @@ import freenet.support.compress.CompressionOutputSizeException; import freenet.support.compress.Compressor; import freenet.support.compress.DecompressorThreadManager; -import freenet.support.io.Closer; import freenet.support.io.FileBucket; import freenet.support.io.FileUtil; import freenet.support.io.InsufficientDiskSpaceException; @@ -299,10 +287,6 @@ public void onSuccess(StreamGenerator streamGenerator, ClientMetadata clientMeta // nested locking resulting in deadlocks, it also prevents long locks due to // doing massive encrypted I/Os while holding a lock. 
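The ClientGetter.onSuccess hunk that follows replaces the manually closed PipedInputStream/PipedOutputStream pair with streams owned by try-with-resources, with the decompressor manager and worker thread sitting between the two ends. A minimal sketch of that producer/consumer wiring under the same ownership rules, with hypothetical names and the Freenet-specific stages elided:

import java.io.*;
import java.util.concurrent.Executor;

// Sketch: one task fills the pipe, the calling thread drains it; both pipe ends
// are released by the try-with-resources header even on failure.
final class PipedCopySketch {
    static void produceAndConsume(Executor executor, InputStream source, OutputStream destination)
            throws IOException {
        try (PipedInputStream readEnd = new PipedInputStream();
             PipedOutputStream writeEnd = new PipedOutputStream(readEnd)) {
            executor.execute(() -> {
                try (OutputStream w = writeEnd; InputStream s = source) {
                    s.transferTo(w);      // producer: fill the pipe, then close the write end
                } catch (IOException e) {
                    // closing the write end lets the reader observe the failure as end-of-stream
                }
            });
            readEnd.transferTo(destination);  // consumer: drain the pipe into the caller's stream
        }
    }
}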
- PipedOutputStream dataOutput = new PipedOutputStream(); - PipedInputStream dataInput = new PipedInputStream(); - OutputStream output = null; - DecompressorThreadManager decompressorManager = null; ClientGetWorkerThread worker = null; Bucket finalResult = null; @@ -321,42 +305,54 @@ public void onSuccess(StreamGenerator streamGenerator, ClientMetadata clientMeta maxLen = Math.max(ctx.maxTempLength, ctx.maxOutputLength); } + + FetchException ex = null; // set on failure - try { + try ( + PipedInputStream pis = new PipedInputStream(); + PipedOutputStream dataOutput = new PipedOutputStream() + ) { if(returnBucket == null) finalResult = context.getBucketFactory(persistent()).makeBucket(maxLen); else finalResult = returnBucket; if(logMINOR) Logger.minor(this, "Writing final data to "+finalResult+" return bucket is "+returnBucket); - dataOutput .connect(dataInput); + dataOutput.connect(pis); result = new FetchResult(clientMetadata, finalResult); + PipedInputStream dataInput = pis; // Decompress if(decompressors != null) { if(logMINOR) Logger.minor(this, "Decompressing..."); - decompressorManager = new DecompressorThreadManager(dataInput, decompressors, maxLen); + decompressorManager = new DecompressorThreadManager(pis, decompressors, maxLen); dataInput = decompressorManager.execute(); } - output = finalResult.getOutputStream(); - if(ctx.overrideMIME != null) mimeType = ctx.overrideMIME; - worker = new ClientGetWorkerThread(new BufferedInputStream(dataInput), output, uri, mimeType, ctx.getSchemeHostAndPort(), hashes, ctx.filterData, ctx.charset, ctx.prefetchHook, ctx.tagReplacer, context.linkFilterExceptionProvider); - worker.start(); - try { - streamGenerator.writeTo(dataOutput, context); - } catch(IOException e) { - //Check if the worker thread caught an exception - worker.getError(); - //If not, throw the original error - throw e; + if (ctx.overrideMIME != null) { + mimeType = ctx.overrideMIME; } + try ( + BufferedInputStream bufferedDataInput = new BufferedInputStream(dataInput); + OutputStream output = finalResult.getOutputStream() + ) { + worker = new ClientGetWorkerThread(bufferedDataInput, output, uri, mimeType, ctx.getSchemeHostAndPort(), hashes, ctx.filterData, ctx.charset, ctx.prefetchHook, ctx.tagReplacer, context.linkFilterExceptionProvider); + worker.start(); + try { + streamGenerator.writeTo(dataOutput, context); + } catch (IOException e) { + //Check if the worker thread caught an exception + worker.getError(); + //If not, throw the original error + throw e; + } - // An error will propagate backwards, so wait for the worker first. + // An error will propagate backwards, so wait for the worker first. 
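Both ArchiveManager's ExceptionWrapper and the worker.getError() check kept above rely on the same idea: a failure on the thread that feeds or drains the pipe is recorded and rethrown on the thread driving the request, instead of surfacing only as a broken-pipe IOException. A minimal, generic sketch of that idea (names are illustrative, not the patch's classes):

import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference;

// Sketch: the background task records its failure; the driving thread rethrows it.
final class BackgroundError {
    private final AtomicReference<IOException> error = new AtomicReference<>();

    void set(IOException e) {                    // called on the background thread
        error.compareAndSet(null, e);            // keep the first failure only
    }

    void rethrowIfFailed() throws IOException {  // called on the driving thread
        IOException e = error.get();
        if (e != null) {
            throw e;
        }
    }
}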
- if(logMINOR) Logger.minor(this, "Waiting for hashing, filtration, and writing to finish"); - worker.waitFinished(); + if (logMINOR) Logger.minor(this, "Waiting for hashing, filtration, and writing to finish"); + worker.waitFinished(); - if(decompressorManager != null) { - if(logMINOR) Logger.minor(this, "Waiting for decompression to finalize"); - decompressorManager.waitFinished(); + if (decompressorManager != null) { + if (logMINOR) Logger.minor(this, "Waiting for decompression to finalize"); + decompressorManager.waitFinished(); + } } if(worker.getClientMetadata() != null) { @@ -391,11 +387,8 @@ public void onSuccess(StreamGenerator streamGenerator, ClientMetadata clientMeta } catch(Throwable t) { Logger.error(this, "Caught "+t, t); ex = new FetchException(FetchExceptionMode.INTERNAL_ERROR, t); - } finally { - Closer.close(dataInput); - Closer.close(dataOutput); - Closer.close(output); } + if(ex != null) { onFailure(ex, state, context, true); if(finalResult != null && finalResult != returnBucket) { diff --git a/src/freenet/client/async/ContainerInserter.java b/src/freenet/client/async/ContainerInserter.java index f7f7a31e33c..4fbcfc8f2d0 100644 --- a/src/freenet/client/async/ContainerInserter.java +++ b/src/freenet/client/async/ContainerInserter.java @@ -34,7 +34,6 @@ import freenet.support.api.ManifestElement; import freenet.support.api.RandomAccessBucket; import freenet.support.io.BucketTools; -import freenet.support.io.Closer; import freenet.support.io.ResumeFailedException; /** @@ -162,30 +161,36 @@ public void schedule(ClientContext context) throws InsertException { private void start(ClientContext context) { - if(logDEBUG) Logger.debug(this, "Atempt to start a container inserter", new Exception("debug")); - + if (logDEBUG) { + Logger.debug(this, "Atempt to start a container inserter", new Exception("debug")); + } + makeMetadata(context); - - synchronized(this) { - if(finished) return; + + synchronized (this) { + if (finished) { + return; + } } - + InsertBlock block; - OutputStream os = null; try { - RandomAccessBucket outputBucket = context.getBucketFactory(persistent).makeBucket(-1); - os = new BufferedOutputStream(outputBucket.getOutputStream()); - String mimeType = (archiveType == ARCHIVE_TYPE.TAR ? - createTarBucket(os) : - createZipBucket(os)); - os = null; // create*Bucket closes os - if(logMINOR) - Logger.minor(this, "Archive size is "+outputBucket.size()); - - if(logMINOR) Logger.minor(this, "We are using "+archiveType); - + RandomAccessBucket outputBucket = context.getBucketFactory(persistent).makeBucket(-1); + String mimeType; + try (OutputStream os = new BufferedOutputStream(outputBucket.getOutputStream())) { + if (archiveType == ARCHIVE_TYPE.TAR) { + mimeType = createTarBucket(os); + } else { + mimeType = createZipBucket(os); + } + } + if (logMINOR) { + Logger.minor(this, "Archive size is " + outputBucket.size()); + Logger.minor(this, "We are using " + archiveType); + } + // Now we have to insert the Archive we have generated. - + // Can we just insert it, and not bother with a redirect to it? // Thereby exploiting implicit manifest support, which will pick up on .metadata?? // We ought to be able to !! 
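createTarBucket() and createZipBucket() below now open their archive streams in try-with-resources instead of closing them in finally blocks. A minimal sketch of the TAR variant with a single hard-coded entry, using the same Commons Compress calls the patch keeps (the entry name and payload are made up):

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;

// Sketch: write one entry into a TAR stream that is closed automatically.
final class TarSketch {
    static void writeSingleEntryTar(OutputStream destination) throws IOException {
        byte[] payload = "hello".getBytes(StandardCharsets.UTF_8);
        try (TarArchiveOutputStream tarOS = new TarArchiveOutputStream(destination)) {
            tarOS.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
            TarArchiveEntry entry = new TarArchiveEntry("hello.txt");
            entry.setSize(payload.length);
            tarOS.putArchiveEntry(entry);
            tarOS.write(payload);
            tarOS.closeArchiveEntry();
        } // close() also finishes the archive, writing the trailing records
    }
}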
@@ -193,31 +198,29 @@ private void start(ClientContext context) { } catch (IOException e) { fail(new InsertException(InsertExceptionMode.BUCKET_ERROR, e, null), context); return; - } finally { - Closer.close(os); } - + boolean dc = dontCompress; if (!dontCompress) { dc = (archiveType == ARCHIVE_TYPE.ZIP); } - + // Treat it as a splitfile for purposes of determining reinsert count. SingleFileInserter sfi = new SingleFileInserter(parent, cb, block, false, ctx, realTimeFlag, dc, reportMetadataOnly, token, archiveType, true, null, true, persistent, 0, 0, null, cryptoAlgorithm, forceCryptoKey, -1); - if(logMINOR) - Logger.minor(this, "Inserting container: "+sfi+" for "+this); + if (logMINOR) { + Logger.minor(this, "Inserting container: " + sfi + " for " + this); + } cb.onTransition(this, sfi, context); try { sfi.schedule(context); } catch (InsertException e) { fail(new InsertException(InsertExceptionMode.BUCKET_ERROR, e, null), context); - return; } } private void makeMetadata(ClientContext context) { - Bucket bucket = null; + Bucket bucket; int x = 0; Metadata md = makeManifest(origMetadata, ""); @@ -281,9 +284,8 @@ public int hashCode() { */ private String createTarBucket(OutputStream os) throws IOException { if(logMINOR) Logger.minor(this, "Create a TAR Bucket"); - - TarArchiveOutputStream tarOS = new TarArchiveOutputStream(os); - try { + + try (TarArchiveOutputStream tarOS = new TarArchiveOutputStream(os)) { tarOS.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU); TarArchiveEntry ze; @@ -298,8 +300,6 @@ private String createTarBucket(OutputStream os) throws IOException { BucketTools.copyTo(ph.data, tarOS, size); tarOS.closeArchiveEntry(); } - } finally { - tarOS.close(); } return ARCHIVE_TYPE.TAR.mimeTypes[0]; @@ -307,9 +307,8 @@ private String createTarBucket(OutputStream os) throws IOException { private String createZipBucket(OutputStream os) throws IOException { if(logMINOR) Logger.minor(this, "Create a ZIP Bucket"); - - ZipOutputStream zos = new ZipOutputStream(os); - try { + + try (ZipOutputStream zos = new ZipOutputStream(os)) { ZipEntry ze; for (ContainerElement ph : containerItems) { @@ -319,8 +318,6 @@ private String createZipBucket(OutputStream os) throws IOException { BucketTools.copyTo(ph.data, zos, ph.data.size()); zos.closeEntry(); } - } finally { - zos.close(); } return ARCHIVE_TYPE.ZIP.mimeTypes[0]; diff --git a/src/freenet/client/async/InsertCompressor.java b/src/freenet/client/async/InsertCompressor.java index 9059341a3e1..0827aa02d9c 100644 --- a/src/freenet/client/async/InsertCompressor.java +++ b/src/freenet/client/async/InsertCompressor.java @@ -22,7 +22,6 @@ import freenet.support.compress.CompressionRatioException; import freenet.support.compress.Compressor.COMPRESSOR_TYPE; import freenet.support.compress.InvalidCompressionCodecException; -import freenet.support.io.Closer; import freenet.support.io.NativeThread; /** @@ -134,41 +133,48 @@ public boolean run(ClientContext context) { } } - InputStream is = null; - OutputStream os = null; - MultiHashInputStream hasher = null; - try { - is = origData.getInputStream(); - result = bucketFactory.makeBucket(-1); - os = result.getOutputStream(); - if(first && generateHashes != 0) { - if(logMINOR) Logger.minor(this, "Generating hashes: "+generateHashes); - is = hasher = new MultiHashInputStream(is, generateHashes); + try (InputStream is = origData.getInputStream()){ + InputStream multiHashInputStream = is; + MultiHashInputStream hasher = null; + if (first && generateHashes != 0) { + if (logMINOR) { + Logger.minor(this, 
"Generating hashes: "+generateHashes); + } + multiHashInputStream = hasher = new MultiHashInputStream(is, generateHashes); } - try { - comp.compress(is, os, origSize, bestCompressedDataSize, - amountOfDataToCheckCompressionRatio, minimumCompressionPercentage); - } catch (CompressionOutputSizeException | CompressionRatioException e) { - if(hasher != null) { - is.skip(Long.MAX_VALUE); + result = bucketFactory.makeBucket(-1); + try ( + InputStream mhis = multiHashInputStream; + OutputStream os = result.getOutputStream() + ){ + try { + comp.compress( + mhis, + os, + origSize, + bestCompressedDataSize, + amountOfDataToCheckCompressionRatio, + minimumCompressionPercentage + ); + } catch (CompressionOutputSizeException | CompressionRatioException e) { + if (hasher != null) { + mhis.skip(Long.MAX_VALUE); + hashes = hasher.getResults(); + first = false; + } + continue; // try next compressor type + } catch (RuntimeException e) { + // ArithmeticException has been seen in bzip2 codec. + Logger.error(this, "Compression failed with codec " + comp + " : " + e, e); + // Try the next one + // RuntimeException is iffy, so lets not try the hasher. + continue; + } + if (hasher != null) { hashes = hasher.getResults(); first = false; } - continue; // try next compressor type - } catch (RuntimeException e) { - // ArithmeticException has been seen in bzip2 codec. - Logger.error(this, "Compression failed with codec "+comp+" : "+e, e); - // Try the next one - // RuntimeException is iffy, so lets not try the hasher. - continue; - } - if(hasher != null) { - hashes = hasher.getResults(); - first = false; } - } finally { - Closer.close(is); - Closer.close(os); } long resultSize = result.size(); long resultNumberOfBlocks = resultSize/CHKBlock.DATA_LENGTH; diff --git a/src/freenet/client/async/SingleFileFetcher.java b/src/freenet/client/async/SingleFileFetcher.java index 3bea647a5de..4df09b31b9e 100644 --- a/src/freenet/client/async/SingleFileFetcher.java +++ b/src/freenet/client/async/SingleFileFetcher.java @@ -48,7 +48,6 @@ import freenet.support.compress.DecompressorThreadManager; import freenet.support.compress.Compressor.COMPRESSOR_TYPE; import freenet.support.io.BucketTools; -import freenet.support.io.Closer; import freenet.support.io.InsufficientDiskSpaceException; /** @@ -898,38 +897,41 @@ class ArchiveFetcherCallback implements GetCompletionCallback, Serializable { @Override public void onSuccess(StreamGenerator streamGenerator, ClientMetadata clientMetadata, List decompressors, ClientGetState state, ClientContext context) { - OutputStream output = null; - PipedInputStream pipeIn = new PipedInputStream(); - PipedOutputStream pipeOut = new PipedOutputStream(); - Bucket data = null; + + Bucket data; // FIXME not strictly correct and unnecessary - archive size already checked against ctx.max*Length inside SingleFileFetcher long maxLen = Math.min(ctx.maxTempLength, ctx.maxOutputLength); try { data = context.getBucketFactory(persistent).makeBucket(maxLen); - output = data.getOutputStream(); - if(decompressors != null) { - if(logMINOR) Logger.minor(this, "decompressing..."); - pipeOut.connect(pipeIn); - DecompressorThreadManager decompressorManager = new DecompressorThreadManager(pipeIn, decompressors, maxLen); - pipeIn = decompressorManager.execute(); - ClientGetWorkerThread worker = new ClientGetWorkerThread(new BufferedInputStream(pipeIn), output, null, null , ctx.getSchemeHostAndPort(), null, false, null, null, null, context.linkFilterExceptionProvider); - worker.start(); - streamGenerator.writeTo(pipeOut, 
context); - decompressorManager.waitFinished(); - worker.waitFinished(); - } else streamGenerator.writeTo(output, context); - // We want to see anything thrown when these are closed. - output.close(); output = null; - pipeOut.close(); pipeOut = null; - pipeIn.close(); pipeIn = null; + try (OutputStream output = data.getOutputStream()) { + if (decompressors != null) { + if (logMINOR) { + Logger.minor(this, "decompressing..."); + } + try ( + PipedInputStream pipeIn = new PipedInputStream(); + PipedOutputStream pipeOut = new PipedOutputStream() + ) { + pipeOut.connect(pipeIn); + DecompressorThreadManager decompressorManager = new DecompressorThreadManager(pipeIn, decompressors, maxLen); + try ( + InputStream pipeInNext = new BufferedInputStream(decompressorManager.execute()) + ) { + ClientGetWorkerThread worker = new ClientGetWorkerThread(pipeInNext, output, null, null, ctx.getSchemeHostAndPort(), null, false, null, null, null, context.linkFilterExceptionProvider); + worker.start(); + streamGenerator.writeTo(pipeOut, context); + decompressorManager.waitFinished(); + worker.waitFinished(); + } + } + } else { + streamGenerator.writeTo(output, context); + } + } } catch (Throwable t) { Logger.error(this, "Caught "+t, t); onFailure(new FetchException(FetchExceptionMode.INTERNAL_ERROR, t), state, context); return; - } finally { - Closer.close(pipeOut); - Closer.close(pipeIn); - Closer.close(output); } if(key instanceof ClientSSK) { // Fetching the container is essentially a full success, we should update the latest known good. @@ -944,14 +946,19 @@ public void onSuccess(StreamGenerator streamGenerator, ClientMetadata clientMeta private void innerSuccess(Bucket data, ClientContext context) { try { if(hashes != null) { - InputStream is = null; try { - is = data.getInputStream(); - MultiHashInputStream hasher = new MultiHashInputStream(is, HashResult.makeBitmask(hashes)); - byte[] buf = new byte[32768]; - while(hasher.read(buf) > 0); - hasher.close(); - is = null; + MultiHashInputStream hasher; + try ( + InputStream is = data.getInputStream(); + MultiHashInputStream hasherAutoCloseable = new MultiHashInputStream(is, HashResult.makeBitmask(hashes)) + ) { + hasher = hasherAutoCloseable; + byte[] buf = new byte[32768]; + while(hasherAutoCloseable.read(buf) > 0) { + // NOP + } + } + HashResult[] results = hasher.getResults(); if(!HashResult.strictEquals(results, hashes)) { onFailure(new FetchException(FetchExceptionMode.CONTENT_HASH_FAILED), SingleFileFetcher.this, context); @@ -962,8 +969,6 @@ private void innerSuccess(Bucket data, ClientContext context) { } catch (IOException e) { onFailure(new FetchException(FetchExceptionMode.BUCKET_ERROR, e), SingleFileFetcher.this, context); return; - } finally { - Closer.close(is); } } ah.extractToCache(data, actx, element, callback, context.archiveManager, context); @@ -1045,10 +1050,7 @@ class MultiLevelMetadataCallback implements GetCompletionCallback, Serializable @Override public void onSuccess(StreamGenerator streamGenerator, ClientMetadata clientMetadata, List decompressors, ClientGetState state, ClientContext context) { - OutputStream output = null; - PipedInputStream pipeIn = new PipedInputStream(); - PipedOutputStream pipeOut = new PipedOutputStream(); - Bucket finalData = null; + Bucket finalData; // does matter only on pre-1255 keys (1255 keys have top block sizes) // FIXME would save at most few tics on decompression // and block allocation; @@ -1056,31 +1058,31 @@ public void onSuccess(StreamGenerator streamGenerator, ClientMetadata clientMeta long 
maxLen = Math.min(ctx.maxTempLength, ctx.maxOutputLength); try { finalData = context.getBucketFactory(persistent).makeBucket(maxLen); - output = finalData.getOutputStream(); - if(decompressors != null) { - if(logMINOR) Logger.minor(this, "decompressing..."); - pipeIn.connect(pipeOut); - DecompressorThreadManager decompressorManager = new DecompressorThreadManager(pipeIn, decompressors, maxLen); - pipeIn = decompressorManager.execute(); - ClientGetWorkerThread worker = new ClientGetWorkerThread(new BufferedInputStream(pipeIn), output, null, null, ctx.getSchemeHostAndPort(), null, false, null, null, null, context.linkFilterExceptionProvider); - worker.start(); - streamGenerator.writeTo(pipeOut, context); - decompressorManager.waitFinished(); - worker.waitFinished(); - // ClientGetWorkerThread will close output. - } else { - streamGenerator.writeTo(output, context); - output.close(); + try (OutputStream output = finalData.getOutputStream()) { + if (decompressors != null) { + if (logMINOR) Logger.minor(this, "decompressing..."); + try ( + PipedInputStream pipeIn = new PipedInputStream(); + PipedOutputStream pipeOut = new PipedOutputStream(); + ) { + pipeIn.connect(pipeOut); + DecompressorThreadManager decompressorManager = new DecompressorThreadManager(pipeIn, decompressors, maxLen); + try (InputStream pipeInNext = new BufferedInputStream(decompressorManager.execute())) { + ClientGetWorkerThread worker = new ClientGetWorkerThread(pipeInNext, output, null, null, ctx.getSchemeHostAndPort(), null, false, null, null, null, context.linkFilterExceptionProvider); + worker.start(); + streamGenerator.writeTo(pipeOut, context); + decompressorManager.waitFinished(); + worker.waitFinished(); + } + } + } else { + streamGenerator.writeTo(output, context); + } } - } catch (Throwable t) { Logger.error(this, "Caught "+t, t); onFailure(new FetchException(FetchExceptionMode.INTERNAL_ERROR, t), state, context); return; - } finally { - Closer.close(pipeOut); - Closer.close(pipeIn); - Closer.close(output); } try { diff --git a/src/freenet/client/async/SingleFileStreamGenerator.java b/src/freenet/client/async/SingleFileStreamGenerator.java index 925aedd518a..6c46928a627 100644 --- a/src/freenet/client/async/SingleFileStreamGenerator.java +++ b/src/freenet/client/async/SingleFileStreamGenerator.java @@ -13,7 +13,6 @@ import freenet.support.Logger; import freenet.support.Logger.LogLevel; import freenet.support.api.Bucket; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; /**Writes a Bucket to an output stream.*/ @@ -39,20 +38,19 @@ public void shouldUpdate(){ @Override public void writeTo(OutputStream os, ClientContext context) throws IOException { - try{ - if(logMINOR) Logger.minor(this, "Generating Stream", new Exception("debug")); - InputStream data = bucket.getInputStream(); - try { - FileUtil.copy(data, os, -1); - } finally { - data.close(); + try (Bucket b = this.bucket) { + if(logMINOR) { + Logger.minor(this, "Generating Stream"); + } + try ( + OutputStream out = os; + InputStream data = b.getInputStream() + ) { + FileUtil.copy(data, out, -1); + } + if(logMINOR) { + Logger.minor(this, "Stream completely generated"); } - os.close(); - bucket.free(); - if(logMINOR) Logger.minor(this, "Stream completely generated", new Exception("debug")); - } finally { - Closer.close(bucket); - Closer.close(os); } } diff --git a/src/freenet/client/async/USKFetcher.java b/src/freenet/client/async/USKFetcher.java index 3c9b48b2066..803a7d84e2c 100644 --- a/src/freenet/client/async/USKFetcher.java +++ 
b/src/freenet/client/async/USKFetcher.java @@ -6,12 +6,7 @@ import static java.util.concurrent.TimeUnit.HOURS; import static java.util.concurrent.TimeUnit.MINUTES; -import java.io.BufferedInputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.io.PipedInputStream; -import java.io.PipedOutputStream; -import java.io.UnsupportedEncodingException; +import java.io.*; import java.lang.ref.WeakReference; import java.net.MalformedURLException; import java.util.ArrayList; @@ -57,7 +52,6 @@ import freenet.support.compress.Compressor; import freenet.support.compress.DecompressorThreadManager; import freenet.support.io.BucketTools; -import freenet.support.io.Closer; /** * @@ -209,43 +203,46 @@ class DBRAttempt implements GetCompletionCallback { if(logMINOR) Logger.minor(this, "Created "+this+" with "+fetcher); } @Override - public void onSuccess(StreamGenerator streamGenerator, - ClientMetadata clientMetadata, - List decompressors, ClientGetState state, - ClientContext context) { - OutputStream output = null; - PipedInputStream pipeIn = new PipedInputStream(); - PipedOutputStream pipeOut = new PipedOutputStream(); - Bucket data = null; + public void onSuccess( + StreamGenerator streamGenerator, + ClientMetadata clientMetadata, + List decompressors, + ClientGetState state, + ClientContext context + ) { long maxLen = Math.max(ctx.maxTempLength, ctx.maxOutputLength); - try { - data = context.getBucketFactory(false).makeBucket(maxLen); - output = data.getOutputStream(); - if(decompressors != null) { - if(logMINOR) Logger.minor(this, "decompressing..."); - pipeOut.connect(pipeIn); - DecompressorThreadManager decompressorManager = new DecompressorThreadManager(pipeIn, decompressors, maxLen); - pipeIn = decompressorManager.execute(); - ClientGetWorkerThread worker = new ClientGetWorkerThread(new BufferedInputStream(pipeIn), output, null, null, ctx.getSchemeHostAndPort(), null, false, null, null, null, context.linkFilterExceptionProvider); - worker.start(); - streamGenerator.writeTo(pipeOut, context); - decompressorManager.waitFinished(); - worker.waitFinished(); - } else streamGenerator.writeTo(output, context); - - output.close(); - pipeOut.close(); - pipeIn.close(); - output = null; - pipeOut = null; - pipeIn = null; + try ( + Bucket data = context.getBucketFactory(false).makeBucket(maxLen) + ) { + try (OutputStream output = data.getOutputStream()) { + if (decompressors != null) { + if (logMINOR) { + Logger.minor(this, "decompressing..."); + } + try ( + PipedInputStream pipeIn = new PipedInputStream(); + PipedOutputStream pipeOut = new PipedOutputStream() + ) { + pipeOut.connect(pipeIn); + DecompressorThreadManager decompressorManager = new DecompressorThreadManager(pipeIn, decompressors, maxLen); + try (InputStream pipeInNext = new BufferedInputStream(decompressorManager.execute())) { + ClientGetWorkerThread worker = new ClientGetWorkerThread(new BufferedInputStream(pipeInNext), output, null, null, ctx.getSchemeHostAndPort(), null, false, null, null, null, context.linkFilterExceptionProvider); + worker.start(); + streamGenerator.writeTo(pipeOut, context); + decompressorManager.waitFinished(); + worker.waitFinished(); + } + } + } else { + streamGenerator.writeTo(output, context); + } + } // Run directly - we are running on some thread somewhere, don't worry about it. 
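The USKFetcher hunk above puts the Bucket itself in the try-with-resources header, which presumes the resource type is AutoCloseable and that close() releases it. A minimal, hypothetical resource with that contract (this is not Freenet's Bucket API, only the shape the pattern needs):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

// Sketch: a temporary resource whose close() replaces an explicit free() call.
final class TempFileResource implements AutoCloseable {
    private final Path file;

    TempFileResource() throws IOException {
        this.file = Files.createTempFile("bucket", ".tmp");
    }

    Path path() {
        return file;
    }

    @Override
    public void close() throws IOException {
        Files.deleteIfExists(file);   // releasing the backing storage
    }
}

Used as try (TempFileResource data = new TempFileResource()) { ... }, the temporary file is removed even when the body throws.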
innerSuccess(data, context); } catch (Throwable t) { Logger.error(this, "Caught "+t, t); onFailure(new FetchException(FetchExceptionMode.INTERNAL_ERROR, t), state, context); - return; } finally { boolean dbrsFinished; synchronized(USKFetcher.this) { @@ -253,12 +250,9 @@ public void onSuccess(StreamGenerator streamGenerator, if(logMINOR) Logger.minor(this, "Remaining DBR attempts: "+dbrAttempts); dbrsFinished = dbrAttempts.isEmpty(); } - Closer.close(pipeOut); - Closer.close(pipeIn); - Closer.close(output); - if(dbrsFinished) + if (dbrsFinished) { onDBRsFinished(context); - Closer.close(data); + } } } private void innerSuccess(Bucket bucket, diff --git a/src/freenet/client/async/USKRetriever.java b/src/freenet/client/async/USKRetriever.java index ef19205bb3b..f87ffaee3c4 100644 --- a/src/freenet/client/async/USKRetriever.java +++ b/src/freenet/client/async/USKRetriever.java @@ -28,7 +28,6 @@ import freenet.support.api.Bucket; import freenet.support.compress.Compressor; import freenet.support.compress.DecompressorThreadManager; -import freenet.support.io.Closer; import freenet.support.io.InsufficientDiskSpaceException; import freenet.support.Logger.LogLevel; import freenet.support.io.NativeThread; @@ -101,11 +100,11 @@ public void onFoundEdition(long l, USK key, ClientContext context, boolean metad @Override public void onSuccess(StreamGenerator streamGenerator, ClientMetadata clientMetadata, List decompressors, final ClientGetState state, ClientContext context) { - if(logMINOR) + if(logMINOR) { Logger.minor(this, "Success on "+this+" from "+state+" : length "+streamGenerator.size()+"mime type "+clientMetadata.getMIMEType()); - DecompressorThreadManager decompressorManager = null; - OutputStream output = null; - Bucket finalResult = null; + } + + Bucket finalResult; long maxLen = Math.max(ctx.maxTempLength, ctx.maxOutputLength); try { finalResult = context.getBucketFactory(persistent()).makeBucket(maxLen); @@ -122,27 +121,26 @@ public void onSuccess(StreamGenerator streamGenerator, ClientMetadata clientMeta return; } - PipedInputStream pipeIn = null; - PipedOutputStream pipeOut = null; - try { - output = finalResult.getOutputStream(); + try ( + OutputStream output = finalResult.getOutputStream(); + ) { // Decompress if(decompressors != null) { - if(logMINOR) Logger.minor(this, "Decompressing..."); - pipeIn = new PipedInputStream(); - pipeOut = new PipedOutputStream(pipeIn); - decompressorManager = new DecompressorThreadManager(pipeIn, decompressors, maxLen); - pipeIn = decompressorManager.execute(); - ClientGetWorkerThread worker = new ClientGetWorkerThread(new BufferedInputStream(pipeIn), output, null, null, ctx.getSchemeHostAndPort(), null, false, null, null, null, context.linkFilterExceptionProvider); - worker.start(); - streamGenerator.writeTo(pipeOut, context); - worker.waitFinished(); - // If this throws, we want the whole request to fail. 
- pipeOut.close(); pipeOut = null; + if(logMINOR) { + Logger.minor(this, "Decompressing..."); + } + DecompressorThreadManager decompressorManager = new DecompressorThreadManager(new PipedInputStream(), decompressors, maxLen); + try ( + PipedInputStream pipeIn = decompressorManager.execute(); + PipedOutputStream pipeOut = new PipedOutputStream(pipeIn) + ) { + ClientGetWorkerThread worker = new ClientGetWorkerThread(new BufferedInputStream(pipeIn), output, null, null, ctx.getSchemeHostAndPort(), null, false, null, null, null, context.linkFilterExceptionProvider); + worker.start(); + streamGenerator.writeTo(pipeOut, context); + worker.waitFinished(); + } } else { - streamGenerator.writeTo(output, context); - // If this throws, we want the whole request to fail. - output.close(); output = null; + streamGenerator.writeTo(output, context); } } catch(IOException e) { Logger.error(this, "Caught "+e, e); @@ -151,9 +149,6 @@ public void onSuccess(StreamGenerator streamGenerator, ClientMetadata clientMeta Logger.error(this, "Caught "+t, t); onFailure(new FetchException(FetchExceptionMode.INTERNAL_ERROR, t), state, context); return; - } finally { - Closer.close(output); - Closer.close(pipeOut); } final FetchResult result = new FetchResult(clientMetadata, finalResult); diff --git a/src/freenet/client/filter/CSSReadFilter.java b/src/freenet/client/filter/CSSReadFilter.java index 6abd9c449c0..174c0c575dc 100644 --- a/src/freenet/client/filter/CSSReadFilter.java +++ b/src/freenet/client/filter/CSSReadFilter.java @@ -20,7 +20,6 @@ import freenet.support.LogThresholdCallback; import freenet.support.Logger; import freenet.support.Logger.LogLevel; -import freenet.support.io.Closer; import freenet.support.io.NullWriter; public class CSSReadFilter implements ContentDataFilter, CharsetExtractor { @@ -69,34 +68,29 @@ public void readFilter( } @Override - public String getCharset(byte [] input, int length, String charset) throws DataFilterException, IOException { - if(logDEBUG) + public String getCharset(byte [] input, int length, String charset) throws IOException { + if(logDEBUG) { Logger.debug(this, "Fetching charset for CSS with initial charset "+charset); + } if(input.length > getCharsetBufferSize() && logMINOR) { Logger.minor(this, "More data than was strictly needed was passed to the charset extractor for extraction"); } - InputStream strm = new ByteArrayInputStream(input, 0, length); - NullWriter w = new NullWriter(); - InputStreamReader isr; - BufferedReader r = null; - try { - try { - isr = new InputStreamReader(strm, charset); - r = new BufferedReader(isr, 32768); + try ( + InputStream strm = new ByteArrayInputStream(input, 0, length) + ) { + CSSParser parser; + try ( + InputStreamReader isr = new InputStreamReader(strm, charset); + BufferedReader r = new BufferedReader(isr, 32768); + NullWriter w = new NullWriter(); + ){ + parser = new CSSParser(r, w, false, new NullFilterCallback(), null, true, false); + parser.parse(); } catch(UnsupportedEncodingException e) { throw UnknownCharsetException.create(e, charset); } - CSSParser parser = new CSSParser(r, w, false, new NullFilterCallback(), null, true, false); - parser.parse(); - r.close(); - r = null; return parser.detectedCharset(); } - finally { - Closer.close(strm); - Closer.close(r); - Closer.close(w); - } } // CSS 2.1 section 4.4. 
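getCharset() above now probes the supplied byte prefix with the candidate charset inside try-with-resources and maps an unsupported encoding to a specific failure. A minimal sketch of that probing shape with the CSS parsing left out (names are illustrative):

import java.io.*;
import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.nio.charset.UnsupportedCharsetException;

// Sketch: decode only the given prefix with a candidate charset, failing distinctly
// when the charset itself is unknown rather than when the data is unreadable.
final class CharsetProbe {
    static String decodePrefix(byte[] input, int length, String candidateCharset) throws IOException {
        Charset cs;
        try {
            cs = Charset.forName(candidateCharset);
        } catch (IllegalCharsetNameException | UnsupportedCharsetException e) {
            throw new IOException("Unknown charset: " + candidateCharset, e);
        }
        try (Reader r = new BufferedReader(
                new InputStreamReader(new ByteArrayInputStream(input, 0, length), cs))) {
            StringBuilder sb = new StringBuilder();
            int c;
            while ((c = r.read()) != -1) {
                sb.append((char) c);
            }
            return sb.toString();
        }
    }
}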
diff --git a/src/freenet/client/filter/CSSTokenizerFilter.java b/src/freenet/client/filter/CSSTokenizerFilter.java index 9beec171ff8..9a0556470b2 100644 --- a/src/freenet/client/filter/CSSTokenizerFilter.java +++ b/src/freenet/client/filter/CSSTokenizerFilter.java @@ -20,7 +20,6 @@ import freenet.support.Fields; import freenet.support.Logger; import freenet.support.api.Bucket; -import freenet.support.io.Closer; import freenet.support.io.FileBucket; import java.io.File; @@ -4867,18 +4866,15 @@ public static void main(String arg[]) throws Throwable { fout.delete(); final Bucket inputBucket = new FileBucket(fin, true, false, false, false); final Bucket outputBucket = new FileBucket(fout, false, true, false, false); - InputStream inputStream = null; - OutputStream outputStream = null; - try { - inputStream = inputBucket.getInputStream(); - outputStream = outputBucket.getOutputStream(); + try ( + InputStream inputStream = inputBucket.getInputStream(); + OutputStream outputStream = outputBucket.getOutputStream() + ) { Logger.setupStdoutLogging(Logger.LogLevel.DEBUG, ""); ContentFilter.filter(inputStream, outputStream, "text/css", new URI("http://127.0.0.1:8888/freenet:USK@ZupQjDFZSc3I4orBpl1iTEAPZKo2733RxCUbZ2Q7iH0,EO8Tuf8SP3lnDjQdAPdCM2ve2RaUEN8m-hod3tQ5oQE,AQACAAE/jFreesite/19/Style/"), null, null, null, null); } finally { - Closer.close(inputStream); - Closer.close(outputStream); inputBucket.free(); outputBucket.free(); } diff --git a/src/freenet/client/filter/OggFilter.java b/src/freenet/client/filter/OggFilter.java index 26a4a3bc06f..864ef900f23 100644 --- a/src/freenet/client/filter/OggFilter.java +++ b/src/freenet/client/filter/OggFilter.java @@ -18,7 +18,6 @@ import java.util.Map; import freenet.l10n.NodeL10n; -import freenet.support.io.Closer; import freenet.support.io.CountedOutputStream; /** Filters Ogg container files. These containers contain one or more @@ -80,17 +79,21 @@ public void readFilter( * @throws IOException */ private boolean hasValidSubpage(OggPage page, OggPage nextPage) throws IOException { - OggPage subpage = null; + OggPage subpage; int pageCount = 0; - ByteArrayOutputStream data = null; - DataInputStream in = null; - try{ - //Populate a byte array with all the data in which a subpage might hide - data = new ByteArrayOutputStream(); + + //Populate a byte array with all the data in which a subpage might hide + byte[] pageData; + try (ByteArrayOutputStream data = new ByteArrayOutputStream()) { data.write(page.toArray()); - if(nextPage != null) data.write(nextPage.toArray()); - in = new DataInputStream(new ByteArrayInputStream(data.toByteArray())); - data.close(); + if (nextPage != null) { + data.write(nextPage.toArray()); + } + pageData = data.toByteArray(); + } + try ( + DataInputStream in = new DataInputStream(new ByteArrayInputStream(pageData)); + ) { while(true) { OggPage.seekToPage(in); in.mark(65307); @@ -103,27 +106,21 @@ private boolean hasValidSubpage(OggPage page, OggPage nextPage) throws IOExcepti } } catch(EOFException e) { //We've ran out of data to read. Break. 
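hasValidSubpage() concatenates the candidate pages into one in-memory buffer and then reads records until EOFException marks the end of the data, with stream cleanup now left to try-with-resources. A minimal sketch of that loop shape with a made-up record format in place of real Ogg pages:

import java.io.*;

// Sketch: scan an in-memory buffer record by record; EOFException terminates the loop.
final class RecordScan {
    static int countRecords(byte[] first, byte[] second) throws IOException {
        ByteArrayOutputStream data = new ByteArrayOutputStream();
        data.write(first);
        if (second != null) {
            data.write(second);
        }
        int count = 0;
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(data.toByteArray()))) {
            while (true) {
                int length = in.readInt();   // throws EOFException when the data runs out
                in.skipBytes(length);        // hypothetical length-prefixed record body
                count++;
            }
        } catch (EOFException e) {
            // expected: no more records
        }
        return count;
    }
}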
- in.close(); - } finally { - Closer.close(data); - Closer.close(in); } return (pageCount > 2 || hasValidSubpage(page)); } private boolean hasValidSubpage(OggPage page) throws IOException { - DataInputStream in = new DataInputStream(new ByteArrayInputStream(page.toArray())); - in.skip(1); //Break alignment with the first page - try { - while(true) { + try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(page.toArray()))) { + in.skip(1); //Break alignment with the first page + while (true) { OggPage subpage = OggPage.readPage(in); - if(subpage.headerValid()) return true; + if (subpage.headerValid()) { + return true; + } } - } catch(EOFException e) { - //We've ran out of data to read. Break. - in.close(); - } finally { - Closer.close(in); + } catch (EOFException e) { + //We've ran out of data to read. Break.; } return false; } diff --git a/src/freenet/client/filter/PNGFilter.java b/src/freenet/client/filter/PNGFilter.java index bfc90297094..44bb4704064 100644 --- a/src/freenet/client/filter/PNGFilter.java +++ b/src/freenet/client/filter/PNGFilter.java @@ -23,7 +23,6 @@ import freenet.support.Logger; import freenet.support.Logger.LogLevel; import freenet.support.api.Bucket; -import freenet.support.io.Closer; import freenet.support.io.FileBucket; /** @@ -338,18 +337,15 @@ public static void main(String arg[]) throws Throwable { fout.delete(); final Bucket inputBucket = new FileBucket(fin, true, false, false, false); final Bucket outputBucket = new FileBucket(fout, false, true, false, false); - InputStream inputStream = null; - OutputStream outputStream = null; - try { - inputStream = inputBucket.getInputStream(); - outputStream = outputBucket.getOutputStream(); + try ( + InputStream inputStream = inputBucket.getInputStream(); + OutputStream outputStream = outputBucket.getOutputStream() + ){ Logger.setupStdoutLogging(LogLevel.MINOR, ""); ContentFilter.filter(inputStream, outputStream, "image/png", new URI("http://127.0.0.1:8888/"), null, null, null, null); } finally { - Closer.close(inputStream); - Closer.close(outputStream); inputBucket.free(); outputBucket.free(); } diff --git a/src/freenet/clients/fcp/AddPeer.java b/src/freenet/clients/fcp/AddPeer.java index 10e9c5e07ab..337439816d2 100644 --- a/src/freenet/clients/fcp/AddPeer.java +++ b/src/freenet/clients/fcp/AddPeer.java @@ -31,7 +31,6 @@ import freenet.support.Logger; import freenet.support.MediaType; import freenet.support.SimpleFieldSet; -import freenet.support.io.Closer; public class AddPeer extends FCPMessage { @@ -75,36 +74,32 @@ public String getName() { } public static StringBuilder getReferenceFromURL(URL url) throws IOException { - StringBuilder ref = new StringBuilder(1024); - InputStream is = null; - try { - URLConnection uc = url.openConnection(); - is = uc.getInputStream(); + URLConnection uc = url.openConnection(); + try ( + InputStream is = uc.getInputStream(); BufferedReader in = new BufferedReader(new InputStreamReader(is, MediaType.getCharsetRobustOrUTF(uc.getContentType()))); + ) { String line; + StringBuilder ref = new StringBuilder(1024); while ((line = in.readLine()) != null) { ref.append( line ).append('\n'); } return ref; - } finally { - Closer.close(is); } } public static StringBuilder getReferenceFromFreenetURI(FreenetURI url, HighLevelSimpleClient client) throws IOException, FetchException { - StringBuilder ref = new StringBuilder(1024); // the 1024 is the initial capacity - InputStream is = null; - try { - is = client.fetch(url, 31000).asBucket().getInputStream(); // limit to 31k, which 
should suffice even if we add many more ipv6 addresses + try ( + InputStream is = client.fetch(url, 31000).asBucket().getInputStream(); // limit to 31k, which should suffice even if we add many more ipv6 addresses BufferedReader in = new BufferedReader(new InputStreamReader(is, MediaType.getCharsetRobustOrUTF("text/plain"))); + ) { + StringBuilder ref = new StringBuilder(1024); // the 1024 is the initial capacity String line; while ((line = in.readLine()) != null) { ref.append( line ).append('\n'); } return ref; - } finally { - Closer.close(is); } } diff --git a/src/freenet/clients/fcp/ClientPut.java b/src/freenet/clients/fcp/ClientPut.java index 0d96f9b3ce5..2edc07d0818 100644 --- a/src/freenet/clients/fcp/ClientPut.java +++ b/src/freenet/clients/fcp/ClientPut.java @@ -8,6 +8,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.util.Arrays; import java.util.Date; @@ -35,7 +36,6 @@ import freenet.support.Logger.LogLevel; import freenet.support.api.Bucket; import freenet.support.api.RandomAccessBucket; -import freenet.support.io.Closer; import freenet.support.io.ResumeFailedException; public class ClientPut extends ClientPutBase { @@ -229,27 +229,22 @@ public ClientPut(FCPConnectionHandler handler, ClientPutMessage message, FCPServ // Check the hash : allow it to be null for backward compatibility and if testDDA is allowed if(salt != null) { - MessageDigest md = SHA256.getMessageDigest(); byte[] foundHash; + MessageDigest md = SHA256.getMessageDigest(); try { - md.update(salt.getBytes("UTF-8")); - } catch (UnsupportedEncodingException e) { - throw new Error("Impossible: JVM doesn't support UTF-8: " + e, e); - } - InputStream is = null; - try { - is = data.getInputStream(); - SHA256.hash(is, md); - } catch (IOException e) { - SHA256.returnMessageDigest(md); - Logger.error(this, "Got IOE: " + e.getMessage(), e); - throw new MessageInvalidException(ProtocolErrorMessage.COULD_NOT_READ_FILE, + md.update(salt.getBytes(StandardCharsets.UTF_8)); + + try (InputStream is = data.getInputStream()) { + SHA256.hash(is, md); + } catch (IOException e) { + Logger.error(this, "Got IOE: " + e.getMessage(), e); + throw new MessageInvalidException(ProtocolErrorMessage.COULD_NOT_READ_FILE, "Unable to access file: " + e, identifier, global); + } + foundHash = md.digest(); } finally { - Closer.close(is); + SHA256.returnMessageDigest(md); } - foundHash = md.digest(); - SHA256.returnMessageDigest(md); if(logMINOR) Logger.minor(this, "FileHash result : we found " + Base64.encode(foundHash) + " and were given " + Base64.encode(saltedHash) + '.'); diff --git a/src/freenet/clients/fcp/FCPConnectionHandler.java b/src/freenet/clients/fcp/FCPConnectionHandler.java index c5cd3c92919..ecfe0966f46 100644 --- a/src/freenet/clients/fcp/FCPConnectionHandler.java +++ b/src/freenet/clients/fcp/FCPConnectionHandler.java @@ -7,6 +7,7 @@ import java.io.IOException; import java.net.MalformedURLException; import java.net.Socket; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; import java.util.Random; @@ -30,7 +31,6 @@ import freenet.support.Logger; import freenet.support.Logger.LogLevel; import freenet.support.api.BucketFactory; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; import freenet.support.io.NativeThread; @@ -797,18 +797,14 @@ protected DDACheckJob enqueueDDACheck(String path, boolean read, boolean write) // We don't 
want to attempt to write before: in case an IOException is raised, we want to inform the // client somehow that the node can't write there... And setting readFile to null means we won't inform // it on the status (as if it hadn't requested us to do the test). - FileOutputStream fos = null; - BufferedOutputStream bos = null; - try { - fos = new FileOutputStream(result.readFilename); - bos = new BufferedOutputStream(fos); - bos.write(result.readContent.getBytes("UTF-8")); + try ( + FileOutputStream fos = new FileOutputStream(result.readFilename); + BufferedOutputStream bos = new BufferedOutputStream(fos) + ) { + bos.write(result.readContent.getBytes(StandardCharsets.UTF_8)); bos.flush(); } catch (IOException e) { - Logger.error(this, "Got a IOE while creating the file (" + readFile.toString() + " ! " + e.getMessage()); - } finally { - Closer.close(bos); - Closer.close(fos); + Logger.error(this, "Got a IOE while creating the file (" + readFile + " ! " + e.getMessage()); } } diff --git a/src/freenet/clients/fcp/FCPConnectionInputHandler.java b/src/freenet/clients/fcp/FCPConnectionInputHandler.java index 3cecb77e766..ae0a5c8ecaf 100644 --- a/src/freenet/clients/fcp/FCPConnectionInputHandler.java +++ b/src/freenet/clients/fcp/FCPConnectionInputHandler.java @@ -13,7 +13,6 @@ import freenet.support.Logger; import freenet.support.SimpleFieldSet; import freenet.support.Logger.LogLevel; -import freenet.support.io.Closer; import freenet.support.io.LineReadingInputStream; import freenet.support.io.TooLongException; @@ -62,90 +61,92 @@ public void run() { } public void realRun() throws IOException { - InputStream is = new BufferedInputStream(handler.sock.getInputStream(), 4096); - LineReadingInputStream lis = new LineReadingInputStream(is); + try ( + InputStream is = new BufferedInputStream(handler.sock.getInputStream(), 4096); + LineReadingInputStream lis = new LineReadingInputStream(is) + ) { + boolean firstMessage = true; - boolean firstMessage = true; - - while(true) { - SimpleFieldSet fs; - if(WrapperManager.hasShutdownHookBeenTriggered()) { - FCPMessage msg = new ProtocolErrorMessage(ProtocolErrorMessage.SHUTTING_DOWN,true,"The node is shutting down","Node",false); - handler.send(msg); - Closer.close(is); - return; - } - // Read a message - String messageType = lis.readLine(128, 128, true); - if(messageType == null) { - Closer.close(is); - return; - } - if(messageType.equals("")) - continue; - fs = new SimpleFieldSet(lis, 4096, 128, true, true, true); + while (true) { + SimpleFieldSet fs; + if (WrapperManager.hasShutdownHookBeenTriggered()) { + FCPMessage msg = new ProtocolErrorMessage(ProtocolErrorMessage.SHUTTING_DOWN, true, "The node is shutting down", "Node", false); + handler.send(msg); + return; + } + // Read a message + String messageType = lis.readLine(128, 128, true); + if (messageType == null) { + return; + } + if ("".equals(messageType)) { + continue; + } + fs = new SimpleFieldSet(lis, 4096, 128, true, true, true); - // check for valid endmarker - if (!firstMessage && fs.getEndMarker() != null && (!fs.getEndMarker().startsWith("End")) && (!"Data".equals(fs.getEndMarker()))) { - FCPMessage err = new ProtocolErrorMessage(ProtocolErrorMessage.MESSAGE_PARSE_ERROR, false, "Invalid end marker: "+fs.getEndMarker(), fs.get("Identifer"), fs.getBoolean("Global", false)); - handler.send(err); - continue; - } + // check for valid endmarker + if (!firstMessage && fs.getEndMarker() != null && (!fs.getEndMarker().startsWith("End")) && (!"Data".equals(fs.getEndMarker()))) { + FCPMessage err = new 
ProtocolErrorMessage(ProtocolErrorMessage.MESSAGE_PARSE_ERROR, false, "Invalid end marker: " + fs.getEndMarker(), fs.get("Identifer"), fs.getBoolean("Global", false)); + handler.send(err); + continue; + } - FCPMessage msg; - try { - if(logDEBUG) - Logger.debug(this, "Incoming FCP message:\n"+messageType+'\n'+fs.toString()); - msg = FCPMessage.create(messageType, fs, handler.bf, handler.server.core.persistentTempBucketFactory); - if(msg == null) continue; - } catch (MessageInvalidException e) { - if(firstMessage) { + FCPMessage msg; + try { + if (logDEBUG) { + Logger.debug(this, "Incoming FCP message:\n" + messageType + '\n' + fs.toString()); + } + msg = FCPMessage.create(messageType, fs, handler.bf, handler.server.core.persistentTempBucketFactory); + if (msg == null) { + continue; + } + } catch (MessageInvalidException e) { + if (firstMessage) { + FCPMessage err = new ProtocolErrorMessage(ProtocolErrorMessage.CLIENT_HELLO_MUST_BE_FIRST_MESSAGE, true, null, null, false); + handler.send(err); + handler.close(); + return; + } else { + FCPMessage err = new ProtocolErrorMessage(e.protocolCode, false, e.getMessage(), e.ident, e.global); + handler.send(err); + } + continue; + } + if (firstMessage && !(msg instanceof ClientHelloMessage)) { FCPMessage err = new ProtocolErrorMessage(ProtocolErrorMessage.CLIENT_HELLO_MUST_BE_FIRST_MESSAGE, true, null, null, false); handler.send(err); handler.close(); - Closer.close(is); return; - } else { - FCPMessage err = new ProtocolErrorMessage(e.protocolCode, false, e.getMessage(), e.ident, e.global); + } + if (msg instanceof BaseDataCarryingMessage) { + // FIXME tidy up - coalesce with above and below try { } catch (MIE) {}'s? + try { + ((BaseDataCarryingMessage) msg).readFrom(lis, handler.bf, handler.server); + } catch (MessageInvalidException e) { + FCPMessage err = new ProtocolErrorMessage(e.protocolCode, false, e.getMessage(), e.ident, e.global); + handler.send(err); + continue; + } + } + if ((!firstMessage) && (msg instanceof ClientHelloMessage)) { + FCPMessage err = new ProtocolErrorMessage(ProtocolErrorMessage.NO_LATE_CLIENT_HELLOS, false, null, null, false); handler.send(err); + continue; } - continue; - } - if(firstMessage && !(msg instanceof ClientHelloMessage)) { - FCPMessage err = new ProtocolErrorMessage(ProtocolErrorMessage.CLIENT_HELLO_MUST_BE_FIRST_MESSAGE, true, null, null, false); - handler.send(err); - handler.close(); - Closer.close(is); - return; - } - if(msg instanceof BaseDataCarryingMessage) { - // FIXME tidy up - coalesce with above and below try { } catch (MIE) {}'s? 
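The realRun() rewrite here follows the pattern applied throughout this patch: the socket stream and the LineReadingInputStream become try-with-resources resources, so the per-branch Closer.close(is) calls on every return path can simply be dropped. A minimal sketch of the idiom, with a hypothetical readMessages() helper standing in for the real FCP message loop:

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;

class TryWithResourcesIdiom {

    // Old shape: every early return and every exception path must remember to close.
    void before(InputStream raw) throws IOException {
        InputStream is = null;
        try {
            is = new BufferedInputStream(raw, 4096);
            readMessages(is); // may return early or throw
        } finally {
            if (is != null) {
                try { is.close(); } catch (IOException ignored) {}
            }
        }
    }

    // New shape: the compiler emits the close() call for every exit path.
    void after(InputStream raw) throws IOException {
        try (InputStream is = new BufferedInputStream(raw, 4096)) {
            readMessages(is);
        }
    }

    private void readMessages(InputStream is) throws IOException {
        // placeholder for the real message loop
    }
}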
try { - ((BaseDataCarryingMessage)msg).readFrom(lis, handler.bf, handler.server); + if (logDEBUG) { + Logger.debug(this, "Parsed message: " + msg + " for " + handler); + } + msg.run(handler, handler.server.node); } catch (MessageInvalidException e) { FCPMessage err = new ProtocolErrorMessage(e.protocolCode, false, e.getMessage(), e.ident, e.global); handler.send(err); continue; } - } - if((!firstMessage) && (msg instanceof ClientHelloMessage)) { - FCPMessage err = new ProtocolErrorMessage(ProtocolErrorMessage.NO_LATE_CLIENT_HELLOS, false, null, null, false); - handler.send(err); - continue; - } - try { - if(logDEBUG) - Logger.debug(this, "Parsed message: "+msg+" for "+handler); - msg.run(handler, handler.server.node); - } catch (MessageInvalidException e) { - FCPMessage err = new ProtocolErrorMessage(e.protocolCode, false, e.getMessage(), e.ident, e.global); - handler.send(err); - continue; - } - firstMessage = false; - if(handler.isClosed()) { - Closer.close(is); - return; + firstMessage = false; + if (handler.isClosed()) { + return; + } } } } diff --git a/src/freenet/clients/fcp/FilterMessage.java b/src/freenet/clients/fcp/FilterMessage.java index c5db4f9c7d5..727444b875e 100644 --- a/src/freenet/clients/fcp/FilterMessage.java +++ b/src/freenet/clients/fcp/FilterMessage.java @@ -19,7 +19,6 @@ import freenet.support.SimpleFieldSet; import freenet.support.api.Bucket; import freenet.support.api.BucketFactory; -import freenet.support.io.Closer; import freenet.support.io.FileBucket; /** @@ -171,11 +170,10 @@ public void run(FCPConnectionHandler handler, Node node) throws MessageInvalidEx String resultCharset = null; String resultMimeType = null; boolean unsafe = false; - InputStream input = null; - OutputStream output = null; - try { - input = bucket.getInputStream(); - output = resultBucket.getOutputStream(); + try ( + InputStream input = bucket.getInputStream(); + OutputStream output = resultBucket.getOutputStream() + ) { FilterStatus status = applyFilter(input, output, handler.server.core.clientContext); resultCharset = status.charset; resultMimeType = status.mimeType; @@ -184,9 +182,6 @@ public void run(FCPConnectionHandler handler, Node node) throws MessageInvalidEx } catch (IOException e) { Logger.error(this, "IO error running content filter", e); throw new MessageInvalidException(ProtocolErrorMessage.INTERNAL_ERROR, e.toString(), identifier, false); - } finally { - Closer.close(input); - Closer.close(output); } FilterResultMessage response = new FilterResultMessage(identifier, resultCharset, resultMimeType, unsafe, resultBucket); handler.send(response); diff --git a/src/freenet/clients/http/ConnectionsToadlet.java b/src/freenet/clients/http/ConnectionsToadlet.java index eb5d752eff5..e9bf29b640b 100644 --- a/src/freenet/clients/http/ConnectionsToadlet.java +++ b/src/freenet/clients/http/ConnectionsToadlet.java @@ -49,7 +49,6 @@ import freenet.support.SizeUtil; import freenet.support.TimeUtil; import freenet.support.api.HTTPRequest; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; /** Base class for DarknetConnectionsToadlet and OpennetConnectionsToadlet */ @@ -664,24 +663,19 @@ public void handleMethodPOST(URI uri, final HTTPRequest request, ToadletContext return; } - StringBuilder ref = null; + StringBuilder ref; if (urltext.length() > 0) { // fetch reference from a URL - BufferedReader in = null; try { - try { - FreenetURI refUri = new FreenetURI(urltext); - ref = AddPeer.getReferenceFromFreenetURI(refUri, client); - } catch (MalformedURLException | 
FetchException e) { - Logger.warning(this, "Url cannot be used as Freenet URI, trying to fetch as URL: " + urltext); - URL url = new URL(urltext); - ref = AddPeer.getReferenceFromURL(url); - } + FreenetURI refUri = new FreenetURI(urltext); + ref = AddPeer.getReferenceFromFreenetURI(refUri, client); + } catch (MalformedURLException | FetchException e) { + Logger.warning(this, "Url cannot be used as Freenet URI, trying to fetch as URL: " + urltext); + URL url = new URL(urltext); + ref = AddPeer.getReferenceFromURL(url); } catch (IOException e) { - this.sendErrorPage(ctx, 200, l10n("failedToAddNodeTitle"), NodeL10n.getBase().getString("DarknetConnectionsToadlet.cantFetchNoderefURL", new String[] { "url" }, new String[] { urltext }), !isOpennet()); + this.sendErrorPage(ctx, 200, l10n("failedToAddNodeTitle"), NodeL10n.getBase().getString("DarknetConnectionsToadlet.cantFetchNoderefURL", new String[]{"url"}, new String[]{urltext}), !isOpennet()); return; - } finally { - Closer.close(in); } } else if (reftext.length() > 0) { // read from post data or file upload diff --git a/src/freenet/clients/http/ContentFilterToadlet.java b/src/freenet/clients/http/ContentFilterToadlet.java index 8c6afab994e..aee3577c242 100644 --- a/src/freenet/clients/http/ContentFilterToadlet.java +++ b/src/freenet/clients/http/ContentFilterToadlet.java @@ -21,7 +21,6 @@ import freenet.support.api.Bucket; import freenet.support.api.HTTPRequest; import freenet.support.api.HTTPUploadedFile; -import freenet.support.io.Closer; import freenet.support.io.FileBucket; import freenet.support.io.FileUtil; @@ -322,21 +321,17 @@ private void handleFilter(Bucket data, String mimeType, FilterOperation operatio } private FilterStatus applyFilter(Bucket input, Bucket output, String mimeType, FilterOperation operation, NodeClientCore core) - throws UnsafeContentTypeException, IOException { - InputStream inputStream = null; - OutputStream outputStream = null; - try { - inputStream = input.getInputStream(); - outputStream = output.getOutputStream(); + throws IOException { + try ( + InputStream inputStream = input.getInputStream(); + OutputStream outputStream = output.getOutputStream() + ) { return applyFilter(inputStream, outputStream, mimeType, operation, core); - } finally { - Closer.close(inputStream); - Closer.close(outputStream); } } private FilterStatus applyFilter(InputStream input, OutputStream output, String mimeType, FilterOperation operation, NodeClientCore core) - throws UnsafeContentTypeException, IOException { + throws IOException { URI fakeUri; try { fakeUri = new URI("http://127.0.0.1:8888/"); diff --git a/src/freenet/clients/http/FProxyFetchInProgress.java b/src/freenet/clients/http/FProxyFetchInProgress.java index 33e91532159..ec2101e1a53 100644 --- a/src/freenet/clients/http/FProxyFetchInProgress.java +++ b/src/freenet/clients/http/FProxyFetchInProgress.java @@ -38,7 +38,6 @@ import freenet.support.Logger; import freenet.support.Logger.LogLevel; import freenet.support.api.Bucket; -import freenet.support.io.Closer; /** * Fetching a page for a browser. @@ -198,8 +197,8 @@ private boolean checkCache(ClientContext context) { if(bogusUSK(context)) return false; CacheFetchResult result = context.downloadCache == null ? null : context.downloadCache.lookupInstant(uri, !fctx.filterData, false, null); if(result == null) return false; - Bucket data = null; - String mimeType = null; + + if((!fctx.filterData) && (!result.alreadyFiltered)) { if(fctx.overrideMIME == null || fctx.overrideMIME.equals(result.getMimeType())) { // Works as-is. 
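Several hunks above (ClientPut, FCPConnectionHandler) and below make the same charset substitution: String.getBytes("UTF-8") looks the charset up by name and therefore declares UnsupportedEncodingException, even though UTF-8 is guaranteed to be present, while the StandardCharsets constants are resolved at compile time and throw nothing. A small illustrative sketch, not tied to any class in this patch:

import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;

class CharsetLookupSketch {

    // Lookup by name: the checked exception can never happen for UTF-8,
    // but still has to be caught or declared.
    static byte[] byName(String s) {
        try {
            return s.getBytes("UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new Error("Impossible: JVM doesn't support UTF-8", e);
        }
    }

    // Constant: no checked exception, no dead catch block.
    static byte[] byConstant(String s) {
        return s.getBytes(StandardCharsets.UTF_8);
    }
}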
@@ -230,44 +229,41 @@ private boolean checkCache(ClientContext context) { return false; } } - data = result.asBucket(); - mimeType = result.getMimeType(); - if(mimeType == null || mimeType.equals("")) mimeType = DefaultMIMETypes.DEFAULT_MIME_TYPE; + Bucket resultData = result.asBucket(); + String resultMimeType = result.getMimeType(); + if(resultMimeType == null || resultMimeType.equals("")) resultMimeType = DefaultMIMETypes.DEFAULT_MIME_TYPE; if(fctx.overrideMIME != null && !result.alreadyFiltered) - mimeType = fctx.overrideMIME; - else if(fctx.overrideMIME != null && !mimeType.equals(fctx.overrideMIME)) { + resultMimeType = fctx.overrideMIME; + else if(fctx.overrideMIME != null && !resultMimeType.equals(fctx.overrideMIME)) { // Doesn't work. return false; } - String fullMimeType = mimeType; - mimeType = ContentFilter.stripMIMEType(mimeType); - FilterMIMEType type = ContentFilter.getMIMEType(mimeType); + String fullMimeType = resultMimeType; + resultMimeType = ContentFilter.stripMIMEType(resultMimeType); + FilterMIMEType type = ContentFilter.getMIMEType(resultMimeType); if(type == null || ((!type.safeToRead) && type.readFilter == null)) { - UnknownContentTypeException e = new UnknownContentTypeException(mimeType); - data.free(); - onFailure(new FetchException(e.getFetchErrorCode(), data.size(), e, mimeType), null); + UnknownContentTypeException e = new UnknownContentTypeException(resultMimeType); + resultData.free(); + onFailure(new FetchException(e.getFetchErrorCode(), resultData.size(), e, resultMimeType), null); return true; } else if(type.safeToRead) { tracker.removeFetcher(this); - onSuccess(new FetchResult(new ClientMetadata(mimeType), data), null); + onSuccess(new FetchResult(new ClientMetadata(resultMimeType), resultData), null); return true; } else { // Try to filter it. - Bucket output = null; - InputStream is = null; - OutputStream os = null; - try { - output = context.tempBucketFactory.makeBucket(-1); - is = data.getInputStream(); - os = output.getOutputStream(); - ContentFilter.filter(is, os, fullMimeType, uri.toURI("/"), fctx.getSchemeHostAndPort(), null, null, fctx.charset, context.linkFilterExceptionProvider); - is.close(); - is = null; - os.close(); - os = null; + try ( + Bucket input = resultData; + Bucket output = context.tempBucketFactory.makeBucket(-1); + ) { + try ( + InputStream is = input.getInputStream(); + OutputStream os = output.getOutputStream() + ) { + ContentFilter.filter(is, os, fullMimeType, uri.toURI("/"), fctx.getSchemeHostAndPort(), null, null, fctx.charset, context.linkFilterExceptionProvider); + } // Since we are not re-using the data bucket, we can happily stay in the FProxyFetchTracker. 
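The nesting above makes the close order explicit: the inner try closes os and is first, so the filtered data is flushed into the output bucket before onSuccess() is invoked just below, and the outer resources are released last, in reverse declaration order. A tiny self-contained sketch of that ordering, deliberately using plain AutoCloseable rather than the Freenet Bucket types:

class CloseOrderSketch {
    public static void main(String[] args) throws Exception {
        try (AutoCloseable outerA = () -> System.out.println("close outerA");
             AutoCloseable outerB = () -> System.out.println("close outerB")) {
            try (AutoCloseable inner = () -> System.out.println("close inner")) {
                System.out.println("work");
            }
            System.out.println("inner is already closed here");
        }
        // prints: work, close inner, inner is already closed here, close outerB, close outerA
    }
}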
this.onSuccess(new FetchResult(new ClientMetadata(fullMimeType), output), null); - output = null; return true; } catch (IOException e) { Logger.normal(this, "Failed filtering coalesced data in fproxy"); @@ -277,11 +273,6 @@ else if(fctx.overrideMIME != null && !mimeType.equals(fctx.overrideMIME)) { } catch (URISyntaxException e) { Logger.error(this, "Impossible: "+e, e); return false; - } finally { - Closer.close(is); - Closer.close(os); - Closer.close(output); - Closer.close(data); } } } diff --git a/src/freenet/clients/http/FProxyToadlet.java b/src/freenet/clients/http/FProxyToadlet.java index 2f5a86d79df..63bc00a9831 100644 --- a/src/freenet/clients/http/FProxyToadlet.java +++ b/src/freenet/clients/http/FProxyToadlet.java @@ -72,7 +72,6 @@ import freenet.support.api.BucketFactory; import freenet.support.api.HTTPRequest; import freenet.support.io.BucketTools; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; import freenet.support.io.NoFreeBucket; @@ -268,21 +267,18 @@ private void handleDownload(ToadletContext context, Bucket data, BucketFactory b if (range[1] == -1 || range[1] >= size) { range[1] = size - 1; } - InputStream is = null; - OutputStream os = null; + Bucket tmpRange = bucketFactory.makeBucket(range[1] - range[0]); - try { - is = data.getInputStream(); - os = tmpRange.getOutputStream(); - if (range[0] > 0) + try ( + InputStream is = data.getInputStream(); + OutputStream os = tmpRange.getOutputStream() + ) { + if (range[0] > 0) { FileUtil.skipFully(is, range[0]); + } FileUtil.copy(is, os, range[1] - range[0] + 1); // FIXME catch IOException here and tell the user there is a problem instead of just closing the connection. // Currently there is no way to tell the difference between an IOE caused by the connection to the client and an internal one, we just close the connection in both cases. - os.close(); os = null; // If we can't write, we need to throw, so we don't send too little data. - } finally { - Closer.close(is); - Closer.close(os); } retHdr.put("Content-Range", "bytes " + range[0] + "-" + range[1] + "/" + size); retHdr.put("X-Content-Type-Options", "nosniff"); @@ -421,20 +417,16 @@ public static String l10n(String msg) { * REDFLAG Expect future security issues! * @throws IOException */ private static boolean isSniffedAsFeed(Bucket data) throws IOException { - DataInputStream is = null; - try { - int sz = (int) Math.min(data.size(), 512); - if(sz == 0) - return false; - is = new DataInputStream(data.getInputStream()); - byte[] buf = new byte[sz]; + int sz = (int) Math.min(data.size(), 512L); + if(sz == 0) { + return false; + } + byte[] buf = new byte[sz]; + try (DataInputStream is = new DataInputStream(data.getInputStream())){ // FIXME Fortunately firefox doesn't detect RSS in UTF16 etc ... 
yet is.readFully(buf); return RssSniffer.isSniffedAsFeed(buf); } - finally { - Closer.close(is); - } } public void handleMethodGET(URI uri, HTTPRequest httprequest, ToadletContext ctx) diff --git a/src/freenet/clients/http/HTTPRequestImpl.java b/src/freenet/clients/http/HTTPRequestImpl.java index 0d8b2faf6c2..13db8930a09 100644 --- a/src/freenet/clients/http/HTTPRequestImpl.java +++ b/src/freenet/clients/http/HTTPRequestImpl.java @@ -11,6 +11,7 @@ import java.net.URI; import java.net.URISyntaxException; import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; @@ -36,7 +37,6 @@ import freenet.support.api.HTTPUploadedFile; import freenet.support.api.RandomAccessBucket; import freenet.support.io.BucketTools; -import freenet.support.io.Closer; import freenet.support.io.LineReadingInputStream; /** @@ -474,55 +474,61 @@ public int[] getMultipleIntParam(String name) { * params, whereas if it is multipart/form-data it will be separated into buckets. */ private void parseMultiPartData() throws IOException { - InputStream is = null; - LineReadingInputStream lis = null; - OutputStream bucketos = null; - - try { - if(data == null) - return; - String ctype = this.headers.get("content-type"); - if(ctype == null) - return; - if(logMINOR) - Logger.minor(this, "Uploaded content-type: " + ctype); - String[] ctypeparts = ctype.split(";"); - if(ctypeparts[0].equalsIgnoreCase("application/x-www-form-urlencoded")) { - // Completely different encoding, but easy to handle - if(data.size() > 1024 * 1024) - throw new IOException("Too big"); - byte[] buf = BucketTools.toByteArray(data); - String s = new String(buf, "us-ascii"); - parseRequestParameters(s, true, true); - } - if(!ctypeparts[0].trim().equalsIgnoreCase("multipart/form-data") || (ctypeparts.length < 2)) - return; - - String boundary = null; - for(String ctypepart: ctypeparts) { - String[] subparts = ctypepart.split("="); - if((subparts.length == 2) && subparts[0].trim().equalsIgnoreCase("boundary")) - boundary = subparts[1]; + if (data == null) { + return; + } + String ctype = this.headers.get("content-type"); + if (ctype == null) { + return; + } + if (logMINOR) { + Logger.minor(this, "Uploaded content-type: " + ctype); + } + String[] ctypeparts = ctype.split(";"); + if (ctypeparts[0].equalsIgnoreCase("application/x-www-form-urlencoded")) { + // Completely different encoding, but easy to handle + if (data.size() > 1024 * 1024) { + throw new IOException("Too big"); } + byte[] buf = BucketTools.toByteArray(data); + String s = new String(buf, StandardCharsets.US_ASCII); + parseRequestParameters(s, true, true); + } + if (!ctypeparts[0].trim().equalsIgnoreCase("multipart/form-data") || (ctypeparts.length < 2)) { + return; + } - if((boundary == null) || (boundary.length() == 0)) - return; - if(boundary.charAt(0) == '"') - boundary = boundary.substring(1); - if(boundary.charAt(boundary.length() - 1) == '"') - boundary = boundary.substring(0, boundary.length() - 1); + String boundary = null; + for (String ctypepart : ctypeparts) { + String[] subparts = ctypepart.split("="); + if ((subparts.length == 2) && subparts[0].trim().equalsIgnoreCase("boundary")) { + boundary = subparts[1]; + } + } - boundary = "--" + boundary; + if ((boundary == null) || (boundary.length() == 0)) { + return; + } + if (boundary.charAt(0) == '"') { + boundary = boundary.substring(1); + } + if (boundary.charAt(boundary.length() - 1) == '"') { + boundary = boundary.substring(0, boundary.length() 
- 1); + } - if(logMINOR) - Logger.minor(this, "Boundary is: " + boundary); + boundary = "--" + boundary; - is = this.data.getInputStream(); - lis = new LineReadingInputStream(is); + if (logMINOR) { + Logger.minor(this, "Boundary is: " + boundary); + } + try ( + InputStream is = this.data.getInputStream(); + LineReadingInputStream lis = new LineReadingInputStream(is) + ) { String line; line = lis.readLine(100, 100, false); // really it's US-ASCII, but ISO-8859-1 is close enough. - while((is.available() > 0) && !line.equals(boundary)) { + while ((is.available() > 0) && !line.equals(boundary)) { line = lis.readLine(100, 100, false); } @@ -533,96 +539,97 @@ private void parseMultiPartData() throws IOException { String filename = null; String contentType = null; - while(is.available() > 0) { + while (is.available() > 0) { name = null; filename = null; contentType = null; // chomp headers - while((line = lis.readLine(200, 200, true)) /* should be UTF-8 as we told the browser to send UTF-8 */ != null) { - if(line.length() == 0) + while ((line = lis.readLine(200, 200, true)) /* should be UTF-8 as we told the browser to send UTF-8 */ != null) { + if (line.length() == 0) { break; + } String[] lineparts = line.split(":"); - if(lineparts == null || lineparts.length == 0) + if (lineparts.length == 0) { continue; + } String hdrname = lineparts[0].trim(); - if(hdrname.equalsIgnoreCase("Content-Disposition")) { - if(lineparts.length < 2) + if (hdrname.equalsIgnoreCase("Content-Disposition")) { + if (lineparts.length < 2) { continue; + } String[] valueparts = lineparts[1].split(";"); - for(int i = 0; i < valueparts.length; i++) { - String[] subparts = valueparts[i].split("="); - if(subparts.length != 2) + for (String valuepart : valueparts) { + String[] subparts = valuepart.split("="); + if (subparts.length != 2) { continue; + } String fieldname = subparts[0].trim(); String value = subparts[1].trim(); - if(value.startsWith("\"") && value.endsWith("\"")) + if (value.startsWith("\"") && value.endsWith("\"")) { value = value.substring(1, value.length() - 1); - if(fieldname.equalsIgnoreCase("name")) + } + if (fieldname.equalsIgnoreCase("name")) { name = value; - else if(fieldname.equalsIgnoreCase("filename")) + } else if (fieldname.equalsIgnoreCase("filename")) { filename = value; + } } - } - else if(hdrname.equalsIgnoreCase("Content-Type")) { + } else if (hdrname.equalsIgnoreCase("Content-Type")) { contentType = lineparts[1].trim(); - if(logMINOR) + if (logMINOR) { Logger.minor(this, "Parsed type: " + contentType); - } - else { - // Do nothing, irrelevant header + } + } else { + // Do nothing, irrelevant header } } - if(name == null) + if (name == null) { continue; + } // we should be at the data now. Start reading it in, checking for the - // boundary string + // boundary string // we can only give an upper bound for the size of the bucket filedata = this.bucketfactory.makeBucket(is.available()); - bucketos = filedata.getOutputStream(); - // buffer characters that match the boundary so far - // FIXME use whatever charset was used - byte[] bbound = boundary.getBytes("UTF-8"); // ISO-8859-1? 
boundary should be in US-ASCII - int offset = 0; - while((is.available() > 0) && (offset < bbound.length)) { - byte b = (byte) is.read(); - - if(b == bbound[offset]) - offset++; - else if((b != bbound[offset]) && (offset > 0)) { - // offset bytes matched, but no more - // write the bytes that matched, then the non-matching byte - bucketos.write(bbound, 0, offset); - offset = 0; - if(b == bbound[0]) - offset = 1; - else + try (OutputStream bucketos = filedata.getOutputStream()) { + // buffer characters that match the boundary so far + // FIXME use whatever charset was used + byte[] bbound = boundary.getBytes(StandardCharsets.UTF_8); // ISO-8859-1? boundary should be in US-ASCII + int offset = 0; + while ((is.available() > 0) && (offset < bbound.length)) { + byte b = (byte) is.read(); + + if (b == bbound[offset]) { + offset++; + } else if ((b != bbound[offset]) && (offset > 0)) { + // offset bytes matched, but no more + // write the bytes that matched, then the non-matching byte + bucketos.write(bbound, 0, offset); + offset = 0; + if (b == bbound[0]) { + offset = 1; + } else { + bucketos.write(b); + } + } else { bucketos.write(b); + } } - else - bucketos.write(b); } + } - bucketos.close(); - bucketos = null; - - parts.put(name, filedata); - if(logMINOR) - Logger.minor(this, "Name = " + name + " length = " + filedata.size() + " filename = " + filename); - if(filename != null) - uploadedFiles.put(name, new HTTPUploadedFileImpl(filename, contentType, filedata)); + parts.put(name, filedata); + if (logMINOR) { + Logger.minor(this, "Name = " + name + " length = " + filedata.size() + " filename = " + filename); + } + if (filename != null) { + uploadedFiles.put(name, new HTTPUploadedFileImpl(filename, contentType, filedata)); } - } - finally { - Closer.close(bucketos); - Closer.close(lis); - Closer.close(is); - Closer.close(is); } } @@ -700,28 +707,27 @@ private String getPartAsLimitedString(Bucket part, int maxLength) { @Override @Deprecated public byte[] getPartAsBytes(String name, int maxlength) { - if(freedParts) throw new IllegalStateException("Already freed"); + if (freedParts) { + throw new IllegalStateException("Already freed"); + } Bucket part = this.parts.get(name); - if(part == null) return new byte[0]; - - if (part.size() > maxlength) return new byte[0]; - - InputStream is = null; - DataInputStream dis = null; - try { - is = part.getInputStream(); - dis = new DataInputStream(is); - byte[] buf = new byte[(int)Math.min(part.size(), maxlength)]; + if(part == null) { + return new byte[0]; + } + if (part.size() > maxlength) { + return new byte[0]; + } + byte[] buf = new byte[(int)Math.min(part.size(), maxlength)]; + try ( + InputStream is = part.getInputStream(); + DataInputStream dis = new DataInputStream(is); + ){ dis.readFully(buf); return buf; } catch (IOException ioe) { - Logger.error(this, "Caught IOE:" + ioe.getMessage()); - } finally { - Closer.close(dis); - if(dis == null) Closer.close(is); // DataInputStream.close() does this for us normally + Logger.error(this, "Caught IOE:" + ioe.getMessage()); + return new byte[0]; } - - return new byte[0]; } @Override @@ -746,20 +752,16 @@ public byte[] getPartAsBytesFailsafe(String name, int maxLength) { } private byte[] getPartAsLimitedBytes(Bucket part, int maxLength) { - InputStream is = null; - DataInputStream dis = null; - try { - is = part.getInputStream(); - dis = new DataInputStream(is); - byte[] buf = new byte[(int)Math.min(part.size(), maxLength)]; + byte[] buf = new byte[(int)Math.min(part.size(), maxLength)]; + try ( + InputStream 
is = part.getInputStream(); + DataInputStream dis = new DataInputStream(is) + ) { dis.readFully(buf, 0, buf.length); return buf; } catch (IOException ioe) { Logger.error(this, "Caught IOE:" + ioe.getMessage()); return new byte[0]; - } finally { - Closer.close(dis); - if(dis == null) Closer.close(is); // DataInputStream.close() does this for us normally } } diff --git a/src/freenet/clients/http/QueueToadlet.java b/src/freenet/clients/http/QueueToadlet.java index f94dd5d76f9..45011725c77 100644 --- a/src/freenet/clients/http/QueueToadlet.java +++ b/src/freenet/clients/http/QueueToadlet.java @@ -18,6 +18,7 @@ import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; import java.text.NumberFormat; import java.util.ArrayList; import java.util.Arrays; @@ -86,7 +87,6 @@ import freenet.support.api.HTTPUploadedFile; import freenet.support.api.RandomAccessBucket; import freenet.support.io.BucketTools; -import freenet.support.io.Closer; import freenet.support.io.FileBucket; import freenet.support.io.FileUtil; import freenet.support.io.NativeThread; @@ -2409,17 +2409,19 @@ public boolean run(ClientContext context) { } private boolean readCompletedIdentifiers(File file) { - FileInputStream fis = null; - try { - fis = new FileInputStream(file); + try ( + FileInputStream fis = new FileInputStream(file); BufferedInputStream bis = new BufferedInputStream(fis); - InputStreamReader isr = new InputStreamReader(bis, "UTF-8"); - BufferedReader br = new BufferedReader(isr); + InputStreamReader isr = new InputStreamReader(bis, StandardCharsets.UTF_8); + BufferedReader br = new BufferedReader(isr) + ){ synchronized(completedRequestIdentifiers) { completedRequestIdentifiers.clear(); while(true) { String identifier = br.readLine(); - if(identifier == null) return true; + if(identifier == null) { + return true; + } completedRequestIdentifiers.add(identifier); } } @@ -2434,8 +2436,6 @@ private boolean readCompletedIdentifiers(File file) { } catch (IOException e) { Logger.error(this, "Could not read completed identifiers list from "+file); return false; - } finally { - Closer.close(fis); } } diff --git a/src/freenet/clients/http/WelcomeToadlet.java b/src/freenet/clients/http/WelcomeToadlet.java index 944ba16bb67..090fdeb4b98 100644 --- a/src/freenet/clients/http/WelcomeToadlet.java +++ b/src/freenet/clients/http/WelcomeToadlet.java @@ -31,7 +31,6 @@ import freenet.support.Logger.LogLevel; import freenet.support.api.HTTPRequest; import freenet.support.api.RandomAccessBucket; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; import freenet.support.io.LineReadingInputStream; @@ -624,17 +623,16 @@ public static void maybeDisplayWrapperLogfile(ToadletContext ctx, HTMLNode conte long logSize = logs.length(); if(logs.exists() && logs.isFile() && logs.canRead() && (logSize > 0)) { HTMLNode logInfoboxContent = ctx.getPageMaker().getInfobox("infobox-info", "Current status", contentNode, "start-progress", true); - LineReadingInputStream logreader = null; - try { - logreader = FileUtil.getLogTailReader(logs, 2000); + try ( + LineReadingInputStream logreader = FileUtil.getLogTailReader(logs, 2000) + ){ String line; while ((line = logreader.readLine(100000, 200, true)) != null) { logInfoboxContent.addChild("#", line); logInfoboxContent.addChild("br"); } - } catch(IOException e) {} - finally { - Closer.close(logreader); + } catch(IOException ignored) { + // ignore } } } @@ -650,12 +648,8 @@ public static void 
maybeDisplayWrapperLogfile(ToadletContext ctx, HTMLNode conte * @throws IOException if an I/O error occurs */ private static String readLogTail(File logfile, long byteLimit) throws IOException { - LineReadingInputStream stream = null; - try { - stream = FileUtil.getLogTailReader(logfile, byteLimit); + try (LineReadingInputStream stream = FileUtil.getLogTailReader(logfile, byteLimit)) { return FileUtil.readUTF(stream).toString(); - } finally { - Closer.close(stream); } } diff --git a/src/freenet/clients/http/bookmark/BookmarkManager.java b/src/freenet/clients/http/bookmark/BookmarkManager.java index c51dd41055b..63243e01530 100644 --- a/src/freenet/clients/http/bookmark/BookmarkManager.java +++ b/src/freenet/clients/http/bookmark/BookmarkManager.java @@ -30,7 +30,6 @@ import freenet.support.Logger; import freenet.support.Logger.LogLevel; import freenet.support.SimpleFieldSet; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; public class BookmarkManager implements RequestClient { @@ -359,26 +358,25 @@ public void run() { public void storeBookmarks() { Logger.normal(this, "Attempting to save bookmarks to " + bookmarksFile.toString()); - SimpleFieldSet sfs = null; + SimpleFieldSet sfs; synchronized(bookmarks) { - if(isSavingBookmarks) + if(isSavingBookmarks) { return; + } isSavingBookmarks = true; sfs = toSimpleFieldSet(); } - FileOutputStream fos = null; try { - fos = new FileOutputStream(backupBookmarksFile); - sfs.writeToBigBuffer(fos); - fos.close(); - fos = null; - if(!FileUtil.renameTo(backupBookmarksFile, bookmarksFile)) - Logger.error(this, "Unable to rename " + backupBookmarksFile.toString() + " to " + bookmarksFile.toString()); + try (FileOutputStream fos = new FileOutputStream(backupBookmarksFile)) { + sfs.writeToBigBuffer(fos); + } + if(!FileUtil.renameTo(backupBookmarksFile, bookmarksFile)) { + Logger.error(this, "Unable to rename " + backupBookmarksFile + " to " + bookmarksFile); + } } catch(IOException ioe) { Logger.error(this, "An error has occured saving the bookmark file :" + ioe.getMessage(), ioe); } finally { - Closer.close(fos); synchronized(bookmarks) { isSavingBookmarks = false; } diff --git a/src/freenet/clients/http/geoip/IPConverter.java b/src/freenet/clients/http/geoip/IPConverter.java index f0d4861027c..5c548600b90 100644 --- a/src/freenet/clients/http/geoip/IPConverter.java +++ b/src/freenet/clients/http/geoip/IPConverter.java @@ -6,6 +6,7 @@ import java.io.RandomAccessFile; import java.lang.ref.SoftReference; import java.lang.ref.WeakReference; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.HashMap; import java.util.LinkedHashMap; @@ -224,9 +225,9 @@ public static IPConverter getInstance(File file) { * @throws IOException */ private Cache readRanges() { - RandomAccessFile raf; - try { - raf = new RandomAccessFile(dbFile, "r"); + try ( + RandomAccessFile raf = new RandomAccessFile(dbFile, "r") + ){ String line; do { line = raf.readLine(); @@ -244,7 +245,7 @@ private Cache readRanges() { String code = line.substring(offset, offset + 2); // Ip String ipcode = line.substring(offset + 2, offset + 7); - long ip = decodeBase85(ipcode.getBytes("ISO-8859-1")); + long ip = decodeBase85(ipcode.getBytes(StandardCharsets.ISO_8859_1)); try { Country country = Country.valueOf(code); codes[i] = (short) country.ordinal(); @@ -255,7 +256,6 @@ private Cache readRanges() { } ips[i] = (int)ip; } - raf.close(); return new Cache(codes, ips); } catch (FileNotFoundException e) { // Not downloaded yet diff --git 
a/src/freenet/config/FilePersistentConfig.java b/src/freenet/config/FilePersistentConfig.java index 72b2b1db7be..15b1c84523c 100644 --- a/src/freenet/config/FilePersistentConfig.java +++ b/src/freenet/config/FilePersistentConfig.java @@ -15,7 +15,6 @@ import freenet.support.Logger; import freenet.support.SimpleFieldSet; import freenet.support.Logger.LogLevel; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; import freenet.support.io.LineReadingInputStream; @@ -112,20 +111,16 @@ protected FilePersistentConfig(SimpleFieldSet origFS, File fnam, File temp, Stri /** Load the config file into a SimpleFieldSet. * @throws IOException */ private static SimpleFieldSet initialLoad(File toRead) throws IOException { - if(toRead == null) return null; - FileInputStream fis = null; - BufferedInputStream bis = null; - LineReadingInputStream lis = null; - try { - fis = new FileInputStream(toRead); - bis = new BufferedInputStream(fis); - lis = new LineReadingInputStream(bis); + if(toRead == null) { + return null; + } + try ( + FileInputStream fis = new FileInputStream(toRead); + BufferedInputStream bis = new BufferedInputStream(fis); + LineReadingInputStream lis = new LineReadingInputStream(bis) + ) { // Config file is UTF-8 too! return new SimpleFieldSet(lis, 1024*1024, 128, true, true, true); // FIXME? advanced users may edit the config file, hence true? - } finally { - Closer.close(lis); - Closer.close(bis); - Closer.close(fis); } } @@ -154,26 +149,21 @@ public void store() { /** Don't call without taking storeSync first */ protected final void innerStore() throws IOException { - if(!finishedInit) + if(!finishedInit) { throw new IllegalStateException("SHOULD NOT HAPPEN!!"); + } SimpleFieldSet fs = exportFieldSet(); - if(logMINOR) + if(logMINOR) { Logger.minor(this, "fs = " + fs); - FileOutputStream fos = null; - try { - fos = new FileOutputStream(tempFilename); + } + try (FileOutputStream fos = new FileOutputStream(tempFilename)){ synchronized(this) { fs.setHeader(header); fs.writeToBigBuffer(fos); } - fos.close(); - fos = null; - FileUtil.renameTo(tempFilename, filename); - } - finally { - Closer.close(fos); } + FileUtil.renameTo(tempFilename, filename); } public void finishedInit() { diff --git a/src/freenet/config/WrapperConfig.java b/src/freenet/config/WrapperConfig.java index 47be2007e46..deaf59a5bfb 100644 --- a/src/freenet/config/WrapperConfig.java +++ b/src/freenet/config/WrapperConfig.java @@ -15,7 +15,6 @@ import freenet.node.NodeInitException; import freenet.support.Logger; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; /** @@ -81,27 +80,23 @@ public static synchronized boolean setWrapperProperty(String name, String value) newConfig = new File("wrapper.conf.new"); wrapperDir="."; } - FileInputStream fis = null; - FileOutputStream fos = null; - - try { + - fis = new FileInputStream(oldConfig); + try ( + FileInputStream fis = new FileInputStream(oldConfig); BufferedInputStream bis = new BufferedInputStream(fis); InputStreamReader isr = new InputStreamReader(bis); BufferedReader br = new BufferedReader(isr); - - fos = new FileOutputStream(newConfig); + + FileOutputStream fos = new FileOutputStream(newConfig); OutputStreamWriter osw = new OutputStreamWriter(fos); - BufferedWriter bw = new BufferedWriter(osw); - - String line; - + BufferedWriter bw = new BufferedWriter(osw) + ) { boolean written = false; boolean writtenReload = false; - + + String line; while((line = br.readLine()) != null) { - if(line.startsWith(name+"=")) { 
bw.write(name+'='+value+'\n'); written = true; @@ -111,28 +106,20 @@ public static synchronized boolean setWrapperProperty(String name, String value) } else { bw.write(line+'\n'); } - } - br.close(); - fis = null; - if(!written) + if(!written) { bw.write(name+'='+value+'\n'); - if(!writtenReload) + } + if(!writtenReload) { bw.write("wrapper.restart.reload_configuration=TRUE\n"); - bw.close(); - fos = null; + } } catch(IOException e) { - Closer.close(fis); - Closer.close(fos); - fis = null; - fos = null; - if(oldConfig.exists()) newConfig.delete(); + if(oldConfig.exists()) { + newConfig.delete(); + } Logger.error(WrapperConfig.class, "Cannot update wrapper property "+"name: "+e, e); System.err.println("Unable to update wrapper property "+name+" : "+e); return false; - } finally { - Closer.close(fis); - Closer.close(fos); } if(!newConfig.renameTo(oldConfig)) { diff --git a/src/freenet/crypt/JceLoader.java b/src/freenet/crypt/JceLoader.java index f0ef8488019..c428c9fd524 100644 --- a/src/freenet/crypt/JceLoader.java +++ b/src/freenet/crypt/JceLoader.java @@ -6,6 +6,7 @@ import java.io.OutputStream; import java.io.OutputStreamWriter; import java.lang.reflect.Constructor; +import java.nio.charset.StandardCharsets; import java.security.GeneralSecurityException; import java.security.Provider; import java.security.Security; @@ -15,7 +16,6 @@ import javax.crypto.KeyGenerator; import freenet.support.Logger; -import freenet.support.io.Closer; public class JceLoader { static public final Provider BouncyCastle; @@ -122,19 +122,16 @@ private Provider load(boolean atfirst) throws Throwable { if(nssProvider == null) { File nssFile = File.createTempFile("nss",".cfg"); nssFile.deleteOnExit(); - OutputStream os = null; - try { + + try ( // More robust than PrintWriter(file), which can hang on out of disk space. - os = new FileOutputStream(nssFile); - OutputStreamWriter osw = new OutputStreamWriter(os, "ISO-8859-1"); + OutputStream os = new FileOutputStream(nssFile); + OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.ISO_8859_1); BufferedWriter bw = new BufferedWriter(osw); + ) { bw.write("name=NSScrypto\n"); bw.write("nssDbMode=noDb\n"); bw.write("attributes=compatibility\n"); - bw.close(); - os = null; - } finally { - Closer.close(os); } Class c = Class.forName("sun.security.pkcs11.SunPKCS11"); Constructor constructor = c.getConstructor(String.class); diff --git a/src/freenet/crypt/SHA256.java b/src/freenet/crypt/SHA256.java index e68a8e0fa10..fe4c48abfd5 100644 --- a/src/freenet/crypt/SHA256.java +++ b/src/freenet/crypt/SHA256.java @@ -47,7 +47,6 @@ LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING import freenet.node.Node; import freenet.node.NodeInitException; import freenet.support.Logger; -import freenet.support.io.Closer; /** * @author Jeroen C. 
van Gelderen (gelderen@cryptix.org) @@ -65,16 +64,13 @@ public class SHA256 { * @throws IOException */ public static void hash(InputStream is, MessageDigest md) throws IOException { - try { + try (InputStream inputStream = is){ byte[] buf = new byte[4096]; - int readBytes = is.read(buf); + int readBytes = inputStream.read(buf); while(readBytes > -1) { md.update(buf, 0, readBytes); - readBytes = is.read(buf); + readBytes = inputStream.read(buf); } - is.close(); - } finally { - Closer.close(is); } } @@ -86,7 +82,7 @@ public static void hash(InputStream is, MessageDigest md) throws IOException { */ public static MessageDigest getMessageDigest() { try { - SoftReference item = null; + SoftReference item; while (((item = digests.poll()) != null)) { MessageDigest md = item.get(); if (md != null) { @@ -109,11 +105,13 @@ public static MessageDigest getMessageDigest() { * Must be SHA-256 ! */ public static void returnMessageDigest(MessageDigest md256) { - if(md256 == null) + if (md256 == null) { return; + } String algo = md256.getAlgorithm(); - if(!(algo.equals("SHA-256") || algo.equals("SHA256"))) + if (!(algo.equals("SHA-256") || algo.equals("SHA256"))) { throw new IllegalArgumentException("Should be SHA-256 but is " + algo); + } md256.reset(); digests.add(new SoftReference<>(md256)); } diff --git a/src/freenet/crypt/SSL.java b/src/freenet/crypt/SSL.java index b9786fe8fb4..f979aea057a 100644 --- a/src/freenet/crypt/SSL.java +++ b/src/freenet/crypt/SSL.java @@ -42,7 +42,6 @@ import freenet.support.Logger; import freenet.support.api.BooleanCallback; import freenet.support.api.StringCallback; -import freenet.support.io.Closer; import java.net.ServerSocket; public class SSL { @@ -216,9 +215,8 @@ private static void loadKeyStore() throws NoSuchAlgorithmException, CertificateE if(enable) { // A keystore is where keys and certificates are kept // Both the keystore and individual private keys should be password protected - FileInputStream fis = null; - try { - fis = new FileInputStream(keyStore); + + try (FileInputStream fis = new FileInputStream(keyStore)){ keystore.load(fis, keyStorePass.toCharArray()); } catch(FileNotFoundException fnfe) { // If keystore not exist, create keystore and server certificate @@ -251,25 +249,17 @@ private static void loadKeyStore() throws NoSuchAlgorithmException, CertificateE keystore.setKeyEntry("freenet", privKey, keyPass.toCharArray(), chain); storeKeyStore(); createSSLContext(); - } catch (ClassNotFoundException cnfe) { - throw new UnsupportedOperationException("The JVM you are using does not support generating strong SSL certificates", cnfe); - } catch (NoSuchMethodException nsme) { - throw new UnsupportedOperationException("The JVM you are using does not support generating strong SSL certificates", nsme); + } catch (ClassNotFoundException | NoSuchMethodException e) { + throw new UnsupportedOperationException("The JVM you are using does not support generating strong SSL certificates", e); } - } finally { - Closer.close(fis); } } } private static void storeKeyStore() throws KeyStoreException, NoSuchAlgorithmException, CertificateException, IOException { if(enable) { - FileOutputStream fos = null; - try { - fos = new FileOutputStream(keyStore); + try (FileOutputStream fos = new FileOutputStream(keyStore)){ keystore.store(fos, keyStorePass.toCharArray()); - } finally { - Closer.close(fos); } } } diff --git a/src/freenet/crypt/Yarrow.java b/src/freenet/crypt/Yarrow.java index efdb0acfca8..b9ec7dfa765 100644 --- a/src/freenet/crypt/Yarrow.java +++ 
b/src/freenet/crypt/Yarrow.java @@ -28,7 +28,6 @@ import freenet.support.LogThresholdCallback; import freenet.support.Logger; import freenet.support.Logger.LogLevel; -import freenet.support.io.Closer; /** * An implementation of the Yarrow PRNG in Java. @@ -134,29 +133,26 @@ public Yarrow(File seed, String digest, String cipher, boolean updateSeed, boole private void seedFromExternalStuff(boolean canBlock) { byte[] buf = new byte[32]; if(File.separatorChar == '/') { - DataInputStream dis = null; - FileInputStream fis = null; File hwrng = new File("/dev/hwrng"); - if(hwrng.exists() && hwrng.canRead()) - try { - fis = new FileInputStream(hwrng); - dis = new DataInputStream(fis); + if(hwrng.exists() && hwrng.canRead()) { + try ( + FileInputStream fis = new FileInputStream(hwrng); + DataInputStream dis = new DataInputStream(fis) + ) { dis.readFully(buf); consumeBytes(buf); dis.readFully(buf); consumeBytes(buf); - dis.close(); } catch(Throwable t) { Logger.normal(this, "Can't read /dev/hwrng even though exists and is readable: " + t, t); - } finally { - Closer.close(dis); - Closer.close(fis); } + } // Read some bits from /dev/urandom - try { - fis = new FileInputStream("/dev/urandom"); - dis = new DataInputStream(fis); + try ( + FileInputStream fis = new FileInputStream("/dev/urandom"); + DataInputStream dis = new DataInputStream(fis) + ) { dis.readFully(buf); consumeBytes(buf); dis.readFully(buf); @@ -165,30 +161,25 @@ private void seedFromExternalStuff(boolean canBlock) { Logger.normal(this, "Can't read /dev/urandom: " + t, t); // We can't read it; let's skip /dev/random and seed from SecureRandom.generateSeed() canBlock = true; - } finally { - Closer.close(dis); - Closer.close(fis); } if(canBlock) // Read some bits from /dev/random - try { - fis = new FileInputStream("/dev/random"); - dis = new DataInputStream(fis); + try ( + FileInputStream fis = new FileInputStream("/dev/random"); + DataInputStream dis = new DataInputStream(fis) + ) { dis.readFully(buf); consumeBytes(buf); dis.readFully(buf); consumeBytes(buf); } catch(Throwable t) { Logger.normal(this, "Can't read /dev/random: " + t, t); - } finally { - Closer.close(dis); - Closer.close(fis); } - fis = null; - } else + } else { // Force generateSeed(), since we can't read random data from anywhere else. // Anyway, Windows's CAPI won't block. canBlock = true; + } if(canBlock) { // SecureRandom hopefully acts as a proxy for CAPI on Windows buf = sr.generateSeed(32); @@ -239,27 +230,19 @@ protected void readStartupEntropy(EntropySource startupEntropy) { * Seed handling */ private void read_seed(File filename) { - FileInputStream fis = null; - BufferedInputStream bis = null; - DataInputStream dis = null; - - try { - fis = new FileInputStream(filename); - bis = new BufferedInputStream(fis); - dis = new DataInputStream(bis); - + try ( + FileInputStream fis = new FileInputStream(filename); + BufferedInputStream bis = new BufferedInputStream(fis); + DataInputStream dis = new DataInputStream(bis) + ) { EntropySource seedFile = new EntropySource(); - for(int i = 0; i < 32; i++) - acceptEntropy(seedFile, dis.readLong(), 64); - dis.close(); + for(int i = 0; i < 32; i++) { + acceptEntropy(seedFile, dis.readLong(), 64); + } } catch(EOFException f) { // Okay. 
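read_seed() above declares each stream wrapper as a separate resource instead of nesting the constructors in one expression. The difference is narrow but real: try-with-resources only closes resources whose declarations completed, so if a wrapper constructor were to fail, the chained form would leak the already-open FileInputStream while the multi-declaration form still closes it. A brief sketch under that assumption (the seed file is just a placeholder argument):

import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

class ResourceDeclarationSketch {

    // Chained constructors: a failure in the BufferedInputStream or DataInputStream
    // constructor would leave the FileInputStream open.
    static long chained(File seed) throws IOException {
        try (DataInputStream dis = new DataInputStream(
                new BufferedInputStream(new FileInputStream(seed)))) {
            return dis.readLong();
        }
    }

    // Separate declarations: every stream that was successfully opened is closed,
    // in reverse order, even if a later constructor throws.
    static long separate(File seed) throws IOException {
        try (FileInputStream fis = new FileInputStream(seed);
             BufferedInputStream bis = new BufferedInputStream(fis);
             DataInputStream dis = new DataInputStream(bis)) {
            return dis.readLong();
        }
    }
}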
} catch(IOException e) { Logger.error(this, "IOE trying to read the seedfile from disk : " + e.getMessage()); - } finally { - Closer.close(dis); - Closer.close(bis); - Closer.close(fis); } fast_pool_reseed(); } @@ -278,34 +261,27 @@ public void write_seed(boolean force) { } private void write_seed(File filename, boolean force) { - if(!force) + if(!force) { synchronized(this) { long now = System.currentTimeMillis(); - if(now - timeLastWroteSeed <= HOURS.toMillis(1) /* once per hour */) + if(now - timeLastWroteSeed <= HOURS.toMillis(1) /* once per hour */) { return; - else + } else { timeLastWroteSeed = now; + } } - - FileOutputStream fos = null; - BufferedOutputStream bos = null; - DataOutputStream dos = null; - try { - fos = new FileOutputStream(filename); - bos = new BufferedOutputStream(fos); - dos = new DataOutputStream(bos); - - for(int i = 0; i < 32; i++) + } + try ( + FileOutputStream fos = new FileOutputStream(filename); + BufferedOutputStream bos = new BufferedOutputStream(fos); + DataOutputStream dos = new DataOutputStream(bos) + ) { + for(int i = 0; i < 32; i++) { dos.writeLong(nextLong()); - + } dos.flush(); - dos.close(); } catch(IOException e) { Logger.error(this, "IOE while saving the seed file! : " + e.getMessage()); - } finally { - Closer.close(dos); - Closer.close(bos); - Closer.close(fos); } } /** diff --git a/src/freenet/l10n/BaseL10n.java b/src/freenet/l10n/BaseL10n.java index b28bb825e47..e089cd41f3b 100644 --- a/src/freenet/l10n/BaseL10n.java +++ b/src/freenet/l10n/BaseL10n.java @@ -20,7 +20,6 @@ import freenet.support.HTMLNode; import freenet.support.Logger; import freenet.support.SimpleFieldSet; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; /** @@ -335,11 +334,10 @@ private void loadOverrideFileOrBackup() throws IOException { */ private SimpleFieldSet loadTranslation(LANGUAGE lang) { SimpleFieldSet result = null; - InputStream in = null; - - try { + try ( // Returns null on lookup failures: - in = this.cl.getResourceAsStream(this.getL10nFileName(lang)); + InputStream in = this.cl.getResourceAsStream(this.getL10nFileName(lang)) + ) { if (in != null) { result = SimpleFieldSet.readFrom(in, false, false); } else { @@ -348,9 +346,6 @@ private SimpleFieldSet loadTranslation(LANGUAGE lang) { } catch (Exception e) { System.err.println("Error while loading the l10n file from " + this.getL10nFileName(lang) + " :" + e.getMessage()); e.printStackTrace(); - result = null; - } finally { - Closer.close(in); } return result; @@ -420,25 +415,20 @@ public void setOverride(String key, String value) { * Save the SimpleFieldSet of overriden keys in a file. 
*/ private void saveTranslationFile() { - FileOutputStream fos = null; File finalFile = new File(this.getL10nOverrideFileName(this.lang)); - try { // We don't set deleteOnExit on it : if the save operation fails, we want a backup File tempFile = File.createTempFile(finalFile.getName(), ".bak", finalFile.getParentFile());; Logger.minor(this.getClass(), "The temporary filename is : " + tempFile); - fos = new FileOutputStream(tempFile); - this.translationOverride.writeToBigBuffer(fos); - fos.close(); - fos = null; + try (FileOutputStream fos = new FileOutputStream(tempFile)) { + this.translationOverride.writeToBigBuffer(fos); + } FileUtil.renameTo(tempFile, finalFile); Logger.normal(this.getClass(), "Override file saved successfully!"); } catch (IOException e) { Logger.error(this.getClass(), "Error while saving the translation override: " + e.getMessage(), e); - } finally { - Closer.close(fos); } } diff --git a/src/freenet/l10n/ISO639_3.java b/src/freenet/l10n/ISO639_3.java index 6d707b4eec5..11d539ffef0 100644 --- a/src/freenet/l10n/ISO639_3.java +++ b/src/freenet/l10n/ISO639_3.java @@ -6,11 +6,12 @@ import java.io.BufferedReader; import java.io.InputStream; import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; import java.util.Collections; +import java.util.HashMap; import java.util.Hashtable; import java.util.Map; -import freenet.support.io.Closer; /** * Provides the content of the ISO639-3 standard for language codes. @@ -59,16 +60,22 @@ public static final class LanguageCode implements Comparable { */ public final String part1; - public static enum Scope { + public enum Scope { Individual, Macrolanguage, Special; private static Scope fromTabFile(String abbreviation) { - if(abbreviation.equals("I")) return Scope.Individual; - else if(abbreviation.equals("M")) return Scope.Macrolanguage; - else if(abbreviation.equals("S")) return Scope.Special; - else throw new IllegalArgumentException("Unknown scope abbreviation: " + abbreviation); + switch (abbreviation) { + case "I": + return Scope.Individual; + case "M": + return Scope.Macrolanguage; + case "S": + return Scope.Special; + default: + throw new IllegalArgumentException("Unknown scope abbreviation: " + abbreviation); + } } }; @@ -77,7 +84,7 @@ private static Scope fromTabFile(String abbreviation) { */ public final Scope scope; - public static enum Type { + public enum Type { Ancient, Constructed, Extinct, @@ -86,13 +93,22 @@ public static enum Type { Special; private static Type fromTabFile(String abbreviation) { - if(abbreviation.equals("A")) return Type.Ancient; - else if(abbreviation.equals("C")) return Type.Constructed; - else if(abbreviation.equals("E")) return Type.Extinct; - else if(abbreviation.equals("H")) return Type.Historical; - else if(abbreviation.equals("L")) return Type.Living; - else if(abbreviation.equals("S")) return Type.Special; - else throw new IllegalArgumentException("Unknwon type abbreviation: " + abbreviation); + switch (abbreviation) { + case "A": + return Type.Ancient; + case "C": + return Type.Constructed; + case "E": + return Type.Extinct; + case "H": + return Type.Historical; + case "L": + return Type.Living; + case "S": + return Type.Special; + default: + throw new IllegalArgumentException("Unknwon type abbreviation: " + abbreviation); + } } } @@ -166,23 +182,13 @@ public String toString() { } - private static Hashtable loadFromTabFile() { - final Hashtable codes = new Hashtable(7705 * 2); - - InputStream in = null; - InputStreamReader isr = null; - BufferedReader br = null; - - try { 
- // Returns null on lookup failures: - in = ISO639_3.class.getClassLoader().getResourceAsStream("freenet/l10n/iso-639-3_20100707.tab"); - - if (in == null) - throw new RuntimeException("Could not open the language codes resource"); - - isr = new InputStreamReader(in, "UTF-8"); - br = new BufferedReader(isr); - + private static Map loadFromTabFile() { + final Map codes = new HashMap<>(7705 * 2); + try ( + InputStream in = getTabFileInputStreamOrThrow(); + InputStreamReader isr = new InputStreamReader(in, StandardCharsets.UTF_8); + BufferedReader br = new BufferedReader(isr); + ) { { String[] headerTokens = br.readLine().split("[\t]"); if( @@ -224,15 +230,19 @@ private static Hashtable loadFromTabFile() { } } catch(Exception e) { throw new RuntimeException(e); - } finally { - Closer.close(br); - Closer.close(isr); - Closer.close(in); } - return codes; } - + + private static InputStream getTabFileInputStreamOrThrow() { + // Returns null on lookup failures: + InputStream in = ISO639_3.class.getClassLoader().getResourceAsStream("freenet/l10n/iso-639-3_20100707.tab"); + if (in == null) { + throw new RuntimeException("Could not open the language codes resource"); + } + return in; + } + private final Map allLanguagesCache; @@ -253,7 +263,7 @@ public ISO639_3() { * @return Returns the map of all ISO639-3 language codes. The key in the returned list is the ID of the language code, * which is the 3-letter code of ISO639-3. The given map is unmodifiable since it is used for the cache. */ - public final Map getLanguages() { + public Map getLanguages() { return allLanguagesCache; } @@ -265,9 +275,9 @@ public final Map getLanguages() { * @return Gets a {@link Hashtable} of language codes with the given scope and type. The key in the returned list is the ID * of the language code, which is the 3-letter code of ISO639-3. The given Hashtable is free for modification. */ - public final Hashtable getLanguagesByScopeAndType(LanguageCode.Scope scope, LanguageCode.Type type) { + public Map getLanguagesByScopeAndType(LanguageCode.Scope scope, LanguageCode.Type type) { final Map all = getLanguages(); - final Hashtable result = new Hashtable(); + final Map result = new HashMap<>(); for(final LanguageCode c : all.values()) { if(c.scope.equals(scope) && c.type.equals(type)) @@ -280,9 +290,8 @@ public final Hashtable getLanguagesByScopeAndType(Language /** * @return The special symbolic language code which is supposed to be a category for multiple languages. 
*/ - public final LanguageCode getMultilingualCode() { + public LanguageCode getMultilingualCode() { return getLanguages().get("mul"); - } public static void main(String[] args) { diff --git a/src/freenet/node/Announcer.java b/src/freenet/node/Announcer.java index 74fca944368..77555a861e5 100644 --- a/src/freenet/node/Announcer.java +++ b/src/freenet/node/Announcer.java @@ -11,6 +11,7 @@ import java.io.IOException; import java.io.InputStreamReader; import java.net.InetAddress; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; @@ -30,7 +31,6 @@ import freenet.support.Logger.LogLevel; import freenet.support.SimpleFieldSet; import freenet.support.TimeUtil; -import freenet.support.io.Closer; import freenet.support.transport.ip.IPUtil; import static java.util.concurrent.TimeUnit.SECONDS; @@ -262,13 +262,13 @@ private synchronized int connectSomeNodesInner(List seeds) { } public static List readSeednodes(File file) { - List list = new ArrayList(); - FileInputStream fis = null; - try { - fis = new FileInputStream(file); + List list = new ArrayList<>(); + try ( + FileInputStream fis = new FileInputStream(file); BufferedInputStream bis = new BufferedInputStream(fis); - InputStreamReader isr = new InputStreamReader(bis, "UTF-8"); + InputStreamReader isr = new InputStreamReader(bis, StandardCharsets.UTF_8); BufferedReader br = new BufferedReader(isr); + ){ while(true) { try { SimpleFieldSet fs = new SimpleFieldSet(br, false, false, true, false); @@ -286,8 +286,6 @@ public static List readSeednodes(File file) { } catch (IOException e) { Logger.error(Announcer.class, "Unexpected error while reading seednodes from " + file, e); return list; - } finally { - Closer.close(fis); } } diff --git a/src/freenet/node/LocationManager.java b/src/freenet/node/LocationManager.java index c6d439a5465..c798c30339c 100644 --- a/src/freenet/node/LocationManager.java +++ b/src/freenet/node/LocationManager.java @@ -15,6 +15,7 @@ import java.io.IOException; import java.io.OutputStreamWriter; import java.net.MalformedURLException; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.security.MessageDigest; import java.text.DateFormat; @@ -60,7 +61,6 @@ import freenet.support.ShortBuffer; import freenet.support.TimeSortedHashtable; import freenet.support.io.ArrayBucket; -import freenet.support.io.Closer; import freenet.support.math.BootstrappingDecayingRunningAverage; /** @@ -919,21 +919,18 @@ private void recordLocChange(final boolean randomReset, final boolean fromDupLoc @Override public void run() { File locationLog = node.nodeDir().file("location.log.txt"); - if(locationLog.exists() && locationLog.length() > 1024*1024*10) - locationLog.delete(); - FileOutputStream os = null; - try { - os = new FileOutputStream(locationLog, true); - BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(os, "ISO-8859-1")); + if(locationLog.exists() && locationLog.length() > 1024*1024*10) { + locationLog.delete(); + } + try ( + FileOutputStream os = new FileOutputStream(locationLog, true); + BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(os, StandardCharsets.ISO_8859_1)) + ) { DateFormat df = DateFormat.getDateTimeInstance(); df.setTimeZone(TimeZone.getTimeZone("GMT")); bw.write(""+df.format(new Date())+" : "+getLocation()+(randomReset ? " (random reset"+(fromDupLocation?" 
from duplicated location" : "")+")" : "")+'\n'); - bw.close(); - os = null; } catch (IOException e) { Logger.error(this, "Unable to write changed location to "+locationLog+" : "+e, e); - } finally { - Closer.close(os); } } diff --git a/src/freenet/node/MasterKeys.java b/src/freenet/node/MasterKeys.java index bd084ecbb99..8ee784a3d09 100644 --- a/src/freenet/node/MasterKeys.java +++ b/src/freenet/node/MasterKeys.java @@ -11,6 +11,7 @@ import java.io.IOException; import java.io.RandomAccessFile; import java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.util.Arrays; import java.util.Random; @@ -22,7 +23,6 @@ import freenet.crypt.UnsupportedCipherException; import freenet.crypt.ciphers.Rijndael; import freenet.support.Fields; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; /** Keys read from the master keys file */ @@ -77,15 +77,16 @@ public static MasterKeys read(File masterKeysFile, Random hardRandom, String pas System.err.println("Trying to read master keys file..."); if(masterKeysFile != null && masterKeysFile.exists()) { // Try to read the keys - FileInputStream fis = null; + // FIXME move declarations of sensitive data out and clear() in finally {} long len = masterKeysFile.length(); if(len > 1024) throw new MasterKeysFileSizeException(true); if(len < (32 + 32 + 8 + 32)) throw new MasterKeysFileSizeException(false); int length = (int) len; - try { - fis = new FileInputStream(masterKeysFile); - DataInputStream dis = new DataInputStream(fis); + try ( + FileInputStream fis = new FileInputStream(masterKeysFile); + DataInputStream dis = new DataInputStream(fis) + ) { if(len == 140) { MasterKeys ret = readOldFormat(dis, length, hardRandom, password); System.out.println("Read old-format master keys file. Writing new format master.keys ..."); @@ -103,7 +104,7 @@ public static MasterKeys read(File masterKeysFile, Random hardRandom, String pas byte[] dataAndHash = new byte[length - salt.length - iv.length - 4 - 8]; dis.readFully(dataAndHash); // System.err.println("Data and hash: "+HexUtil.bytesToHex(dataAndHash)); - byte[] pwd = password.getBytes("UTF-8"); + byte[] pwd = password.getBytes(StandardCharsets.UTF_8); MessageDigest md = SHA256.getMessageDigest(); md.update(pwd); md.update(salt); @@ -142,32 +143,40 @@ public static MasterKeys read(File masterKeysFile, Random hardRandom, String pas } // It matches. Now decode it. - ByteArrayInputStream bais = new ByteArrayInputStream(data); - dis = new DataInputStream(bais); - long flags = dis.readLong(); - // At the moment there are no interesting flags. - // In future the flags will tell us whether the database and the datastore are encrypted. - byte[] clientCacheKey = new byte[32]; - dis.readFully(clientCacheKey); - byte[] databaseKey = null; - databaseKey = new byte[32]; - dis.readFully(databaseKey); - byte[] tempfilesMasterSecret = new byte[64]; boolean mustWrite = false; - if(data.length >= 8+32+32+64) { - dis.readFully(tempfilesMasterSecret); - } else { - System.err.println("Created new master secret for encrypted tempfiles"); - hardRandom.nextBytes(tempfilesMasterSecret); - mustWrite = true; + long flags; + byte[] clientCacheKey; + byte[] databaseKey; + byte[] tempfilesMasterSecret; + try ( + ByteArrayInputStream bais = new ByteArrayInputStream(data); + DataInputStream dis2 = new DataInputStream(bais); + ) { + flags = dis2.readLong(); + // At the moment there are no interesting flags. 
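The MasterKeys decode step here wraps the decrypted byte[] in a ByteArrayInputStream/DataInputStream pair inside its own try-with-resources. A standalone sketch of that field layout follows; the field names mirror the patch, the surrounding class is hypothetical, and closing in-memory streams is a no-op, so the try block mainly keeps the style uniform and scopes the stream.

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

class MasterKeysLayoutSketch {
    long flags;
    byte[] clientCacheKey = new byte[32];
    byte[] databaseKey = new byte[32];
    byte[] tempfilesMasterSecret = new byte[64];

    // data is the already-decrypted payload: 8-byte flags, two 32-byte keys,
    // and (in newer files) a 64-byte tempfiles secret.
    void decode(byte[] data) throws IOException {
        try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(data))) {
            flags = dis.readLong();
            dis.readFully(clientCacheKey);
            dis.readFully(databaseKey);
            if (data.length >= 8 + 32 + 32 + 64) {
                dis.readFully(tempfilesMasterSecret);
            } // otherwise the caller generates a fresh secret, as in the patch
        }
    }
}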
+ // In future the flags will tell us whether the database and the datastore are encrypted. + + clientCacheKey = new byte[32]; + dis2.readFully(clientCacheKey); + databaseKey = new byte[32]; + dis2.readFully(databaseKey); + tempfilesMasterSecret = new byte[64]; + + if (data.length >= 8 + 32 + 32 + 64) { + dis2.readFully(tempfilesMasterSecret); + } else { + System.err.println("Created new master secret for encrypted tempfiles"); + hardRandom.nextBytes(tempfilesMasterSecret); + mustWrite = true; + } } MasterKeys ret = new MasterKeys(clientCacheKey, databaseKey, tempfilesMasterSecret, flags); clear(data); clear(hash); SHA256.returnMessageDigest(md); System.err.println("Read old master keys file"); - if(mustWrite) { - ret.changePassword(masterKeysFile, password, hardRandom); + if (mustWrite) { + ret.changePassword(masterKeysFile, password, hardRandom); } return ret; } catch (FileNotFoundException e) { @@ -178,8 +187,6 @@ public static MasterKeys read(File masterKeysFile, Random hardRandom, String pas throw new Error(e); } catch (EOFException e) { throw new MasterKeysFileSizeException(false); - } finally { - Closer.close(fis); } } System.err.println("Creating new master keys file"); @@ -197,7 +204,7 @@ private static MasterKeys readOldFormat(DataInputStream dis, int length, Random byte[] dataAndHash = new byte[length - salt.length - iv.length]; dis.readFully(dataAndHash); // System.err.println("Data and hash: "+HexUtil.bytesToHex(dataAndHash)); - byte[] pwd = password.getBytes("UTF-8"); + byte[] pwd = password.getBytes(StandardCharsets.UTF_8); MessageDigest md = SHA256.getMessageDigest(); md.update(pwd); md.update(salt); @@ -274,13 +281,8 @@ private void write(File masterKeysFile, String newPassword, Random hardRandom) t hardRandom.nextBytes(salt); byte[] pwd; - try { - pwd = newPassword.getBytes("UTF-8"); - } catch (UnsupportedEncodingException e) { - // Impossible - throw new Error(e); - } - MessageDigest md = SHA256.getMessageDigest(); + pwd = newPassword.getBytes(StandardCharsets.UTF_8); + MessageDigest md = SHA256.getMessageDigest(); md.update(pwd); md.update(salt); byte[] outerKey = md.digest(); diff --git a/src/freenet/node/Node.java b/src/freenet/node/Node.java index 0b4fce5ffa5..c72b698f45f 100644 --- a/src/freenet/node/Node.java +++ b/src/freenet/node/Node.java @@ -26,6 +26,7 @@ import java.io.UnsupportedEncodingException; import java.net.InetAddress; import java.net.UnknownHostException; +import java.nio.charset.StandardCharsets; import java.security.SecureRandom; import java.util.ArrayList; import java.util.HashMap; @@ -135,7 +136,6 @@ import freenet.support.api.ShortCallback; import freenet.support.api.StringCallback; import freenet.support.io.ArrayBucketFactory; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; import freenet.support.io.NativeThread; import freenet.support.math.MersenneTwister; @@ -891,20 +891,14 @@ public void writeOpennetFile() { private void writeNodeFile(File orig, File backup) { SimpleFieldSet fs = darknetCrypto.exportPrivateFieldSet(); - if(orig.exists()) backup.delete(); - - FileOutputStream fos = null; - try { - fos = new FileOutputStream(backup); + if(orig.exists()) { + backup.delete(); + } + try (FileOutputStream fos = new FileOutputStream(backup)){ fs.writeTo(fos); - fos.close(); - fos = null; FileUtil.renameTo(backup, orig); } catch (IOException ioe) { Logger.error(this, "IOE :"+ioe.getMessage(), ioe); - return; - } finally { - Closer.close(fos); } } @@ -1220,15 +1214,14 @@ public void set(Boolean val) throws 
InvalidConfigValueException, File bootIDFile = runDir.file("bootID"); int BOOT_FILE_LENGTH = 64 / 4; // A long in padded hex bytes long oldBootID = -1; - RandomAccessFile raf = null; - try { - raf = new RandomAccessFile(bootIDFile, "rw"); + + try (RandomAccessFile raf = new RandomAccessFile(bootIDFile, "rw")){ if(raf.length() < BOOT_FILE_LENGTH) { oldBootID = -1; } else { byte[] buf = new byte[BOOT_FILE_LENGTH]; raf.readFully(buf); - String s = new String(buf, "ISO-8859-1"); + String s = new String(buf, StandardCharsets.ISO_8859_1); try { oldBootID = Fields.bytesToLong(HexUtil.hexToBytes(s)); } catch (NumberFormatException e) { @@ -1237,15 +1230,14 @@ public void set(Boolean val) throws InvalidConfigValueException, raf.seek(0); } String s = HexUtil.bytesToHex(Fields.longToBytes(bootID)); - byte[] buf = s.getBytes("ISO-8859-1"); - if(buf.length != BOOT_FILE_LENGTH) + byte[] buf = s.getBytes(StandardCharsets.ISO_8859_1); + if(buf.length != BOOT_FILE_LENGTH) { System.err.println("Not 16 bytes for boot ID "+bootID+" - WTF??"); + } raf.write(buf); } catch (IOException e) { oldBootID = -1; // If we have an error in reading, *or in writing*, we don't reliably know the last boot ID. - } finally { - Closer.close(raf); } lastBootID = oldBootID; diff --git a/src/freenet/node/NodeCrypto.java b/src/freenet/node/NodeCrypto.java index 005527a0015..76a40877549 100644 --- a/src/freenet/node/NodeCrypto.java +++ b/src/freenet/node/NodeCrypto.java @@ -29,7 +29,6 @@ import freenet.support.IllegalBase64Exception; import freenet.support.Logger; import freenet.support.SimpleFieldSet; -import freenet.support.io.Closer; /** * Cryptographic and transport level node identity. @@ -383,20 +382,16 @@ private byte[] myCompressedRef(boolean setup, boolean heavySetup, boolean forARK SimpleFieldSet fs = exportPublicFieldSet(setup, heavySetup, forARK); ByteArrayOutputStream baos = new ByteArrayOutputStream(); - DeflaterOutputStream gis; - gis = new DeflaterOutputStream(baos); - try { + try (DeflaterOutputStream gis = new DeflaterOutputStream(baos)) { fs.writeTo(gis); - } catch (IOException e) { - Logger.error(this, "IOE :"+e.getMessage(), e); - } finally { - Closer.close(gis); - Closer.close(baos); + } catch (IOException e) { + Logger.error(this, "IOE :" + e.getMessage(), e); } byte[] buf = baos.toByteArray(); - if(buf.length >= 4096) + if(buf.length >= 4096) { throw new IllegalStateException("We are attempting to send a "+buf.length+" bytes big reference!"); + } byte[] obuf = new byte[buf.length + 1]; int offset = 0; obuf[offset++] = 0x01; // compressed noderef diff --git a/src/freenet/node/OpennetManager.java b/src/freenet/node/OpennetManager.java index 8a18124159b..a3eb27354f0 100644 --- a/src/freenet/node/OpennetManager.java +++ b/src/freenet/node/OpennetManager.java @@ -16,6 +16,7 @@ import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.net.InetAddress; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; @@ -51,7 +52,6 @@ import freenet.support.SimpleFieldSet; import freenet.support.TimeSortedHashtable; import freenet.support.io.ByteArrayRandomAccessBuffer; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; import freenet.support.io.NativeThread; import freenet.support.transport.ip.HostnameSyntaxException; @@ -301,31 +301,29 @@ public void writeFile() { private void writeFile(File orig, File backup) { SimpleFieldSet fs = crypto.exportPrivateFieldSet(); - - if(orig.exists()) backup.delete(); - - 
FileOutputStream fos = null; - OutputStreamWriter osr = null; - BufferedWriter bw = null; - try { - fos = new FileOutputStream(backup); - osr = new OutputStreamWriter(fos, "UTF-8"); - bw = new BufferedWriter(osr); + if(orig.exists()) { + backup.delete(); + } + try ( + FileOutputStream fos = new FileOutputStream(backup); + OutputStreamWriter osr = new OutputStreamWriter(fos, StandardCharsets.UTF_8); + BufferedWriter bw = new BufferedWriter(osr) + ) { fs.writeTo(bw); - - bw.close(); + } catch (IOException ignored) { + // ignore + } + try { FileUtil.renameTo(backup, orig); - } catch (IOException e) { - Closer.close(bw); - Closer.close(osr); - Closer.close(fos); + } catch (Exception ignored) { + // ignore } } private void readFile(File filename) throws IOException { // REDFLAG: Any way to share this code with Node and NodePeer? FileInputStream fis = new FileInputStream(filename); - InputStreamReader isr = new InputStreamReader(fis, "UTF-8"); + InputStreamReader isr = new InputStreamReader(fis, StandardCharsets.UTF_8); BufferedReader br = new BufferedReader(isr); SimpleFieldSet fs = new SimpleFieldSet(br, false, true); br.close(); diff --git a/src/freenet/node/PeerManager.java b/src/freenet/node/PeerManager.java index a23a6773498..4ed5c2424f3 100644 --- a/src/freenet/node/PeerManager.java +++ b/src/freenet/node/PeerManager.java @@ -13,6 +13,7 @@ import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; @@ -39,7 +40,6 @@ import freenet.support.ShortBuffer; import freenet.support.SimpleFieldSet; import freenet.support.TimeUtil; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; import freenet.support.io.NativeThread; @@ -1501,39 +1501,23 @@ private void writePeersInnerOpennet(boolean rotateBackups) { private void writePeersInner(String filename, String sb, int maxBackups, boolean rotateBackups) { assert(maxBackups >= 1); synchronized(writePeerFileSync) { - FileOutputStream fos = null; - File f; File full = new File(filename).getAbsoluteFile(); + File f; try { f = File.createTempFile(full.getName()+".", ".tmp", full.getParentFile()); } catch (IOException e2) { Logger.error(this, "Cannot write peers to disk: Cannot create temp file - " + e2, e2); - Closer.close(fos); - return; - } - try { - fos = new FileOutputStream(f); - } catch(FileNotFoundException e2) { - Logger.error(this, "Cannot write peers to disk: Cannot create " + f + " - " + e2, e2); - Closer.close(fos); - f.delete(); return; } - OutputStreamWriter w = null; - try { - w = new OutputStreamWriter(fos, "UTF-8"); - } catch(UnsupportedEncodingException e2) { - Closer.close(w); - f.delete(); - throw new Error("Impossible: JVM doesn't support UTF-8: " + e2, e2); - } - try { + try ( + FileOutputStream fos = new FileOutputStream(f); + OutputStreamWriter w = new OutputStreamWriter(fos, StandardCharsets.UTF_8) + ) { w.write(sb); w.flush(); fos.getFD().sync(); w.close(); - w = null; - + if(rotateBackups) { File prevFile = null; for(int i=maxBackups;i>=0;i--) { @@ -1551,18 +1535,11 @@ private void writePeersInner(String filename, String sb, int maxBackups, boolean } else { FileUtil.renameTo(f, getBackupFilename(filename, 0)); } + } catch(FileNotFoundException e2) { + Logger.error(this, "Cannot write peers to disk: Cannot create " + f + " - " + e2, e2); } catch(IOException e) { - try { - fos.close(); - } catch(IOException e1) { - Logger.error(this, "Cannot 
close peers file: " + e, e); - } Logger.error(this, "Cannot write file: " + e, e); - f.delete(); - return; // don't overwrite old file! } finally { - Closer.close(w); - Closer.close(fos); f.delete(); } } diff --git a/src/freenet/node/Persister.java b/src/freenet/node/Persister.java index 18be5ab51ca..222f886f26f 100644 --- a/src/freenet/node/Persister.java +++ b/src/freenet/node/Persister.java @@ -10,7 +10,6 @@ import freenet.support.Logger; import freenet.support.SimpleFieldSet; import freenet.support.Ticker; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; class Persister implements Runnable { @@ -65,18 +64,15 @@ private void persistThrottle() { Logger.minor(this, "Trying to persist throttles..."); } SimpleFieldSet fs = persistable.persistThrottlesToFieldSet(); - FileOutputStream fos = null; try { - fos = new FileOutputStream(persistTemp); - fs.writeToBigBuffer(fos); - fos.close(); + try (FileOutputStream fos = new FileOutputStream(persistTemp)) { + fs.writeToBigBuffer(fos); + } FileUtil.renameTo(persistTemp, persistTarget); } catch (FileNotFoundException e) { Logger.error(this, "Could not store throttle data to disk: " + e, e); } catch (IOException e) { persistTemp.delete(); - } finally { - Closer.close(fos); } } diff --git a/src/freenet/node/TextModeClientInterface.java b/src/freenet/node/TextModeClientInterface.java index 004cb523f43..082cd8803af 100644 --- a/src/freenet/node/TextModeClientInterface.java +++ b/src/freenet/node/TextModeClientInterface.java @@ -23,6 +23,7 @@ import java.net.URI; import java.net.URISyntaxException; import java.net.URL; +import java.nio.charset.StandardCharsets; import java.text.NumberFormat; import java.util.HashMap; @@ -59,7 +60,6 @@ import freenet.support.api.Bucket; import freenet.support.io.ArrayBucket; import freenet.support.io.BucketTools; -import freenet.support.io.Closer; import freenet.support.io.FileBucket; /** @@ -440,34 +440,27 @@ public void run() { outsb.append("Here is the result:\r\n"); final String content = readLines(reader, false); - final Bucket input = new ArrayBucket(content.getBytes("UTF-8")); + final Bucket input = new ArrayBucket(content.getBytes(StandardCharsets.UTF_8)); final Bucket output = new ArrayBucket(); - InputStream inputStream = null; - OutputStream outputStream = null; - InputStream bis = null; - try { - inputStream = input.getInputStream(); - outputStream = output.getOutputStream(); + + try ( + OutputStream outputStream = output.getOutputStream(); + InputStream inputStream = input.getInputStream(); + ) { ContentFilter.filter(inputStream, outputStream, "text/html", new URI("http://127.0.0.1:8888/"), null, null, null, null, core.getLinkFilterExceptionProvider()); - inputStream.close(); - inputStream = null; - outputStream.close(); - outputStream = null; - bis = output.getInputStream(); - while(bis.available() > 0){ - outsb.append((char)bis.read()); - } - } catch (IOException e) { + try (InputStream bis = output.getInputStream()) { + while (bis.available() > 0) { + outsb.append((char) bis.read()); + } + } + } catch (IOException e) { outsb.append("Bucket error?: " + e.getMessage()); Logger.error(this, "Bucket error?: " + e, e); } catch (URISyntaxException e) { outsb.append("Internal error: " + e.getMessage()); Logger.error(this, "Internal error: " + e, e); } finally { - Closer.close(inputStream); - Closer.close(outputStream); - Closer.close(bis); input.free(); output.free(); } diff --git a/src/freenet/node/UptimeEstimator.java b/src/freenet/node/UptimeEstimator.java index 0d02036b0d4..6e59dc15e0f 
100644 --- a/src/freenet/node/UptimeEstimator.java +++ b/src/freenet/node/UptimeEstimator.java @@ -18,7 +18,6 @@ import freenet.support.Fields; import freenet.support.Logger; import freenet.support.Ticker; -import freenet.support.io.Closer; /** * A class to estimate the node's average uptime. Every 5 minutes (with a fixed offset), we write @@ -77,10 +76,10 @@ public void start() { } private void readData(File file, int base) { - FileInputStream fis = null; - try { - fis = new FileInputStream(file); - DataInputStream dis = new DataInputStream(fis); + try ( + FileInputStream fis = new FileInputStream(file); + DataInputStream dis = new DataInputStream(fis) + ) { try { while(true) { int offset = dis.readInt(); @@ -96,13 +95,9 @@ private void readData(File file, int base) { } } catch (EOFException e) { // Finished - } finally { - Closer.close(dis); } } catch (IOException e) { Logger.error(this, "Unable to read old uptime file: "+file+" - we will assume we weren't online during that period"); - } finally { - Closer.close(fis); } } @@ -118,20 +113,17 @@ public void run() { prevFile.delete(); logFile.renameTo(prevFile); } - FileOutputStream fos = null; - DataOutputStream dos = null; int fiveMinutesSinceEpoch = (int)(now / PERIOD); - try { - fos = new FileOutputStream(logFile, true); - dos = new DataOutputStream(fos); + try ( + FileOutputStream fos = new FileOutputStream(logFile, true); + DataOutputStream dos = new DataOutputStream(fos) + ) { dos.writeInt(fiveMinutesSinceEpoch); } catch (FileNotFoundException e) { Logger.error(this, "Unable to create or access "+logFile+" : "+e, e); } catch (IOException e) { Logger.error(this, "Unable to write to uptime estimator log file: "+logFile); } finally { - Closer.close(dos); - Closer.close(fos); // Schedule next time schedule(now); } diff --git a/src/freenet/node/simulator/LongTermMHKTest.java b/src/freenet/node/simulator/LongTermMHKTest.java index feebf3cd86e..8a2e3b60c51 100644 --- a/src/freenet/node/simulator/LongTermMHKTest.java +++ b/src/freenet/node/simulator/LongTermMHKTest.java @@ -9,12 +9,7 @@ import java.io.InputStreamReader; import java.io.OutputStream; import java.net.MalformedURLException; -import java.util.ArrayList; -import java.util.Calendar; -import java.util.Date; -import java.util.GregorianCalendar; -import java.util.List; -import java.util.TimeZone; +import java.util.*; import freenet.client.ClientMetadata; import freenet.client.FetchException; @@ -31,7 +26,6 @@ import freenet.support.Logger.LogLevel; import freenet.support.PooledExecutor; import freenet.support.api.RandomAccessBucket; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; /** Simulates MHKs. 
Creates 4 CHKs, inserts the first one 3 times, and inserts the @@ -62,7 +56,7 @@ public static void main(String[] args) { boolean dumpOnly = args.length == 2 && "--dump".equalsIgnoreCase(args[1]); - List csvLine = new ArrayList(); + List csvLine = new ArrayList<>(); System.out.println("DATE:" + dateFormat.format(today.getTime())); csvLine.add(dateFormat.format(today.getTime())); @@ -72,7 +66,6 @@ public static void main(String[] args) { int exitCode = 0; Node node = null; Node node2 = null; - FileInputStream fis = null; File file = new File("mhk-test-"+uid + ".csv"); long t1, t2; @@ -94,9 +87,10 @@ public static void main(String[] args) { final File innerDir = new File(dir, Integer.toString(DARKNET_PORT1)); innerDir.mkdir(); - fis = new FileInputStream(seednodes); - FileUtil.writeTo(fis, new File(innerDir, "seednodes.fref")); - fis.close(); + + try (FileInputStream fis = new FileInputStream(seednodes)) { + FileUtil.writeTo(fis, new File(innerDir, "seednodes.fref")); + } // Create one node node = NodeStarter.createTestNode(DARKNET_PORT1, OPENNET_PORT1, dir.getPath(), false, Node.DEFAULT_MAX_HTL, @@ -164,18 +158,16 @@ else if(successes != 0) else System.err.println("NO INSERTS SUCCEEDED FOR SINGLE BLOCK: "+successes); - uri = null; + // Insert 3 blocks for(int i=0;i<3;i++) { System.err.println("Inserting MHK #"+i); - uri = null; block = new InsertBlock(mhks[i], new ClientMetadata(), FreenetURI.EMPTY_CHK_URI); try { t1 = System.currentTimeMillis(); - FreenetURI thisURI = client.insert(block, false, null); - uri = thisURI; + uri = client.insert(block, false, null); t2 = System.currentTimeMillis(); System.out.println("PUSH-TIME-" + i + ":" + (t2 - t1)+" for "+uri+" for MHK #"+i); @@ -196,8 +188,6 @@ else if(successes != 0) System.err.println("Some inserts succeeded for MHK: "+successes); else System.err.println("NO INSERTS SUCCEEDED FOR MHK: "+successes); - - uri = null; } // PARSE FILE AND FETCH OLD STUFF IF APPROPRIATE @@ -206,138 +196,142 @@ else if(successes != 0) FreenetURI singleURI = null; FreenetURI[] mhkURIs = new FreenetURI[3]; - fis = new FileInputStream(file); - BufferedReader br = new BufferedReader(new InputStreamReader(fis, ENCODING)); - String line = null; - int linesTooShort = 0, linesBroken = 0, linesNoNumber = 0, linesNoURL = 0, linesNoFetch = 0; - int total = 0, singleKeysSucceeded = 0, mhkSucceeded = 0; - int totalSingleKeyFetches = 0, totalSingleKeySuccesses = 0; - while((line = br.readLine()) != null) { - - singleURI = null; - for(int i=0;i 3 + 6 + 6) { + int token = 3 + 6 + 6; + int singleKeyFetchTime = -1; + boolean singleKeySuccess = false; + for (int i = 0; i < 3; i++) { + // Fetched 3 times + if (!singleKeySuccess) { + try { + singleKeyFetchTime = Integer.parseInt(split[token]); + singleKeySuccess = true; + System.out.println("Fetched single key on try " + i + " on " + date + " in " + singleKeyFetchTime + "ms"); + } catch (NumberFormatException e) { + System.out.println("Failed fetch single key on " + date + " try " + i + " : " + split[token]); + singleKeyFetchTime = -1; + } + } // Else will be empty. 
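The seednodes.fref copy above keeps FileUtil.writeTo (a Freenet helper) but scopes the FileInputStream with try-with-resources. A plain-Java stand-in for that copy, assuming no Freenet utility classes; both streams are closed even if the copy loop throws:

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

class SeednodesCopySketch {
    static void copy(File source, File target) throws IOException {
        try (
            InputStream in = new FileInputStream(source);
            OutputStream out = new FileOutputStream(target)
        ) {
            byte[] buf = new byte[4096];
            int read;
            while ((read = in.read(buf)) != -1) {
                out.write(buf, 0, read);
            }
        }
    }
}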
+ token++; } - token++; - } - System.out.println("Single key URI: "+singleURI); - - for(int i=0;i<3;i++) { - int insertTime = Integer.parseInt(split[token]); - token++; - mhkURIs[i] = new FreenetURI(split[token]); - token++; - System.out.println("MHK #"+i+" URI: "+mhkURIs[i]+" insert time "+insertTime); - } - - } catch (NumberFormatException e) { - System.err.println("Failed to parse row: "+e); - linesNoNumber++; - continue; - } catch (MalformedURLException e) { - System.err.println("Failed to parse row: "+e); - linesNoURL++; - continue; - } - if(Math.abs(target.getTimeInMillis() - calendar.getTimeInMillis()) < HOURS.toMillis(12)) { - System.out.println("Found row for target date "+dateFormat.format(target.getTime())+" : "+dateFormat.format(calendar.getTime())); - System.out.println("Version: "+split[1]); - match = true; - break; - } else if(split.length > 3+6+6) { - int token = 3 + 6 + 6; - int singleKeyFetchTime = -1; - boolean singleKeySuccess = false; - for(int i=0;i<3;i++) { - // Fetched 3 times - if(!singleKeySuccess) { + boolean mhkSuccess = false; + for (int i = 0; i < 3; i++) { + totalSingleKeyFetches++; + int mhkFetchTime = -1; try { - singleKeyFetchTime = Integer.parseInt(split[token]); - singleKeySuccess = true; - System.out.println("Fetched single key on try "+i+" on "+date+" in "+singleKeyFetchTime+"ms"); + mhkFetchTime = Integer.parseInt(split[token]); + mhkSuccess = true; + totalSingleKeySuccesses++; + System.out.println("Fetched MHK #" + i + " on " + date + " in " + mhkFetchTime + "ms"); } catch (NumberFormatException e) { - System.out.println("Failed fetch single key on "+date+" try "+i+" : "+split[token]); - singleKeyFetchTime = -1; + System.out.println("Failed fetch MHK #" + i + " on " + date + " : " + split[token]); } - } // Else will be empty. 
- token++; - } - boolean mhkSuccess = false; - for(int i=0;i<3;i++) { - totalSingleKeyFetches++; - int mhkFetchTime = -1; - try { - mhkFetchTime = Integer.parseInt(split[token]); - mhkSuccess = true; - totalSingleKeySuccesses++; - System.out.println("Fetched MHK #"+i+" on "+date+" in "+mhkFetchTime+"ms"); - } catch (NumberFormatException e) { - System.out.println("Failed fetch MHK #"+i+" on "+date+" : "+split[token]); + token++; } - token++; - } - total++; - if(singleKeySuccess) - singleKeysSucceeded++; - if(mhkSuccess) - mhkSucceeded++; - } else linesNoFetch++; + total++; + if (singleKeySuccess) + singleKeysSucceeded++; + if (mhkSuccess) + mhkSucceeded++; + } else linesNoFetch++; + } + System.out.println("Lines where insert failed or no fetch: too short: " + linesTooShort + " broken: " + linesBroken + " no number: " + linesNoNumber + " no url: " + linesNoURL + " no fetch " + linesNoFetch); + System.out.println("Total attempts where insert succeeded and fetch executed: " + total); + System.out.println("Single keys succeeded: " + singleKeysSucceeded); + System.out.println("MHKs succeeded: " + mhkSucceeded); + System.out.println("Single key individual fetches: " + totalSingleKeyFetches); + System.out.println("Single key individual fetches succeeded: " + totalSingleKeySuccesses); + System.out.println("Success rate for individual keys (from MHK inserts): " + ((double) totalSingleKeySuccesses) / ((double) totalSingleKeyFetches)); + System.out.println("Success rate for the single key triple inserted: " + ((double) singleKeysSucceeded) / ((double) total)); + System.out.println("Success rate for the MHK (success = any of the 3 different keys worked): " + ((double) mhkSucceeded) / ((double) total)); } - System.out.println("Lines where insert failed or no fetch: too short: "+linesTooShort+" broken: "+linesBroken+" no number: "+linesNoNumber+" no url: "+linesNoURL+" no fetch "+linesNoFetch); - System.out.println("Total attempts where insert succeeded and fetch executed: "+total); - System.out.println("Single keys succeeded: "+singleKeysSucceeded); - System.out.println("MHKs succeeded: "+mhkSucceeded); - System.out.println("Single key individual fetches: "+totalSingleKeyFetches); - System.out.println("Single key individual fetches succeeded: "+totalSingleKeySuccesses); - System.out.println("Success rate for individual keys (from MHK inserts): "+((double)totalSingleKeySuccesses)/((double)totalSingleKeyFetches)); - System.out.println("Success rate for the single key triple inserted: "+((double)singleKeysSucceeded)/((double)total)); - System.out.println("Success rate for the MHK (success = any of the 3 different keys worked): "+((double)mhkSucceeded)/((double)total)); - fis.close(); - fis = null; // FETCH STUFF @@ -404,7 +398,6 @@ else if(successes != 0) node2.park(); } catch (Throwable tt) { } - Closer.close(fis); if(!dumpOnly) { writeToStatusLog(file, csvLine); @@ -415,17 +408,14 @@ else if(successes != 0) private static RandomAccessBucket randomData(Node node) throws IOException { RandomAccessBucket data = node.clientCore.tempBucketFactory.makeBucket(TEST_SIZE); - OutputStream os = data.getOutputStream(); - try { - byte[] buf = new byte[4096]; - for (long written = 0; written < TEST_SIZE;) { - node.fastWeakRandom.nextBytes(buf); - int toWrite = (int) Math.min(TEST_SIZE - written, buf.length); - os.write(buf, 0, toWrite); - written += toWrite; - } - } finally { - os.close(); + try (OutputStream os = data.getOutputStream()) { + byte[] buf = new byte[4096]; + for (long written = 0; written < TEST_SIZE; ) 
{ + node.fastWeakRandom.nextBytes(buf); + int toWrite = (int) Math.min(TEST_SIZE - written, buf.length); + os.write(buf, 0, toWrite); + written += toWrite; + } } return data; } diff --git a/src/freenet/node/simulator/LongTermManySingleBlocksTest.java b/src/freenet/node/simulator/LongTermManySingleBlocksTest.java index cc25ddf5da2..de720f04d60 100644 --- a/src/freenet/node/simulator/LongTermManySingleBlocksTest.java +++ b/src/freenet/node/simulator/LongTermManySingleBlocksTest.java @@ -36,7 +36,6 @@ import freenet.support.Logger.LogLevel; import freenet.support.PooledExecutor; import freenet.support.api.RandomAccessBucket; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; /** @@ -182,7 +181,7 @@ public static void main(String[] args) { int exitCode = 0; Node node = null; Node node2 = null; - FileInputStream fis = null; + File file = new File("many-single-blocks-test-"+uid + ".csv"); long t1, t2; @@ -201,9 +200,10 @@ public static void main(String[] args) { final File innerDir = new File(dir, Integer.toString(DARKNET_PORT1)); innerDir.mkdir(); - fis = new FileInputStream(seednodes); - FileUtil.writeTo(fis, new File(innerDir, "seednodes.fref")); - fis.close(); + + try (FileInputStream fis = new FileInputStream(seednodes)) { + FileUtil.writeTo(fis, new File(innerDir, "seednodes.fref")); + } // Create one node node = NodeStarter.createTestNode(DARKNET_PORT1, OPENNET_PORT1, dir.getPath(), false, Node.DEFAULT_MAX_HTL, @@ -218,7 +218,7 @@ public static void main(String[] args) { exitCode = EXIT_FAILED_TARGET; return; } - + t2 = System.currentTimeMillis(); System.out.println("SEED-TIME:" + (t2 - t1)); csvLine.add(String.valueOf(t2 - t1)); @@ -226,30 +226,30 @@ public static void main(String[] args) { HighLevelSimpleClient client = node.clientCore.makeClient((short) 0, false, false); int successes = 0; - + long startInsertsTime = System.currentTimeMillis(); - + InsertBatch batch = new InsertBatch(client); - + // Inserts are sloooooow so do them in parallel. 
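randomData() above now lets try-with-resources close the bucket's OutputStream. A standalone sketch of the same bounded-write loop against a plain file; the size parameter and the Random source stand in for TEST_SIZE and the node's fastWeakRandom:

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Random;

class RandomDataSketch {
    static void fill(String path, long size, Random random) throws IOException {
        try (OutputStream os = new FileOutputStream(path)) {
            byte[] buf = new byte[4096];
            for (long written = 0; written < size; ) {
                random.nextBytes(buf);
                // Never write past the requested size on the last iteration.
                int toWrite = (int) Math.min(size - written, buf.length);
                os.write(buf, 0, toWrite);
                written += toWrite;
            }
        }
    }
}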
- + for(int i=0;i token + INSERTED_BLOCKS) { - int delta; - try { - delta = Integer.parseInt(split[token]); - } catch (NumberFormatException e) { - System.err.println("Unable to parse token "+token+" = \""+token+"\""); - System.err.println("This is supposed to be a delta"); - System.err.println("Skipping the rest of the line for date "+dateFormat.format(calendar.getTime())); - continue loopOverLines; } - System.out.println("Delta: "+((1< token + INSERTED_BLOCKS) { + int delta; try { - mhkFetchTime = Integer.parseInt(split[token]); - System.out.println("Fetched block #"+i+" on "+date+" in "+mhkFetchTime+"ms"); - totalSuccesses++; - totalFetchTime += mhkFetchTime; + delta = Integer.parseInt(split[token]); } catch (NumberFormatException e) { - System.out.println("Failed block #"+i+" on "+date+" : "+split[token]); + System.err.println("Unable to parse token " + token + " = \"" + token + "\""); + System.err.println("This is supposed to be a delta"); + System.err.println("Skipping the rest of the line for date " + dateFormat.format(calendar.getTime())); + continue loopOverLines; } + System.out.println("Delta: " + ((1 << delta) - 1) + " days"); token++; + int totalFetchTime = 0; + int totalSuccesses = 0; + int totalFetches = 0; + for (int i = 0; i < INSERTED_BLOCKS; i++) { + if (split[token].equals("")) + continue; + int mhkFetchTime = -1; + totalFetches++; + try { + mhkFetchTime = Integer.parseInt(split[token]); + System.out.println("Fetched block #" + i + " on " + date + " in " + mhkFetchTime + "ms"); + totalSuccesses++; + totalFetchTime += mhkFetchTime; + } catch (NumberFormatException e) { + System.out.println("Failed block #" + i + " on " + date + " : " + split[token]); + } + token++; + } + totalFetchesByDelta[delta] += totalFetches; + totalSuccessfulFetchesByDelta[delta] += totalSuccesses; + totalFetchTimeByDelta[delta] += totalFetchTime; + System.err.println("Succeeded: " + totalSuccesses + " of " + totalFetches + " average " + ((double) totalFetchTime) / ((double) totalSuccesses) + "ms for delta " + delta + " on " + dateFormat.format(date)); } - totalFetchesByDelta[delta] += totalFetches; - totalSuccessfulFetchesByDelta[delta] += totalSuccesses; - totalFetchTimeByDelta[delta] += totalFetchTime; - System.err.println("Succeeded: "+totalSuccesses+" of "+totalFetches+" average "+((double)totalFetchTime)/((double)totalSuccesses)+"ms for delta "+delta+" on "+dateFormat.format(date)); + } + + System.out.println(); + System.out.println(); + + for (int i = 0; i < MAX_N + 1; i++) { + System.out.println("DELTA: " + i + " days: Total fetches: " + totalFetchesByDelta[i] + " total successes " + totalSuccessfulFetchesByDelta[i] + " = " + ((totalSuccessfulFetchesByDelta[i] * 100.0) / totalFetchesByDelta[i]) + "% in " + (totalFetchTimeByDelta[i] * 1.0) / totalSuccessfulFetchesByDelta[i] + "ms"); } } - - System.out.println(); - System.out.println(); - - for(int i=0;i/dev/null 2>&1 ???? Believed to be portable. //osw.write("trap true PIPE\n"); - should not be necessary osw.write("while kill -0 "+WrapperManager.getWrapperPID()+" > /dev/null 2>&1; do sleep 1; done\n"); osw.write("./"+runshNoNice+" start > /dev/null 2>&1\n"); osw.write("rm "+RESTART_SCRIPT_NAME+"\n"); osw.write("rm "+runshNoNice+"\n"); - osw.close(); - osw = null; - os = null; return restartFreenet; - } finally { - Closer.close(os); } } @@ -1532,32 +1522,27 @@ private File createRestartScript() throws IOException { * REDFLAG FIXME TODO Surely we can improve on this? 
This mechanism is only used for * updating very old wrapper installs - but we'll want to update the wrapper in the future * too, and the ability to restart the wrapper fully is likely useful, so maybe we won't - * just get rid of this - in which case maybe we want to improve on this. - * @throws IOException */ - private boolean createRunShNoNice(File input, File output) throws IOException { - final String charset = "UTF-8"; - InputStream is = null; - OutputStream os = null; + * just get rid of this - in which case maybe we want to improve on this. */ + private boolean createRunShNoNice(File input, File output) { boolean failed = false; - try { - is = new FileInputStream(input); - BufferedReader br = new BufferedReader(new InputStreamReader(new BufferedInputStream(is), charset)); - os = new FileOutputStream(output); - Writer w = new BufferedWriter(new OutputStreamWriter(new BufferedOutputStream(os), charset)); - boolean writtenPrio = false; - String line; - while((line = br.readLine()) != null) { - if((!writtenPrio) && line.startsWith("PRIORITY=")) { - writtenPrio = true; - line = "PRIORITY="; // = don't use nice. - } - w.write(line+"\n"); - } - // We want to see exceptions on close() here. - br.close(); - is = new FileInputStream(input); - w.close(); - os = null; + try { + try ( + InputStream is = new FileInputStream(input); + BufferedReader br = new BufferedReader(new InputStreamReader(new BufferedInputStream(is), StandardCharsets.UTF_8)); + OutputStream os = new FileOutputStream(output); + Writer w = new BufferedWriter(new OutputStreamWriter(new BufferedOutputStream(os), StandardCharsets.UTF_8)) + ) { + boolean writtenPrio = false; + String line; + while ((line = br.readLine()) != null) { + if ((!writtenPrio) && line.startsWith("PRIORITY=")) { + writtenPrio = true; + line = "PRIORITY="; // = don't use nice. + } + w.write(line + "\n"); + } + } + if(!(output.setExecutable(true) || output.canExecute())) { failed = true; return false; @@ -1569,9 +1554,9 @@ private boolean createRunShNoNice(File input, File output) throws IOException { failed = true; return false; } finally { - Closer.close(is); - Closer.close(os); - if(failed) output.delete(); + if(failed) { + output.delete(); + } } } @@ -1579,10 +1564,11 @@ private boolean createRunShNoNice(File input, File output) throws IOException { public static String getDependencyVersion(File currentFile) { // We can't use parseProperties because there are multiple sections. 
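createRunShNoNice() above now opens the reader and the writer in a single try-with-resources header, so a failure on close surfaces from the try block without the old explicit close() calls. A standalone sketch of the same rewrite-one-line pattern; the PRIORITY= key mirrors the patch, the class name is hypothetical:

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;

class RunShRewriteSketch {
    static void stripNice(File input, File output) throws IOException {
        try (
            BufferedReader br = new BufferedReader(
                new InputStreamReader(new FileInputStream(input), StandardCharsets.UTF_8));
            Writer w = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream(output), StandardCharsets.UTF_8))
        ) {
            boolean writtenPrio = false;
            String line;
            while ((line = br.readLine()) != null) {
                if (!writtenPrio && line.startsWith("PRIORITY=")) {
                    writtenPrio = true;
                    line = "PRIORITY="; // blank value = don't use nice
                }
                w.write(line + "\n");
            }
        }
    }
}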
- InputStream is = null; - try { - is = new FileInputStream(currentFile); - ZipInputStream zis = new ZipInputStream(is); + + try ( + InputStream is = new FileInputStream(currentFile); + ZipInputStream zis = new ZipInputStream(is) + ){ ZipEntry ze; while(true) { ze = zis.getNextEntry(); @@ -1592,11 +1578,11 @@ public static String getDependencyVersion(File currentFile) { if(name.equals("META-INF/MANIFEST.MF")) { final String key = "Implementation-Version"; - BufferedInputStream bis = new BufferedInputStream(zis); - Manifest m = new Manifest(bis); - bis.close(); - bis = null; - Attributes a = m.getMainAttributes(); + Manifest m; + try (BufferedInputStream bis = new BufferedInputStream(zis)) { + m = new Manifest(bis); + } + Attributes a = m.getMainAttributes(); if(a != null) { String ver = a.getValue(key); if(ver != null) return ver; @@ -1610,12 +1596,8 @@ public static String getDependencyVersion(File currentFile) { } Logger.error(MainJarDependenciesChecker.class, "Unable to get dependency version from "+currentFile); return null; - } catch (FileNotFoundException e) { - return null; } catch (IOException e) { return null; - } finally { - Closer.close(is); } } @@ -1670,31 +1652,28 @@ public static boolean validFile(File filename, byte[] expectedHash, long size, b System.out.println("File exists while updating but length is wrong ("+filename.length()+" should be "+size+") for "+filename); return false; } - FileInputStream fis = null; try { - fis = new FileInputStream(filename); - MessageDigest md = SHA256.getMessageDigest(); - SHA256.hash(fis, md); - byte[] hash = md.digest(); - SHA256.returnMessageDigest(md); - fis.close(); - fis = null; - if(Arrays.equals(hash, expectedHash)) { - if(executable && !filename.canExecute()) { - filename.setExecutable(true); - } - return true; - } else { + byte[] hash; + try (FileInputStream fis = new FileInputStream(filename)) { + MessageDigest md = SHA256.getMessageDigest(); + SHA256.hash(fis, md); + hash = md.digest(); + SHA256.returnMessageDigest(md); + } + if (!Arrays.equals(hash, expectedHash)) { return false; } + if(executable && !filename.canExecute()) { + filename.setExecutable(true); + } + return true; + } catch (FileNotFoundException e) { Logger.error(MainJarDependencies.class, "File not found: "+filename); return false; } catch (IOException e) { System.err.println("Unable to read "+filename+" for updater"); return false; - } finally { - Closer.close(fis); } } diff --git a/src/freenet/node/updater/MainJarUpdater.java b/src/freenet/node/updater/MainJarUpdater.java index 34bd086d094..73e76eb57f5 100644 --- a/src/freenet/node/updater/MainJarUpdater.java +++ b/src/freenet/node/updater/MainJarUpdater.java @@ -33,7 +33,6 @@ import freenet.node.useralerts.UserAlert; import freenet.support.HTMLNode; import freenet.support.Logger; -import freenet.support.io.Closer; import freenet.support.io.FileBucket; import freenet.support.io.FileUtil; import freenet.support.io.InsufficientDiskSpaceException; @@ -326,19 +325,16 @@ public boolean brokenDependencies() { } public void cleanupDependencies() { - InputStream is = getClass().getResourceAsStream("/"+DEPENDENCIES_FILE); - if(is == null) { - System.err.println("Can't find dependencies file. Other nodes will not be able to use Update Over Mandatory through this one."); - return; - } Properties props = new Properties(); - try { + try (InputStream is = getClass().getResourceAsStream("/"+DEPENDENCIES_FILE)) { + if (is == null) { + System.err.println("Can't find dependencies file. 
Other nodes will not be able to use Update Over Mandatory through this one."); + return; + } props.load(is); } catch (IOException e) { System.err.println("Can't read dependencies file. Other nodes will not be able to use Update Over Mandatory through this one."); return; - } finally { - Closer.close(is); } dependencies.cleanup(props, this, Version.buildNumber()); } diff --git a/src/freenet/node/updater/NodeUpdateManager.java b/src/freenet/node/updater/NodeUpdateManager.java index 8bb39abb37f..835e8efcd8a 100644 --- a/src/freenet/node/updater/NodeUpdateManager.java +++ b/src/freenet/node/updater/NodeUpdateManager.java @@ -54,7 +54,6 @@ import freenet.support.api.Bucket; import freenet.support.api.StringCallback; import freenet.support.io.BucketTools; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; /** @@ -437,14 +436,12 @@ public void onFailure(FetchException e, ClientGetter state) { @Override public void onSuccess(FetchResult result, ClientGetter state) { File temp; - FileOutputStream fos = null; try { temp = FileUtil.createTempFile(filename, ".tmp", directory.dir()); temp.deleteOnExit(); - fos = new FileOutputStream(temp); - BucketTools.copyTo(result.asBucket(), fos, -1); - fos.close(); - fos = null; + try (FileOutputStream fos = new FileOutputStream(temp)) { + BucketTools.copyTo(result.asBucket(), fos, -1); + } for (int i = 0; i < 10; i++) { // FIXME add a callback in case it's being used on Windows. if (FileUtil.renameTo(temp, directory.file(filename))) { @@ -474,8 +471,14 @@ public void onSuccess(FetchResult result, ClientGetter state) { System.err.println("The error was: " + e); e.printStackTrace(); } finally { - Closer.close(fos); - Closer.close(result.asBucket()); + Bucket bucket = result.asBucket(); + if (bucket != null) { + try { + bucket.free(); + } catch(RuntimeException e) { + Logger.error(this, "Error during free().", e); + } + } } } @@ -1356,16 +1359,9 @@ public void writeJarTo(File fNew) throws IOException { if (!fNew.delete() && fNew.exists()) { System.err.println("Can't delete " + fNew + "!"); } - - FileOutputStream fos = null; - try { - fos = new FileOutputStream(fNew); - + try (FileOutputStream fos = new FileOutputStream(fNew)){ BucketTools.copyTo(this.fetchedMainJarData, fos, -1); - fos.flush(); - } finally { - Closer.close(fos); } } diff --git a/src/freenet/node/updater/NodeUpdater.java b/src/freenet/node/updater/NodeUpdater.java index 414a6eeb274..c0d691f144e 100644 --- a/src/freenet/node/updater/NodeUpdater.java +++ b/src/freenet/node/updater/NodeUpdater.java @@ -11,6 +11,7 @@ import java.io.InputStream; import java.io.InputStreamReader; import java.net.MalformedURLException; +import java.nio.charset.StandardCharsets; import java.util.Properties; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; @@ -37,7 +38,6 @@ import freenet.support.Ticker; import freenet.support.api.Bucket; import freenet.support.api.RandomAccessBucket; -import freenet.support.io.Closer; import freenet.support.io.FileBucket; import freenet.support.io.FileUtil; import freenet.support.io.NullOutputStream; @@ -317,49 +317,44 @@ public void run() { protected abstract void maybeParseManifest(FetchResult result, int build); protected void parseManifest(FetchResult result) { - InputStream is = null; - try { - is = result.asBucket().getInputStream(); - ZipInputStream zis = new ZipInputStream(is); - try { - ZipEntry ze; - while(true) { - ze = zis.getNextEntry(); - if(ze == null) break; - if(ze.isDirectory()) continue; - String name = ze.getName(); - - 
if(name.equals("META-INF/MANIFEST.MF")) { - if(logMINOR) Logger.minor(this, "Found manifest"); - long size = ze.getSize(); - if(logMINOR) Logger.minor(this, "Manifest size: "+size); - if(size > MAX_MANIFEST_SIZE) { - Logger.error(this, "Manifest is too big: "+size+" bytes, limit is "+MAX_MANIFEST_SIZE); - break; - } - byte[] buf = new byte[(int) size]; - DataInputStream dis = new DataInputStream(zis); - dis.readFully(buf); - ByteArrayInputStream bais = new ByteArrayInputStream(buf); - InputStreamReader isr = new InputStreamReader(bais, "UTF-8"); - BufferedReader br = new BufferedReader(isr); - String line; - while((line = br.readLine()) != null) { - parseManifestLine(line); - } - } else { - zis.closeEntry(); + + try ( + InputStream is = result.asBucket().getInputStream(); + ZipInputStream zis = new ZipInputStream(is) + ){ + ZipEntry ze; + while(true) { + ze = zis.getNextEntry(); + if(ze == null) break; + if(ze.isDirectory()) continue; + String name = ze.getName(); + + if(name.equals("META-INF/MANIFEST.MF")) { + if(logMINOR) Logger.minor(this, "Found manifest"); + long size = ze.getSize(); + if(logMINOR) Logger.minor(this, "Manifest size: "+size); + if(size > MAX_MANIFEST_SIZE) { + Logger.error(this, "Manifest is too big: "+size+" bytes, limit is "+MAX_MANIFEST_SIZE); + break; + } + byte[] buf = new byte[(int) size]; + DataInputStream dis = new DataInputStream(zis); + dis.readFully(buf); + ByteArrayInputStream bais = new ByteArrayInputStream(buf); + InputStreamReader isr = new InputStreamReader(bais, StandardCharsets.UTF_8); + BufferedReader br = new BufferedReader(isr); + String line; + while((line = br.readLine()) != null) { + parseManifestLine(line); } + } else { + zis.closeEntry(); } - } finally { - Closer.close(zis); } } catch (IOException e) { Logger.error(this, "IOException trying to read manifest on update"); } catch (Throwable t) { Logger.error(this, "Failed to parse update manifest: "+t, t); - } finally { - Closer.close(is); } } @@ -376,8 +371,7 @@ protected void parseManifest(FetchResult result) { * @throws IOException If there is a temporary files error or the jar is corrupted. 
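parseManifest() above walks the ZipInputStream and copies the manifest entry into a byte[] before wrapping it in a reader, which avoids closing the zip stream mid-iteration. A condensed standalone sketch of that extraction step, reading the entry by loop rather than trusting ZipEntry.getSize(); the class name is illustrative:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.jar.Manifest;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

class ManifestFromZipSketch {
    static Manifest readManifest(InputStream jarStream) throws IOException {
        try (ZipInputStream zis = new ZipInputStream(jarStream)) {
            ZipEntry ze;
            while ((ze = zis.getNextEntry()) != null) {
                if (!ze.isDirectory() && "META-INF/MANIFEST.MF".equals(ze.getName())) {
                    // Copy the entry out instead of handing zis to a reader,
                    // so nothing closes the zip stream while we are still iterating.
                    ByteArrayOutputStream buf = new ByteArrayOutputStream();
                    byte[] chunk = new byte[4096];
                    int read;
                    while ((read = zis.read(chunk)) != -1) {
                        buf.write(chunk, 0, read);
                    }
                    return new Manifest(new ByteArrayInputStream(buf.toByteArray()));
                }
                zis.closeEntry();
            }
            return null; // no manifest entry found
        }
    }
}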
*/ static Properties parseProperties(InputStream is, String filename) throws IOException { Properties props = new Properties(); - ZipInputStream zis = new ZipInputStream(is); - try { + try (ZipInputStream zis = new ZipInputStream(is)) { ZipEntry ze; while(true) { ze = zis.getNextEntry(); @@ -407,23 +401,17 @@ static Properties parseProperties(InputStream is, String filename) throws IOExce zis.closeEntry(); } } - } finally { - Closer.close(zis); } return props; } protected void parseDependencies(FetchResult result, int build) { - InputStream is = null; - try { - is = result.asBucket().getInputStream(); + try (InputStream is = result.asBucket().getInputStream()){ parseDependencies(parseProperties(is, DEPENDENCIES_FILE), build); } catch (IOException e) { Logger.error(this, "IOException trying to read manifest on update"); } catch (Throwable t) { Logger.error(this, "Failed to parse update manifest: "+t, t); - } finally { - Closer.close(is); } } diff --git a/src/freenet/node/updater/UpdateDeployContext.java b/src/freenet/node/updater/UpdateDeployContext.java index 8cc5c745176..92adacbc3ff 100644 --- a/src/freenet/node/updater/UpdateDeployContext.java +++ b/src/freenet/node/updater/UpdateDeployContext.java @@ -22,7 +22,6 @@ import freenet.node.updater.MainJarDependenciesChecker.MainJarDependencies; import freenet.support.JVMVersion; import freenet.support.Logger; -import freenet.support.io.Closer; /** * Handles the wrapper.conf, essentially. @@ -371,68 +370,52 @@ public static CHANGED tryIncreaseMemoryLimit(int extraMemoryMB, } } - FileInputStream fis = null; - BufferedInputStream bis = null; - InputStreamReader isr = null; - BufferedReader br = null; - FileOutputStream fos = null; - OutputStreamWriter osw = null; - BufferedWriter bw = null; + boolean success = false; - - try { - - fis = new FileInputStream(oldConfig); - bis = new BufferedInputStream(fis); - isr = new InputStreamReader(bis); - br = new BufferedReader(isr); - - fos = new FileOutputStream(newConfig); - osw = new OutputStreamWriter(fos); - bw = new BufferedWriter(osw); - String line; - - while((line = br.readLine()) != null) { - - if(line.equals("#" + markerComment)) - return CHANGED.ALREADY; - - if(line.startsWith("wrapper.java.maxmemory=")) { - try { - int memoryLimit = Integer.parseInt(line.substring("wrapper.java.maxmemory=".length())); - int newMemoryLimit = memoryLimit + extraMemoryMB; - // There have been some cases where really high limits have caused the JVM to do bad things. - if(NodeStarter.isSomething32bits() && newMemoryLimit > 1408) { - Logger.error(UpdateDeployContext.class, "We've detected a 32bit JVM so we're refusing to set maxmemory to "+newMemoryLimit); - newMemoryLimit = 1408; + try ( + FileInputStream fis = new FileInputStream(oldConfig); + BufferedInputStream bis = new BufferedInputStream(fis); + InputStreamReader isr = new InputStreamReader(bis); + BufferedReader br = new BufferedReader(isr); + + FileOutputStream fos = new FileOutputStream(newConfig); + OutputStreamWriter osw = new OutputStreamWriter(fos); + BufferedWriter bw = new BufferedWriter(osw) + ) { + String line; + + while ((line = br.readLine()) != null) { + + if (line.equals("#" + markerComment)) + return CHANGED.ALREADY; + + if (line.startsWith("wrapper.java.maxmemory=")) { + try { + int memoryLimit = Integer.parseInt(line.substring("wrapper.java.maxmemory=".length())); + int newMemoryLimit = memoryLimit + extraMemoryMB; + // There have been some cases where really high limits have caused the JVM to do bad things. 
+ if (NodeStarter.isSomething32bits() && newMemoryLimit > 1408) { + Logger.error(UpdateDeployContext.class, "We've detected a 32bit JVM so we're refusing to set maxmemory to " + newMemoryLimit); + newMemoryLimit = 1408; + } + bw.write('#' + markerComment + '\n'); + bw.write("wrapper.java.maxmemory=" + newMemoryLimit + '\n'); + success = true; + continue; + } catch (NumberFormatException e) { + // Grrrrr! } - bw.write('#' + markerComment + '\n'); - bw.write("wrapper.java.maxmemory="+newMemoryLimit+'\n'); - success = true; - continue; - } catch (NumberFormatException e) { - // Grrrrr! } + + bw.write(line + '\n'); } - - bw.write(line+'\n'); - } - br.close(); - + } catch (IOException e) { newConfig.delete(); System.err.println("Unable to rewrite wrapper.conf with new memory limit."); return CHANGED.FAIL; - } finally { - Closer.close(br); - Closer.close(isr); - Closer.close(bis); - Closer.close(fis); - Closer.close(bw); - Closer.close(osw); - Closer.close(fos); } if(success) { diff --git a/src/freenet/node/updater/UpdateOverMandatoryManager.java b/src/freenet/node/updater/UpdateOverMandatoryManager.java index d9ea3d8eb48..daef7f60983 100644 --- a/src/freenet/node/updater/UpdateOverMandatoryManager.java +++ b/src/freenet/node/updater/UpdateOverMandatoryManager.java @@ -70,7 +70,6 @@ import freenet.support.api.RandomAccessBuffer; import freenet.support.io.ArrayBucket; import freenet.support.io.ByteArrayRandomAccessBuffer; -import freenet.support.io.Closer; import freenet.support.io.FileBucket; import freenet.support.io.FileUtil; import freenet.support.io.FileRandomAccessBuffer; @@ -1926,20 +1925,19 @@ private boolean maybeFetch() { public void run() { boolean failed = false; File tmp = null; - FileRandomAccessBuffer raf = null; + try { System.out.println("Fetching "+saveTo+" from "+fetchFrom); long uid = updateManager.node.fastWeakRandom.nextLong(); fetchFrom.sendAsync(DMT.createUOMFetchDependency(uid, expectedHash, size), null, updateManager.ctr); tmp = FileUtil.createTempFile(saveTo.getName(), NodeUpdateManager.TEMP_FILE_SUFFIX, saveTo.getParentFile()); - raf = new FileRandomAccessBuffer(tmp, size, false); - PartiallyReceivedBulk prb = - new PartiallyReceivedBulk(updateManager.node.getUSM(), size, - Node.PACKET_SIZE, raf, false); - BulkReceiver br = new BulkReceiver(prb, fetchFrom, uid, updateManager.ctr); - failed = !br.receive(); - raf.close(); - raf = null; + try (FileRandomAccessBuffer raf = new FileRandomAccessBuffer(tmp, size, false)) { + PartiallyReceivedBulk prb = + new PartiallyReceivedBulk(updateManager.node.getUSM(), size, + Node.PACKET_SIZE, raf, false); + BulkReceiver br = new BulkReceiver(prb, fetchFrom, uid, updateManager.ctr); + failed = !br.receive(); + } if(!failed) { // Check the hash. 
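The fetched temp file is verified against the expected hash next via validFile(), which this patch refactors to hash inside a try-with-resources. A standalone sketch of that check using plain JCA calls, without the project's pooled SHA256 helper:

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

class FileHashCheckSketch {
    static boolean matches(String path, byte[] expectedHash) throws IOException, NoSuchAlgorithmException {
        MessageDigest md = MessageDigest.getInstance("SHA-256");
        try (InputStream in = new FileInputStream(path)) {
            byte[] buf = new byte[4096];
            int read;
            while ((read = in.read(buf)) != -1) {
                md.update(buf, 0, read);
            }
        }
        // Constant-time comparison; Arrays.equals would also work here.
        return MessageDigest.isEqual(md.digest(), expectedHash);
    }
}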
if(MainJarDependenciesChecker.validFile(tmp, expectedHash, size, executable)) { @@ -1996,7 +1994,6 @@ public void run() { peersFailed.add(fetchFrom); peersFetching.remove(fetchFrom); } - Closer.close(raf); if(tmp != null) tmp.delete(); if(failed) { diff --git a/src/freenet/pluginmanager/PluginDownLoaderOfficialHTTPS.java b/src/freenet/pluginmanager/PluginDownLoaderOfficialHTTPS.java index 6b9f7cc469e..af63d317b24 100644 --- a/src/freenet/pluginmanager/PluginDownLoaderOfficialHTTPS.java +++ b/src/freenet/pluginmanager/PluginDownLoaderOfficialHTTPS.java @@ -13,16 +13,15 @@ import java.net.MalformedURLException; import java.net.URL; import java.net.URLConnection; +import java.nio.charset.StandardCharsets; import java.security.KeyStore; import java.security.cert.Certificate; import java.security.cert.CertificateFactory; import java.util.Collection; -import java.util.Iterator; import freenet.pluginmanager.PluginManager.PluginProgress; import freenet.support.api.Bucket; import freenet.support.io.ArrayBucket; -import freenet.support.io.Closer; import freenet.support.io.FileBucket; import freenet.support.io.FileUtil; @@ -61,7 +60,7 @@ String getSHA1sum() throws PluginNotFoundException { bos.write(buffer, 0, read); } - return new String(bos.toByteArray(), "ISO-8859-1").split(" ")[0]; + return new String(bos.toByteArray(), StandardCharsets.ISO_8859_1).split(" ")[0]; } catch (MalformedURLException e) { throw new PluginNotFoundException("impossible: "+e,e); @@ -72,9 +71,7 @@ String getSHA1sum() throws PluginNotFoundException { @Override InputStream getInputStream(PluginProgress progress) throws IOException { - File TMP_KEYSTORE = null; - FileInputStream fis = null; - InputStream is = null; + File TMP_KEYSTORE; try { TMP_KEYSTORE = File.createTempFile("keystore", ".tmp"); TMP_KEYSTORE.deleteOnExit(); @@ -82,27 +79,21 @@ InputStream getInputStream(PluginProgress progress) throws IOException { KeyStore ks = KeyStore.getInstance("JKS"); ks.load(null, new char[0]); - is = getCert(); - - CertificateFactory cf = CertificateFactory.getInstance("X.509"); - Collection c = cf.generateCertificates(is); - Iterator it = c.iterator(); - while(it.hasNext()) { - Certificate cert = it.next(); + Collection c; + try (InputStream is = getCert()) { + CertificateFactory cf = CertificateFactory.getInstance("X.509"); + c = cf.generateCertificates(is); + } + for (Certificate cert : c) { ks.setCertificateEntry(cert.getPublicKey().toString(), cert); } - FileOutputStream tmpFOS = new FileOutputStream(TMP_KEYSTORE); - try { + try (FileOutputStream tmpFOS = new FileOutputStream(TMP_KEYSTORE)) { ks.store(tmpFOS, new char[0]); - } finally { - Closer.close(tmpFOS); } System.out.println("The CA has been imported into the trustStore"); } catch(Exception e) { System.err.println("Error while handling the CA :" + e.getMessage()); throw new IOException("Error while handling the CA : "+e); - } finally { - Closer.close(fis); } System.setProperty("javax.net.ssl.trustStore", TMP_KEYSTORE.toString()); @@ -121,26 +112,19 @@ private InputStream getCert() throws IOException { } Bucket bucket; - OutputStream os = null; - try { - try { - bucket = new FileBucket(certFile, false, false, false, false); - os = bucket.getOutputStream(); + bucket = new FileBucket(certFile, false, false, false, false); + try (OutputStream os = bucket.getOutputStream()) { writeCerts(os); - // If this fails, we need the whole fetch to fail. 
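getInputStream() above imports the bundled CA certificates into a throw-away JKS keystore, with both the certificate stream and the keystore's output stream scoped by try-with-resources. A standalone sketch of that import step; the certificate source is whatever stream getCert() returns, and the names below are illustrative:

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.GeneralSecurityException;
import java.security.KeyStore;
import java.security.cert.Certificate;
import java.security.cert.CertificateFactory;
import java.util.Collection;

class TrustStoreSketch {
    static File buildTrustStore(InputStream certStream) throws IOException, GeneralSecurityException {
        File store = File.createTempFile("keystore", ".tmp");
        store.deleteOnExit();

        KeyStore ks = KeyStore.getInstance("JKS");
        ks.load(null, new char[0]); // start with an empty store

        Collection<? extends Certificate> certs;
        try (InputStream is = certStream) {
            certs = CertificateFactory.getInstance("X.509").generateCertificates(is);
        }
        for (Certificate cert : certs) {
            ks.setCertificateEntry(cert.getPublicKey().toString(), cert);
        }
        try (FileOutputStream out = new FileOutputStream(store)) {
            ks.store(out, new char[0]);
        }
        return store;
    }
}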
- os.close(); os = null; - } finally { - Closer.close(os); } return bucket.getInputStream(); } catch (IOException e) { // We don't have access to TempBucketFactory here. // But the certs should be small, so just keep them in memory. bucket = new ArrayBucket(); - os = bucket.getOutputStream(); - writeCerts(os); - os.close(); + try (OutputStream os = bucket.getOutputStream()) { + writeCerts(os); + } return bucket.getInputStream(); } } @@ -148,19 +132,13 @@ private InputStream getCert() throws IOException { private static void writeCerts(OutputStream os) throws IOException { // try to create pem file ClassLoader loader = ClassLoader.getSystemClassLoader(); - InputStream in = null; for(String certurl : certURLs) { - try { - in = loader.getResourceAsStream(certurl); + try (InputStream in = loader.getResourceAsStream(certurl)) { if (in != null) { FileUtil.copy(in, os, -1); } else { throw new IOException("Could not find certificates in fred source nor find certificates file"); } - } finally { - if (in != null) { - in.close(); - } } } } diff --git a/src/freenet/pluginmanager/PluginInfoWrapper.java b/src/freenet/pluginmanager/PluginInfoWrapper.java index 1be940efd9c..263b4faff30 100644 --- a/src/freenet/pluginmanager/PluginInfoWrapper.java +++ b/src/freenet/pluginmanager/PluginInfoWrapper.java @@ -13,7 +13,6 @@ import freenet.node.Node; import freenet.support.JarClassLoader; import freenet.support.Logger; -import freenet.support.io.Closer; public class PluginInfoWrapper implements Comparable { @@ -184,7 +183,11 @@ public boolean finishShutdownPlugin(PluginManager manager, long maxWaitTime, boo // Close the jar file, so we may delete / reload it ClassLoader cl = plug.getClass().getClassLoader(); if (cl instanceof JarClassLoader) { - Closer.close((JarClassLoader) cl); + try { + ((JarClassLoader) cl).close(); + } catch (IOException e) { + Logger.error(this, "Error during close() on "+ cl, e); + } } return success; } diff --git a/src/freenet/pluginmanager/PluginManager.java b/src/freenet/pluginmanager/PluginManager.java index acdbc3bad67..9ba953f8cdd 100644 --- a/src/freenet/pluginmanager/PluginManager.java +++ b/src/freenet/pluginmanager/PluginManager.java @@ -64,13 +64,12 @@ import freenet.support.api.BooleanCallback; import freenet.support.api.HTTPRequest; import freenet.support.api.StringArrCallback; -import freenet.support.io.Closer; import freenet.support.io.FileUtil; import freenet.support.io.NativeThread.PriorityLevel; public class PluginManager { - private final HashMap toadletList = new HashMap(); + private final Map toadletList = new HashMap<>(); /* All currently starting plugins. 
*/ private final OfficialPlugins officialPlugins = new OfficialPlugins(); @@ -1159,18 +1158,14 @@ private void deleteCachedVersions(List filesInPluginDirectory) { private void downloadPluginFile(PluginDownLoader pluginDownLoader, File pluginDirectory, File pluginFile, PluginProgress pluginProgress) throws IOException, PluginNotFoundException { File tempPluginFile = File.createTempFile("plugin-", ".jar", pluginDirectory); tempPluginFile.deleteOnExit(); - OutputStream pluginOutputStream = null; - InputStream pluginInputStream = null; - try { - pluginOutputStream = new FileOutputStream(tempPluginFile); - pluginInputStream = pluginDownLoader.getInputStream(pluginProgress); + try ( + OutputStream pluginOutputStream = new FileOutputStream(tempPluginFile); + InputStream pluginInputStream = pluginDownLoader.getInputStream(pluginProgress) + ) { FileUtil.copy(pluginInputStream, pluginOutputStream, -1); } catch (IOException ioe1) { tempPluginFile.delete(); throw ioe1; - } finally { - Closer.close(pluginInputStream); - Closer.close(pluginOutputStream); } if (tempPluginFile.length() == 0) { throw new PluginNotFoundException("downloaded zero length file"); @@ -1194,9 +1189,7 @@ private void verifyDigest(PluginDownLoader pluginDownLoader, File pluginFile) } private String verifyJarFileAndGetPluginMainClass(File pluginFile) throws PluginNotFoundException, PluginAlreadyLoaded { - JarFile pluginJarFile = null; - try { - pluginJarFile = new JarFile(pluginFile); + try (JarFile pluginJarFile = new JarFile(pluginFile)) { Manifest manifest = pluginJarFile.getManifest(); if (manifest == null) { throw new PluginNotFoundException("could not load manifest from plugin file"); @@ -1216,8 +1209,6 @@ private String verifyJarFileAndGetPluginMainClass(File pluginFile) throws Plugin return pluginMainClassName; } catch (IOException ioe1) { throw new PluginNotFoundException("error procesesing jar file", ioe1); - } finally { - Closer.close(pluginJarFile); } } @@ -1339,13 +1330,9 @@ private long extractTimestamp(String filename) { private String getFileDigest(File file, String digest) throws PluginNotFoundException { final int BUFFERSIZE = 4096; - MessageDigest hash = null; - FileInputStream fis = null; - BufferedInputStream bis = null; boolean wasFromDigest256Pool = false; - String result; - try { + MessageDigest hash; if ("SHA-256".equals(digest)) { hash = SHA256.getMessageDigest(); // grab digest from pool wasFromDigest256Pool = true; @@ -1354,23 +1341,24 @@ private String getFileDigest(File file, String digest) throws PluginNotFoundExce } // We compute the hash // http://java.sun.com/developer/TechTips/1998/tt0915.html#tip2 - fis = new FileInputStream(file); - bis = new BufferedInputStream(fis); - int len = 0; - byte[] buffer = new byte[BUFFERSIZE]; - while((len = bis.read(buffer)) > -1) { - hash.update(buffer, 0, len); - } - result = HexUtil.bytesToHex(hash.digest()); - if (wasFromDigest256Pool) + try ( + FileInputStream fis = new FileInputStream(file); + BufferedInputStream bis = new BufferedInputStream(fis) + ) { + byte[] buffer = new byte[BUFFERSIZE]; + int len; + while ((len = bis.read(buffer)) > -1) { + hash.update(buffer, 0, len); + } + } + String result = HexUtil.bytesToHex(hash.digest()); + if (wasFromDigest256Pool) { SHA256.returnMessageDigest(hash); + } + return result; } catch(Exception e) { throw new PluginNotFoundException("Error while computing hash '"+digest+"' of the downloaded plugin: " + e, e); - } finally { - Closer.close(bis); - Closer.close(fis); } - return result; } Ticker getTicker() { diff --git 
a/src/freenet/store/saltedhash/SaltedHashFreenetStore.java b/src/freenet/store/saltedhash/SaltedHashFreenetStore.java index 719ec022248..41cfe65d92d 100644 --- a/src/freenet/store/saltedhash/SaltedHashFreenetStore.java +++ b/src/freenet/store/saltedhash/SaltedHashFreenetStore.java @@ -57,7 +57,6 @@ import freenet.support.Logger.LogLevel; import freenet.support.Ticker; import freenet.support.WrapperKeepalive; -import freenet.support.io.Closer; import freenet.support.io.Fallocate; import freenet.support.io.FileUtil; import freenet.support.io.NativeThread; @@ -1160,65 +1159,62 @@ private boolean loadConfigFile(byte[] masterKey) throws IOException { writeConfigFile(); return true; } else { - try { + try ( // try to load - RandomAccessFile raf = new RandomAccessFile(configFile, "r"); - try { - byte[] salt = new byte[0x10]; - raf.readFully(salt); - - byte[] diskSalt = salt; - if(masterKey != null) { - BlockCipher cipher; - try { - cipher = new Rijndael(256, 128); - } catch (UnsupportedCipherException e) { - throw new Error("Impossible: no Rijndael(256,128): "+e, e); - } - cipher.initialize(masterKey); - salt = new byte[0x10]; - cipher.decipher(diskSalt, salt); - if(logDEBUG) - Logger.debug(this, "Encrypting (new) with "+HexUtil.bytesToHex(salt)+" from "+HexUtil.bytesToHex(diskSalt)); + RandomAccessFile raf = new RandomAccessFile(configFile, "r") + ){ + byte[] salt = new byte[0x10]; + raf.readFully(salt); + + byte[] diskSalt = salt; + if(masterKey != null) { + BlockCipher cipher; + try { + cipher = new Rijndael(256, 128); + } catch (UnsupportedCipherException e) { + throw new Error("Impossible: no Rijndael(256,128): "+e, e); } + cipher.initialize(masterKey); + salt = new byte[0x10]; + cipher.decipher(diskSalt, salt); + if(logDEBUG) + Logger.debug(this, "Encrypting (new) with "+HexUtil.bytesToHex(salt)+" from "+HexUtil.bytesToHex(diskSalt)); + } - cipherManager = new CipherManager(salt, diskSalt); + cipherManager = new CipherManager(salt, diskSalt); - storeSize = raf.readLong(); - if(storeSize <= 0) throw new IOException("Bogus datastore size"); - prevStoreSize = raf.readLong(); - keyCount.set(raf.readLong()); - generation = raf.readInt(); - flags = raf.readInt(); + storeSize = raf.readLong(); + if(storeSize <= 0) throw new IOException("Bogus datastore size"); + prevStoreSize = raf.readLong(); + keyCount.set(raf.readLong()); + generation = raf.readInt(); + flags = raf.readInt(); - if (((flags & FLAG_DIRTY) != 0) && - // FIXME figure out a way to do this consistently! - // Not critical as a few blocks wrong is something we can handle. - ResizablePersistentIntBuffer.getPersistenceTime() != -1) - flags |= FLAG_REBUILD_BLOOM; + if (((flags & FLAG_DIRTY) != 0) && + // FIXME figure out a way to do this consistently! + // Not critical as a few blocks wrong is something we can handle. + ResizablePersistentIntBuffer.getPersistenceTime() != -1) + flags |= FLAG_REBUILD_BLOOM; - try { - raf.readInt(); // bloomFilterK - raf.readInt(); // reserved - raf.readLong(); // reserved - long w = raf.readLong(); - writes.set(w); - initialWrites = w; - Logger.normal(this, "Set writes to saved value "+w); - hits.set(raf.readLong()); - initialHits = hits.get(); - misses.set(raf.readLong()); - initialMisses = misses.get(); - bloomFalsePos.set(raf.readLong()); - initialBloomFalsePos = bloomFalsePos.get(); - } catch (EOFException e) { - // Ignore, back compatibility. 
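The EOFException handler here is intentionally empty: the statistics fields at the end of the salted-hash config header were added later, so older, shorter files must still load. A minimal standalone sketch of that pattern, assuming an invented two-field layout rather than the real header format:

import java.io.EOFException;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;

// Hypothetical example, not part of this patch: read a required field, then an
// optional trailing field that only newer files contain.
final class OptionalTrailerSketch {
    static long[] read(File configFile) throws IOException {
        try (RandomAccessFile raf = new RandomAccessFile(configFile, "r")) {
            long required = raf.readLong();
            long optional = 0; // default used for old files
            try {
                optional = raf.readLong();
            } catch (EOFException e) {
                // Ignore, back compatibility: the file predates this field.
            }
            return new long[] { required, optional };
        }
    }
}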
- } - - return false; - } finally { - Closer.close(raf); + try { + raf.readInt(); // bloomFilterK + raf.readInt(); // reserved + raf.readLong(); // reserved + long w = raf.readLong(); + writes.set(w); + initialWrites = w; + Logger.normal(this, "Set writes to saved value "+w); + hits.set(raf.readLong()); + initialHits = hits.get(); + misses.set(raf.readLong()); + initialMisses = misses.get(); + bloomFalsePos.set(raf.readLong()); + initialBloomFalsePos = bloomFalsePos.get(); + } catch (EOFException e) { + // Ignore, back compatibility. } + + return false; } catch (IOException e) { // corrupted? delete it and try again Logger.error(this, "config file corrupted, trying to create a new store: " + name, e); diff --git a/src/freenet/support/BinaryBloomFilter.java b/src/freenet/support/BinaryBloomFilter.java index 991f9b90723..4c1d4f81fb0 100644 --- a/src/freenet/support/BinaryBloomFilter.java +++ b/src/freenet/support/BinaryBloomFilter.java @@ -3,8 +3,6 @@ * http://www.gnu.org/ for further details of the GPL. */ package freenet.support; -import freenet.support.io.Closer; - import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; @@ -38,18 +36,14 @@ protected BinaryBloomFilter(int length, int k) { */ protected BinaryBloomFilter(File file, int length, int k) throws IOException { super(length, k); - if (!file.exists() || file.length() != length / 8) + if (!file.exists() || file.length() != length / 8) { needRebuild = true; - - RandomAccessFile raf = new RandomAccessFile(file, "rw"); - FileChannel channel = null; - try { + } + try (RandomAccessFile raf = new RandomAccessFile(file, "rw")) { raf.setLength(length / 8); - channel = raf.getChannel(); - filter = channel.map(MapMode.READ_WRITE, 0, length / 8).load(); - } finally { - Closer.close(raf); - Closer.close(channel); + try (FileChannel channel = raf.getChannel()) { + filter = channel.map(MapMode.READ_WRITE, 0, length / 8).load(); + } } } diff --git a/src/freenet/support/CountingBloomFilter.java b/src/freenet/support/CountingBloomFilter.java index f0ab8137f27..baa3b38e00a 100644 --- a/src/freenet/support/CountingBloomFilter.java +++ b/src/freenet/support/CountingBloomFilter.java @@ -3,8 +3,6 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package freenet.support; -import freenet.support.io.Closer; - import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; @@ -46,18 +44,14 @@ public CountingBloomFilter(int length, int k) { protected CountingBloomFilter(File file, int length, int k) throws IOException { super(length, k); int fileLength = length / 4; - if (!file.exists() || file.length() != fileLength) + if (!file.exists() || file.length() != fileLength) { needRebuild = true; - - RandomAccessFile raf = new RandomAccessFile(file, "rw"); - FileChannel channel = null; - try { + } + try (RandomAccessFile raf = new RandomAccessFile(file, "rw")) { raf.setLength(fileLength); - channel = raf.getChannel(); - filter = channel.map(MapMode.READ_WRITE, 0, fileLength).load(); - } finally { - Closer.close(raf); - Closer.close(channel); + try (FileChannel channel = raf.getChannel()) { + filter = channel.map(MapMode.READ_WRITE, 0, fileLength).load(); + } } } diff --git a/src/freenet/support/Logger.java b/src/freenet/support/Logger.java index ee03898e0af..d9f1cb390db 100644 --- a/src/freenet/support/Logger.java +++ b/src/freenet/support/Logger.java @@ -11,11 +11,10 @@ import java.lang.ref.WeakReference; import java.lang.reflect.Field; import java.lang.reflect.Modifier; -import java.util.regex.PatternSyntaxException; +import java.nio.charset.StandardCharsets; import freenet.support.FileLoggerHook.IntervalParseException; import freenet.support.LoggerHook.InvalidThresholdException; -import freenet.support.io.Closer; /** * @author Iakin @@ -56,49 +55,30 @@ public synchronized static int getPPID(Object o) { * it's unavailable for some reason. */ public synchronized static String getFieldFromProcSelfStat(int fieldNumber, Object o) { - String readLine = null; - if (!procSelfStatEnabled) { return null; } // read /proc/self/stat and parse for the specified field - InputStream is = null; - BufferedReader br = null; File procFile = new File("/proc/self/stat"); if (procFile.exists()) { - try { - is = new FileInputStream(procFile); - br = new BufferedReader(new InputStreamReader(is, "ISO-8859-1" /* ASCII */)); + try ( + InputStream is = new FileInputStream(procFile); + BufferedReader br = new BufferedReader(new InputStreamReader(is, StandardCharsets.ISO_8859_1 /* ASCII */)) + ){ + String readLine = br.readLine(); + if (readLine != null) { + String[] procFields = readLine.trim().split(" "); + if (4 <= procFields.length) { + return procFields[fieldNumber]; + } + } } catch (FileNotFoundException e1) { logStatic(o, "'/proc/self/stat' not found", logToFileVerbosity); procSelfStatEnabled = false; - br = null; - } catch (UnsupportedEncodingException e) { - // Impossible. 
- throw new Error(e); - } - if (null != br) { - try { - readLine = br.readLine(); - } catch (IOException e) { - error(o, "Caught IOException in br.readLine() of OSThread.getFieldFromProcSelfStat()", e); - readLine = null; - } finally { - Closer.close(br); - } - if (null != readLine) { - try { - String[] procFields = readLine.trim().split(" "); - if (4 <= procFields.length) { - return procFields[ fieldNumber ]; - } - } catch (PatternSyntaxException e) { - error(o, "Caught PatternSyntaxException in readLine.trim().split(\" \") of OSThread.getFieldFromProcSelfStat() while parsing '"+readLine+"'", e); - } - } - } else { - Closer.close(is); + } catch (IOException e) { + error(o, "Caught IOException in OSThread.getFieldFromProcSelfStat()", e); + procSelfStatEnabled = false; } } return null; diff --git a/src/freenet/support/SimpleFieldSet.java b/src/freenet/support/SimpleFieldSet.java index 1a7babc8464..08a5cf0e456 100644 --- a/src/freenet/support/SimpleFieldSet.java +++ b/src/freenet/support/SimpleFieldSet.java @@ -16,6 +16,7 @@ import java.io.StringWriter; import java.io.UnsupportedEncodingException; import java.io.Writer; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -27,7 +28,6 @@ import java.util.Set; import freenet.node.FSParseException; -import freenet.support.io.Closer; import freenet.support.io.LineReader; import freenet.support.io.Readers; @@ -940,39 +940,20 @@ public static SimpleFieldSet readFrom(InputStream is, boolean allowMultiple, boo * characters etc. */ public static SimpleFieldSet readFrom(InputStream is, boolean allowMultiple, boolean shortLived, boolean allowBase64, boolean alwaysBase64) throws IOException { - BufferedInputStream bis = null; - InputStreamReader isr = null; - BufferedReader br = null; - - try { - bis = new BufferedInputStream(is); - try { - isr = new InputStreamReader(bis, "UTF-8"); - } catch (UnsupportedEncodingException e) { - Logger.error(SimpleFieldSet.class, "Impossible: "+e, e); - is.close(); - throw new Error("Impossible: JVM doesn't support UTF-8: " + e, e); - } - br = new BufferedReader(isr); - SimpleFieldSet fs = new SimpleFieldSet(br, allowMultiple, shortLived, allowBase64, alwaysBase64); - br.close(); - - return fs; - } finally { - Closer.close(br); - Closer.close(isr); - Closer.close(bis); - } + try ( + BufferedInputStream bis = new BufferedInputStream(is); + InputStreamReader isr = new InputStreamReader(bis, StandardCharsets.UTF_8); + BufferedReader br = new BufferedReader(isr) + ) { + return new SimpleFieldSet(br, allowMultiple, shortLived, allowBase64, alwaysBase64); + } } /** Read a SimpleFieldSet from a File. */ public static SimpleFieldSet readFrom(File f, boolean allowMultiple, boolean shortLived) throws IOException { - FileInputStream fis = new FileInputStream(f); - try { - return readFrom(fis, allowMultiple, shortLived); - } finally { - fis.close(); - } + try (FileInputStream fis = new FileInputStream(f)) { + return readFrom(fis, allowMultiple, shortLived); + } } /** Write to the given OutputStream (as UTF-8) and flush it. */ @@ -988,18 +969,9 @@ public void writeToBigBuffer(OutputStream os) throws IOException { /** Write to the given OutputStream and flush it. 
*/ public void writeTo(OutputStream os, int bufferSize) throws IOException { - BufferedOutputStream bos = null; - OutputStreamWriter osw = null; - BufferedWriter bw = null; - - bos = new BufferedOutputStream(os, bufferSize); - try { - osw = new OutputStreamWriter(bos, "UTF-8"); - } catch (UnsupportedEncodingException e) { - Logger.error(SimpleFieldSet.class, "Impossible: " + e, e); - throw e; - } - bw = new BufferedWriter(osw); + BufferedOutputStream bos = new BufferedOutputStream(os, bufferSize); + OutputStreamWriter osw = new OutputStreamWriter(bos, StandardCharsets.UTF_8); + BufferedWriter bw = new BufferedWriter(osw); writeTo(bw); bw.flush(); } diff --git a/src/freenet/support/api/Bucket.java b/src/freenet/support/api/Bucket.java index 48f52fc4146..5300d94f44f 100644 --- a/src/freenet/support/api/Bucket.java +++ b/src/freenet/support/api/Bucket.java @@ -2,10 +2,7 @@ * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ package freenet.support.api; -import java.io.DataOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; +import java.io.*; import freenet.client.async.ClientContext; import freenet.support.io.ResumeFailedException; @@ -19,7 +16,7 @@ * * @author oskar */ -public interface Bucket { +public interface Bucket extends Closeable { /** * Returns an OutputStream that is used to put data in this Bucket, from the @@ -28,14 +25,14 @@ public interface Bucket { * OutputStream around. Will be buffered if appropriate (e.g. byte array backed * buckets don't need to be buffered). */ - public OutputStream getOutputStream() throws IOException; + OutputStream getOutputStream() throws IOException; /** Get an OutputStream which is not buffered. Should be called when we will buffer the stream * at a higher level or when we will only be doing large writes (e.g. copying data from one * Bucket to another). Does not make any more persistence guarantees than getOutputStream() * does, this is just to save memory. */ - public OutputStream getOutputStreamUnbuffered() throws IOException; + OutputStream getOutputStreamUnbuffered() throws IOException; /** * Returns an InputStream that reads data from this Bucket. If there is @@ -43,38 +40,49 @@ public interface Bucket { * * You have to call Closer.close(inputStream) on the obtained stream to prevent resource leakage. */ - public InputStream getInputStream() throws IOException; + InputStream getInputStream() throws IOException; - public InputStream getInputStreamUnbuffered() throws IOException; + InputStream getInputStreamUnbuffered() throws IOException; /** * Returns a name for the bucket, may be used to identify them in * certain in certain situations. */ - public String getName(); + String getName(); /** * Returns the amount of data currently in this bucket in bytes. */ - public long size(); + long size(); /** * Is the bucket read-only? */ - public boolean isReadOnly(); + boolean isReadOnly(); /** * Make the bucket read-only. Irreversible. */ - public void setReadOnly(); + void setReadOnly(); /** * Free the bucket, if supported. Note that you must call free() even if you haven't used the * Bucket (haven't called getOutputStream()) for some kinds of Bucket's, as they may have * allocated space (e.g. created a temporary file). 
*/ - public void free(); - + void free(); + + /** + * Synonym for method {@link #free()}, + * but overrides {@link Closeable#close()}, + * and makes it possible to use this object within try-with-resources blocks + * @throws IOException when close fails + */ + @Override + default void close() throws IOException { + free(); + } + /** * Create a shallow read-only copy of this bucket, using different * objects but using the same external storage. If this is not possible, @@ -83,18 +91,18 @@ public interface Bucket { * possibly return too-short data etc. In some use cases e.g. on fproxy, * this is acceptable. */ - public Bucket createShadow(); + Bucket createShadow(); /** Called after restarting. The Bucket should do any necessary housekeeping after resuming, * e.g. registering itself with the appropriate persistent bucket tracker to avoid being * garbage-collected. May be called twice, so the Bucket may need to track this internally. * @param context All the necessary runtime support will be on this object. * @throws ResumeFailedException */ - public void onResume(ClientContext context) throws ResumeFailedException; + void onResume(ClientContext context) throws ResumeFailedException; /** Write enough data to reconstruct the Bucket, or throw UnsupportedOperationException. Used * for recovering in emergencies, should be versioned if necessary. * @throws IOException */ - public void storeTo(DataOutputStream dos) throws IOException; + void storeTo(DataOutputStream dos) throws IOException; } diff --git a/src/freenet/support/compress/DecompressorThreadManager.java b/src/freenet/support/compress/DecompressorThreadManager.java index df33b99c3db..5edca3631ed 100644 --- a/src/freenet/support/compress/DecompressorThreadManager.java +++ b/src/freenet/support/compress/DecompressorThreadManager.java @@ -21,7 +21,6 @@ import freenet.support.Logger; import freenet.support.Logger.LogLevel; -import freenet.support.io.Closer; /** Creates and manages decompressor threads. 
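Because Bucket now extends Closeable and its default close() delegates to free(), callers can scope temporary buckets with try-with-resources instead of calling free() in a finally block. A minimal round-trip sketch of that usage, assuming a caller that already holds a BucketFactory (the helper class and payload are invented for illustration):

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

import freenet.support.api.Bucket;
import freenet.support.api.BucketFactory;

// Hypothetical helper, not part of this patch.
final class BucketRoundTripSketch {
    static byte[] roundTrip(BucketFactory bf) throws IOException {
        byte[] payload = "hello".getBytes(StandardCharsets.UTF_8);
        try (Bucket bucket = bf.makeBucket(payload.length)) {
            try (OutputStream os = bucket.getOutputStream()) {
                os.write(payload);
            }
            try (InputStream is = bucket.getInputStream()) {
                byte[] copy = new byte[payload.length];
                int read = 0;
                while (read < copy.length) {
                    int n = is.read(copy, read, copy.length - read);
                    if (n < 0) {
                        break;
                    }
                    read += n;
                }
                return copy;
            }
        } // close() is the new synonym for free(): backing storage is released here
    }
}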
This class is * given all decompressors which should be applied to an @@ -32,14 +31,13 @@ */ public class DecompressorThreadManager { - final Queue threads; - PipedInputStream input; - PipedOutputStream output = new PipedOutputStream(); - final long maxLen; + private final Queue threads; + private final PipedInputStream input; + private final PipedOutputStream output; private boolean finished = false; private Throwable error = null; - private static volatile boolean logMINOR; + private static volatile boolean logMINOR; static { Logger.registerLogThresholdCallback(new LogThresholdCallback(){ @Override @@ -54,22 +52,26 @@ public void shouldUpdate(){ * @param maxLen The maximum number of bytes to extract */ public DecompressorThreadManager(PipedInputStream inputStream, List decompressors, long maxLen) throws IOException { - threads = new ArrayDeque(decompressors.size()); - this.maxLen = maxLen; if(inputStream == null) { IOException e = new IOException("Input stream may not be null"); onFailure(e); throw e; } - input = inputStream; + this.threads = new ArrayDeque<>(decompressors.size()); + PipedOutputStream os = new PipedOutputStream(); + PipedInputStream is = inputStream; while(!decompressors.isEmpty()) { Compressor compressor = decompressors.remove(decompressors.size()-1); - if(logMINOR) Logger.minor(this, "Decompressing with "+compressor); - DecompressorThread thread = new DecompressorThread(compressor, this, input, output, maxLen); + if (logMINOR) { + Logger.minor(this, "Decompressing with "+compressor); + } + DecompressorThread thread = new DecompressorThread(compressor, this, is, os, maxLen); threads.add(thread); - input = new PipedInputStream(output); - output = new PipedOutputStream(); + is = new PipedInputStream(os); + os = new PipedOutputStream(); } + this.input = is; + this.output = os; } /** Creates and executes a new thread for each decompressor, @@ -77,36 +79,42 @@ public DecompressorThreadManager(PipedInputStream inputStream, ListDecompressorThreadManager * @author sajack */ - class DecompressorThread implements Runnable { + static class DecompressorThread implements Runnable { /**The compressor whose decompress method will be invoked*/ final Compressor compressor; /**The stream compressed data will be read from*/ - private InputStream input; + private final InputStream input; /**The stream decompressed data will be written*/ - private OutputStream output; + private final OutputStream output; /**A upper limit to how much data may be decompressed. 
This is passed to the decompressor*/ final long maxLen; /**The manager which created the thread*/ @@ -177,23 +185,24 @@ public DecompressorThread(Compressor compressor, DecompressorThreadManager manag /**Begins the decompression */ @Override public void run() { - if(logMINOR) Logger.minor(this, "Decompressing..."); - try { + if(logMINOR) { + Logger.minor(this, "Decompressing..."); + } + try ( + InputStream is = this.input; + OutputStream os = this.output; + ) { if(manager.getError() == null) { - compressor.decompress(input, output, maxLen, maxLen * 4); - input.close(); - output.close(); - // Avoid relatively expensive repeated close on normal completion - input = null; - output = null; - if(isLast) manager.onFinish(); + compressor.decompress(is, os, maxLen, maxLen * 4); + if(isLast) { + manager.onFinish(); + } + } + if(logMINOR) { + Logger.minor(this, "Finished decompressing..."); } - if(logMINOR) Logger.minor(this, "Finished decompressing..."); } catch (Exception e) { manager.onFailure(e); - } finally { - Closer.close(input); - Closer.close(output); } } diff --git a/src/freenet/support/compress/GzipCompressor.java b/src/freenet/support/compress/GzipCompressor.java index 4afe3a984ac..f42798b13bc 100644 --- a/src/freenet/support/compress/GzipCompressor.java +++ b/src/freenet/support/compress/GzipCompressor.java @@ -11,27 +11,18 @@ import freenet.support.Logger; import freenet.support.api.Bucket; import freenet.support.api.BucketFactory; -import freenet.support.io.Closer; import freenet.support.io.CountedOutputStream; public class GzipCompressor extends AbstractCompressor { @Override - public Bucket compress(Bucket data, BucketFactory bf, long maxReadLength, long maxWriteLength) - throws IOException, CompressionOutputSizeException { + public Bucket compress(Bucket data, BucketFactory bf, long maxReadLength, long maxWriteLength) throws IOException { Bucket output = bf.makeBucket(maxWriteLength); - InputStream is = null; - OutputStream os = null; - try { - is = data.getInputStream(); - os = output.getOutputStream(); + try ( + InputStream is = data.getInputStream(); + OutputStream os = output.getOutputStream() + ) { compress(is, os, maxReadLength, maxWriteLength); - // It is essential that the close()'s throw if there is any problem. - is.close(); is = null; - os.close(); os = null; - } finally { - Closer.close(is); - Closer.close(os); } return output; } diff --git a/src/freenet/support/compress/NewLZMACompressor.java b/src/freenet/support/compress/NewLZMACompressor.java index 2751933a5d1..c4bd03cfcbc 100644 --- a/src/freenet/support/compress/NewLZMACompressor.java +++ b/src/freenet/support/compress/NewLZMACompressor.java @@ -18,7 +18,6 @@ import freenet.support.Logger.LogLevel; import freenet.support.api.Bucket; import freenet.support.api.BucketFactory; -import freenet.support.io.Closer; import freenet.support.io.CountedInputStream; import freenet.support.io.CountedOutputStream; @@ -40,24 +39,16 @@ public void shouldUpdate(){ // Copied from EncoderThread. See below re licensing. 
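The DecompressorThreadManager constructor above chains one PipedInputStream/PipedOutputStream pair per decompressor and gives each stage its own thread; piped streams block (and can deadlock) if producer and consumer share a thread. A minimal two-stage sketch of that wiring, with a trivial pass-through copy standing in for a real decompressor (class name and the main method are invented for illustration):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.nio.charset.StandardCharsets;

// Hypothetical example, not part of this patch.
final class PipeChainSketch {
    // Stand-in for a decompression stage: copies bytes through unchanged.
    static void stage(InputStream in, OutputStream out) throws IOException {
        byte[] buf = new byte[4096];
        int n;
        while ((n = in.read(buf)) != -1) {
            out.write(buf, 0, n);
        }
    }

    public static void main(String[] args) throws Exception {
        PipedOutputStream feed = new PipedOutputStream();
        PipedInputStream stage1In = new PipedInputStream(feed);
        PipedOutputStream stage1Out = new PipedOutputStream();
        PipedInputStream stage2In = new PipedInputStream(stage1Out);
        ByteArrayOutputStream result = new ByteArrayOutputStream();

        Thread first = new Thread(() -> {
            try (InputStream in = stage1In; OutputStream out = stage1Out) {
                stage(in, out);
            } catch (IOException e) {
                e.printStackTrace();
            }
        });
        Thread second = new Thread(() -> {
            try (InputStream in = stage2In) {
                stage(in, result);
            } catch (IOException e) {
                e.printStackTrace();
            }
        });
        first.start();
        second.start();

        try (OutputStream out = feed) {
            out.write("payload".getBytes(StandardCharsets.UTF_8));
        }
        first.join();
        second.join();
        System.out.println(result.toString("UTF-8")); // prints "payload"
    }
}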
@Override - public Bucket compress(Bucket data, BucketFactory bf, long maxReadLength, long maxWriteLength) - throws IOException, CompressionOutputSizeException { - Bucket output; - InputStream is = null; - OutputStream os = null; - try { - output = bf.makeBucket(maxWriteLength); - is = data.getInputStream(); - os = output.getOutputStream(); - if(logMINOR) + public Bucket compress(Bucket data, BucketFactory bf, long maxReadLength, long maxWriteLength) throws IOException { + Bucket output = bf.makeBucket(maxWriteLength); + try ( + InputStream is = data.getInputStream(); + OutputStream os = output.getOutputStream() + ) { + if(logMINOR) { Logger.minor(this, "Compressing "+data+" size "+data.size()+" to new bucket "+output); + } compress(is, os, maxReadLength, maxWriteLength); - // It is essential that the close()'s throw if there is any problem. - is.close(); is = null; - os.close(); os = null; - } finally { - Closer.close(is); - Closer.close(os); } return output; } @@ -113,28 +104,24 @@ public void SetProgress(long processedInSize, long processedOutSize) { return cos.written(); } - public Bucket decompress(Bucket data, BucketFactory bf, long maxLength, long maxCheckSizeLength, Bucket preferred) throws IOException, CompressionOutputSizeException { + public Bucket decompress(Bucket data, BucketFactory bf, long maxLength, long maxCheckSizeLength, Bucket preferred) throws IOException { Bucket output; - if(preferred != null) + if(preferred != null) { output = preferred; - else + } else { output = bf.makeBucket(maxLength); - if(logMINOR) + } + if(logMINOR) { Logger.minor(this, "Decompressing "+data+" size "+data.size()+" to new bucket "+output); - CountedInputStream is = null; - OutputStream os = null; - try { - is = new CountedInputStream(data.getInputStream()); - os = output.getOutputStream(); + } + try ( + CountedInputStream is = new CountedInputStream(data.getInputStream()); + OutputStream os = output.getOutputStream(); + ) { decompress(is, os, maxLength, maxCheckSizeLength); - if(logMINOR) + if (logMINOR) { Logger.minor(this, "Output: "+output+" size "+output.size()+" read "+is.count()); - // It is essential that the close()'s throw if there is any problem. - is.close(); is = null; - os.close(); os = null; - } finally { - Closer.close(os); - Closer.close(is); + } } return output; } diff --git a/src/freenet/support/compress/OldLZMACompressor.java b/src/freenet/support/compress/OldLZMACompressor.java index f8f05434db3..c81f799dc13 100644 --- a/src/freenet/support/compress/OldLZMACompressor.java +++ b/src/freenet/support/compress/OldLZMACompressor.java @@ -17,7 +17,6 @@ import freenet.support.Logger.LogLevel; import freenet.support.api.Bucket; import freenet.support.api.BucketFactory; -import freenet.support.io.Closer; import freenet.support.io.CountedInputStream; import freenet.support.io.CountedOutputStream; @@ -35,24 +34,17 @@ public void shouldUpdate(){ // Copied from EncoderThread. See below re licensing. @Deprecated @Override - public Bucket compress(Bucket data, BucketFactory bf, long maxReadLength, long maxWriteLength) throws IOException, CompressionOutputSizeException { + public Bucket compress(Bucket data, BucketFactory bf, long maxReadLength, long maxWriteLength) throws IOException { Logger.warning(this, "OldLZMA compression is buggy and no longer supported. 
It only exists to allow reinserting keys."); - Bucket output; - InputStream is = null; - OutputStream os = null; - try { - output = bf.makeBucket(maxWriteLength); - is = data.getInputStream(); - os = output.getOutputStream(); - if(logMINOR) + Bucket output = bf.makeBucket(maxWriteLength); + try ( + InputStream is = data.getInputStream(); + OutputStream os = output.getOutputStream() + ) { + if(logMINOR) { Logger.minor(this, "Compressing "+data+" size "+data.size()+" to new bucket "+output); + } compress(is, os, maxReadLength, maxWriteLength); - // It is essential that the close()'s throw if there is any problem. - is.close(); is = null; - os.close(); os = null; - } finally { - Closer.close(is); - Closer.close(os); } return output; } @@ -86,28 +78,24 @@ public long compress(InputStream input, OutputStream output, long maxReadLength, throw new UnsupportedEncodingException(); } - public Bucket decompress(Bucket data, BucketFactory bf, long maxLength, long maxCheckSizeLength, Bucket preferred) throws IOException, CompressionOutputSizeException { + public Bucket decompress(Bucket data, BucketFactory bf, long maxLength, long maxCheckSizeLength, Bucket preferred) throws IOException { Bucket output; - if(preferred != null) + if(preferred != null) { output = preferred; - else + } else { output = bf.makeBucket(maxLength); - if(logMINOR) + } + if (logMINOR) { Logger.minor(this, "Decompressing "+data+" size "+data.size()+" to new bucket "+output); - CountedInputStream is = null; - OutputStream os = null; - try { - is = new CountedInputStream(data.getInputStream()); - os = output.getOutputStream(); + } + try ( + CountedInputStream is = new CountedInputStream(data.getInputStream()); + OutputStream os = output.getOutputStream() + ) { decompress(is, os, maxLength, maxCheckSizeLength); - if(logMINOR) + if(logMINOR) { Logger.minor(this, "Output: "+output+" size "+output.size()+" read "+is.count()); - // It is essential that the close()'s throw if there is any problem. - is.close(); is = null; - os.close(); os = null; - } finally { - Closer.close(is); - Closer.close(os); + } } return output; } diff --git a/src/freenet/support/io/BucketTools.java b/src/freenet/support/io/BucketTools.java index 04af25016ed..d18a1ade671 100644 --- a/src/freenet/support/io/BucketTools.java +++ b/src/freenet/support/io/BucketTools.java @@ -58,63 +58,49 @@ public void shouldUpdate(){ public static void copy(Bucket src, Bucket dst) throws IOException { OutputStream out = dst.getOutputStreamUnbuffered(); InputStream in = src.getInputStreamUnbuffered(); - ReadableByteChannel readChannel = Channels.newChannel(in); - WritableByteChannel writeChannel = Channels.newChannel(out); - try { - - // No benefit to allocateDirect() as we're wrapping streams anyway, and worse, it'd be a memory leak. - ByteBuffer buffer = ByteBuffer.allocate(BUFFER_SIZE); - while (readChannel.read(buffer) != -1) { - buffer.flip(); - while(buffer.hasRemaining()) - writeChannel.write(buffer); - buffer.clear(); - } - - } finally { - writeChannel.close(); - readChannel.close(); + try ( + ReadableByteChannel readChannel = Channels.newChannel(in); + WritableByteChannel writeChannel = Channels.newChannel(out) + ) { + // No benefit to allocateDirect() as we're wrapping streams anyway, and worse, it'd be a memory leak. 
+ ByteBuffer buffer = ByteBuffer.allocate(BUFFER_SIZE); + while (readChannel.read(buffer) != -1) { + buffer.flip(); + while (buffer.hasRemaining()) { + writeChannel.write(buffer); + } + buffer.clear(); + } } } public static void zeroPad(Bucket b, long size) throws IOException { - OutputStream out = b.getOutputStreamUnbuffered(); - - try { - // Initialized to zero by default. - byte[] buffer = new byte[16384]; + try (OutputStream out = b.getOutputStreamUnbuffered()) { + // Initialized to zero by default. + byte[] buffer = new byte[16384]; - long count = 0; - while (count < size) { - long nRequired = buffer.length; - if (nRequired > size - count) { - nRequired = size - count; + long count = 0; + while (count < size) { + long nRequired = buffer.length; + if (nRequired > size - count) { + nRequired = size - count; + } + out.write(buffer, 0, (int) nRequired); + count += nRequired; } - out.write(buffer, 0, (int) nRequired); - count += nRequired; - } - - } finally { - out.close(); } } - public static void paddedCopy(Bucket from, Bucket to, long nBytes, - int blockSize) throws IOException { - + public static void paddedCopy(Bucket from, Bucket to, long nBytes, int blockSize) throws IOException { if (nBytes > blockSize) { throw new IllegalArgumentException("nBytes > blockSize"); } - OutputStream out = null; - InputStream in = null; - - try { - - out = to.getOutputStreamUnbuffered(); - byte[] buffer = new byte[16384]; - in = from.getInputStreamUnbuffered(); - + byte[] buffer = new byte[16384]; + try ( + OutputStream out = to.getOutputStreamUnbuffered(); + InputStream in = from.getInputStreamUnbuffered() + ) { long count = 0; while (count != nBytes) { long nRequired = nBytes - count; @@ -150,11 +136,6 @@ public static void paddedCopy(Bucket from, Bucket to, long nBytes, count += nRequired; } } - } finally { - if (in != null) - in.close(); - if (out != null) - out.close(); } } @@ -168,7 +149,7 @@ public static Bucket[] makeBuckets(BucketFactory bf, int count, int size) } public static int[] nullIndices(Bucket[] array) { - List list = new ArrayList(); + List list = new ArrayList<>(); for (int i = 0; i < array.length; i++) { if (array[i] == null) { list.add(i); @@ -183,7 +164,7 @@ public static int[] nullIndices(Bucket[] array) { } public static int[] nonNullIndices(Bucket[] array) { - List list = new ArrayList(); + List list = new ArrayList<>(); for (int i = 0; i < array.length; i++) { if (array[i] != null) { list.add(i); @@ -198,15 +179,14 @@ public static int[] nonNullIndices(Bucket[] array) { } public static Bucket[] nonNullBuckets(Bucket[] array) { - List list = new ArrayList(array.length); - for (int i = 0; i < array.length; i++) { - if (array[i] != null) { - list.add(array[i]); + List list = new ArrayList<>(array.length); + for (Bucket bucket : array) { + if (bucket != null) { + list.add(bucket); } } - Bucket[] ret = new Bucket[list.size()]; - return list.toArray(ret); + return list.toArray(new Bucket[0]); } /** @@ -219,36 +199,36 @@ public static Bucket[] nonNullBuckets(Bucket[] array) { */ public static byte[] toByteArray(Bucket bucket) throws IOException { long size = bucket.size(); - if(size > Integer.MAX_VALUE) throw new OutOfMemoryError(); + if (size > Integer.MAX_VALUE) { + throw new IllegalArgumentException(); + } byte[] data = new byte[(int)size]; - InputStream is = bucket.getInputStreamUnbuffered(); - DataInputStream dis = null; - try { - dis = new DataInputStream(is); + try ( + InputStream is = bucket.getInputStreamUnbuffered(); + DataInputStream dis = new DataInputStream(is) + ) { 
dis.readFully(data); - } finally { - Closer.close(dis); - Closer.close(is); } return data; } public static int toByteArray(Bucket bucket, byte[] output) throws IOException { long size = bucket.size(); - if(size > output.length) + if (size > output.length) { throw new IllegalArgumentException("Data does not fit in provided buffer"); - InputStream is = null; - try { - is = bucket.getInputStreamUnbuffered(); + } + try (InputStream is = bucket.getInputStreamUnbuffered()) { int moved = 0; - while(true) { - if(moved == size) return moved; - int x = is.read(output, moved, (int)(size - moved)); - if(x == -1) return moved; + while (true) { + if (moved == size) { + return moved; + } + int x = is.read(output, moved, (int) (size - moved)); + if (x == -1) { + return moved; + } moved += x; } - } finally { - if(is != null) is.close(); } } @@ -262,106 +242,110 @@ public static RandomAccessBucket makeImmutableBucket(BucketFactory bucketFactory public static RandomAccessBucket makeImmutableBucket(BucketFactory bucketFactory, byte[] data, int offset, int length) throws IOException { RandomAccessBucket bucket = bucketFactory.makeBucket(length); - OutputStream os = bucket.getOutputStreamUnbuffered(); - try { - os.write(data, offset, length); - } finally { - os.close(); + try (OutputStream os = bucket.getOutputStreamUnbuffered()) { + os.write(data, offset, length); } bucket.setReadOnly(); return bucket; } public static byte[] hash(Bucket data) throws IOException { - InputStream is = data.getInputStreamUnbuffered(); - try { + try (InputStream is = data.getInputStreamUnbuffered()) { MessageDigest md = SHA256.getMessageDigest(); - try { + try { long bucketLength = data.size(); long bytesRead = 0; byte[] buf = new byte[BUFFER_SIZE]; while ((bytesRead < bucketLength) || (bucketLength == -1)) { int readBytes = is.read(buf); - if (readBytes < 0) + if (readBytes < 0) { break; + } bytesRead += readBytes; - if (readBytes > 0) + if (readBytes > 0) { md.update(buf, 0, readBytes); + } } - if ((bytesRead < bucketLength) && (bucketLength > 0)) + if ((bytesRead < bucketLength) && (bucketLength > 0)) { throw new EOFException(); - if ((bytesRead != bucketLength) && (bucketLength > 0)) + } + if ((bytesRead != bucketLength) && (bucketLength > 0)) { throw new IOException("Read " + bytesRead + " but bucket length " + bucketLength + " on " + data + '!'); - byte[] retval = md.digest(); - return retval; + } + return md.digest(); } finally { SHA256.returnMessageDigest(md); } - } finally { - if(is != null) is.close(); } } /** Copy the given quantity of data from the given bucket to the given OutputStream. * @throws IOException If there was an error reading from the bucket or writing to the stream. */ public static long copyTo(Bucket decodedData, OutputStream os, long truncateLength) throws IOException { - if(truncateLength == 0) return 0; - if(truncateLength < 0) truncateLength = Long.MAX_VALUE; - InputStream is = decodedData.getInputStreamUnbuffered(); - try { + if (truncateLength == 0) { + return 0; + } + if (truncateLength < 0) { + truncateLength = Long.MAX_VALUE; + } + try (InputStream is = decodedData.getInputStreamUnbuffered()) { int bufferSize = BUFFER_SIZE; - if(truncateLength > 0 && truncateLength < bufferSize) bufferSize = (int) truncateLength; + if (truncateLength < bufferSize) { + bufferSize = (int) truncateLength; + } byte[] buf = new byte[bufferSize]; long moved = 0; - while(moved < truncateLength) { + while (moved < truncateLength) { // DO NOT move the (int) inside the Math.min()! big numbers truncate to negative numbers. 
int bytes = (int) Math.min(buf.length, truncateLength - moved); - if(bytes <= 0) - throw new IllegalStateException("bytes="+bytes+", truncateLength="+truncateLength+", moved="+moved); + if (bytes <= 0) { + throw new IllegalStateException("bytes=" + bytes + ", truncateLength=" + truncateLength + ", moved=" + moved); + } bytes = is.read(buf, 0, bytes); - if(bytes <= 0) { - if(truncateLength == Long.MAX_VALUE) + if (bytes <= 0) { + if (truncateLength == Long.MAX_VALUE) { break; - IOException ioException = new IOException("Could not move required quantity of data in copyTo: "+bytes+" (moved "+moved+" of "+truncateLength+"): unable to read from "+is); + } + IOException ioException = new IOException("Could not move required quantity of data in copyTo: " + bytes + " (moved " + moved + " of " + truncateLength + "): unable to read from " + is); ioException.printStackTrace(); - throw ioException; + throw ioException; } os.write(buf, 0, bytes); moved += bytes; } return moved; } finally { - is.close(); os.flush(); } } /** Copy data from an InputStream into a Bucket. */ public static void copyFrom(Bucket bucket, InputStream is, long truncateLength) throws IOException { - OutputStream os = bucket.getOutputStreamUnbuffered(); byte[] buf = new byte[BUFFER_SIZE]; - if(truncateLength < 0) truncateLength = Long.MAX_VALUE; - try { + if (truncateLength < 0) { + truncateLength = Long.MAX_VALUE; + } + try (OutputStream os = bucket.getOutputStreamUnbuffered()) { long moved = 0; - while(moved < truncateLength) { + while (moved < truncateLength) { // DO NOT move the (int) inside the Math.min()! big numbers truncate to negative numbers. int bytes = (int) Math.min(buf.length, truncateLength - moved); - if(bytes <= 0) - throw new IllegalStateException("bytes="+bytes+", truncateLength="+truncateLength+", moved="+moved); + if (bytes <= 0) { + throw new IllegalStateException("bytes=" + bytes + ", truncateLength=" + truncateLength + ", moved=" + moved); + } bytes = is.read(buf, 0, bytes); - if(bytes <= 0) { - if(truncateLength == Long.MAX_VALUE) + if (bytes <= 0) { + if (truncateLength == Long.MAX_VALUE) { break; + } IOException ioException = new IOException("Could not move required quantity of data in copyFrom: " - + bytes + " (moved " + moved + " of " + truncateLength + "): unable to read from " + is); + + bytes + " (moved " + moved + " of " + truncateLength + "): unable to read from " + is); ioException.printStackTrace(); throw ioException; } os.write(buf, 0, bytes); moved += bytes; } - } finally { - os.close(); } } @@ -384,49 +368,47 @@ public static void copyFrom(Bucket bucket, InputStream is, long truncateLength) * the provided bucket, or writing to created buckets. */ public static Bucket[] split(Bucket origData, int splitSize, BucketFactory bf, boolean freeData, boolean persistent) throws IOException { - if(origData instanceof FileBucket) { - if(freeData) { + if (origData instanceof FileBucket) { + if (freeData) { Logger.error(BucketTools.class, "Asked to free data when splitting a FileBucket ?!?!? 
Not freeing as this would clobber the split result..."); } - Bucket[] buckets = ((FileBucket)origData).split(splitSize); - if(persistent) - return buckets; + Bucket[] buckets = ((FileBucket) origData).split(splitSize); + if (persistent) { + return buckets; + } } long length = origData.size(); - if(length > ((long)Integer.MAX_VALUE) * splitSize) - throw new IllegalArgumentException("Way too big!: "+length+" for "+splitSize); + if (length > ((long) Integer.MAX_VALUE) * splitSize) { + throw new IllegalArgumentException("Way too big!: " + length + " for " + splitSize); + } int bucketCount = (int) (length / splitSize); - if(length % splitSize > 0) bucketCount++; - if(logMINOR) - Logger.minor(BucketTools.class, "Splitting bucket "+origData+" of size "+length+" into "+bucketCount+" buckets"); + if (length % splitSize > 0) { + bucketCount++; + } + if (logMINOR) { + Logger.minor(BucketTools.class, "Splitting bucket " + origData + " of size " + length + " into " + bucketCount + " buckets"); + } Bucket[] buckets = new Bucket[bucketCount]; - InputStream is = origData.getInputStreamUnbuffered(); - DataInputStream dis = null; - try { - dis = new DataInputStream(is); + byte[] buf = new byte[splitSize]; + try ( + InputStream is = origData.getInputStreamUnbuffered(); + DataInputStream dis = new DataInputStream(is) + ) { long remainingLength = length; - byte[] buf = new byte[splitSize]; - for(int i=0;i 0 && truncateLength < bufferSize) bufferSize = (int) truncateLength; - byte[] buf = new byte[bufferSize]; - long moved = 0; - while(moved < truncateLength) { - // DO NOT move the (int) inside the Math.min()! big numbers truncate to negative numbers. - int bytes = (int) Math.min(buf.length, truncateLength - moved); - if(bytes <= 0) - throw new IllegalStateException("bytes="+bytes+", truncateLength="+truncateLength+", moved="+moved); - bytes = is.read(buf, 0, bytes); - if(bytes <= 0) { - if(truncateLength == Long.MAX_VALUE) - break; - IOException ioException = new IOException("Could not move required quantity of data in copyTo: "+bytes+" (moved "+moved+" of "+truncateLength+"): unable to read from "+is); - ioException.printStackTrace(); - throw ioException; - } - raf.pwrite(fileOffset, buf, 0, bytes); - moved += bytes; - fileOffset += bytes; - } - return moved; - } finally { - is.close(); - } + public static long copyTo( + Bucket bucket, + RandomAccessBuffer raf, + long fileOffset, + long truncateLength + ) throws IOException { + if (truncateLength == 0) { + return 0; + } + if (truncateLength < 0) { + truncateLength = Long.MAX_VALUE; + } + try (InputStream is = bucket.getInputStreamUnbuffered()) { + int bufferSize = BUFFER_SIZE; + if (truncateLength < bufferSize) { + bufferSize = (int) truncateLength; + } + byte[] buf = new byte[bufferSize]; + long moved = 0; + while (moved < truncateLength) { + // DO NOT move the (int) inside the Math.min()! big numbers truncate to negative numbers. 
+ int bytes = (int) Math.min(buf.length, truncateLength - moved); + if (bytes <= 0) { + throw new IllegalStateException("bytes=" + bytes + ", truncateLength=" + truncateLength + ", moved=" + moved); + } + bytes = is.read(buf, 0, bytes); + if (bytes <= 0) { + if (truncateLength == Long.MAX_VALUE) { + break; + } + IOException ioException = new IOException("Could not move required quantity of data in copyTo: " + bytes + " (moved " + moved + " of " + truncateLength + "): unable to read from " + is); + ioException.printStackTrace(); + throw ioException; + } + raf.pwrite(fileOffset, buf, 0, bytes); + moved += bytes; + fileOffset += bytes; + } + return moved; + } } /** Inverse of Bucket.storeTo(). Uses the magic value to identify the bucket type. diff --git a/src/freenet/support/io/Closer.java b/src/freenet/support/io/Closer.java deleted file mode 100644 index 072f96eab6c..00000000000 --- a/src/freenet/support/io/Closer.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * freenet - Closer.java Copyright © 2007 David Roden - * - * This program is free software; you can redistribute it and/or modify it under - * the terms of the GNU General Public License as published by the Free Software - * Foundation; either version 2 of the License, or (at your option) any later - * version. - * - * This program is distributed in the hope that it will be useful, but WITHOUT - * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS - * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more - * details. - * - * You should have received a copy of the GNU General Public License along with - * this program; if not, write to the Free Software Foundation, Inc., 59 Temple - * Place - Suite 330, Boston, MA 02111-1307, USA. - */ - -package freenet.support.io; - -import java.io.Closeable; -import java.io.IOException; -import java.util.zip.ZipFile; - -import freenet.support.Logger; -import freenet.support.api.Bucket; - -/** - * Closes various resources. The resources are checked for being - * null before being closed, and every possible execption is - * swallowed. That makes this class perfect for use in the finally blocks of - * try-catch-finally blocks. - * - * @author David ‘Roden’ <bombe@freenetproject.org> - * @version $Id$ - * @deprecated Java 7 has a new language feature which mostly does what this class was for: - * The try with-resources Statement.
- * There are some differences with regards to swallowing Exceptions, please study them carefully when replacing Closer usage with it. - */ -@Deprecated -public class Closer { - /** - * Closes the given stream. - * - * @param closable The output stream to close - */ - public static void close(Closeable closable) { - if (closable != null) { - try { - closable.close(); - } catch (IOException e) { - Logger.error(Closer.class, "Error during close() on "+closable, e); - } - } - } - - /** - * Frees the given bucket. Notice that you have to do removeFrom() for persistent buckets yourself. - * @param bucket The Bucket to close. - */ - public static void close(Bucket bucket) { - if (bucket != null) { - try { - bucket.free(); - } catch(RuntimeException e) { - Logger.error(Closer.class, "Error during free().", e); - } - } - } - - /** - * Closes the given zip file. - * - * @param zipFile - * The zip file to close - */ - public static void close(ZipFile zipFile) { - if (zipFile != null) { - try { - zipFile.close(); - } catch (IOException e) { - Logger.error(Closer.class, "Error during close().", e); - } - } - } - -} diff --git a/src/freenet/support/io/FileUtil.java b/src/freenet/support/io/FileUtil.java index 352ed227cac..a43c4dc79a9 100644 --- a/src/freenet/support/io/FileUtil.java +++ b/src/freenet/support/io/FileUtil.java @@ -3,22 +3,11 @@ * http://www.gnu.org/ for further details of the GPL. */ package freenet.support.io; -import java.io.BufferedInputStream; -import java.io.DataInputStream; -import java.io.EOFException; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.io.RandomAccessFile; -import java.io.UnsupportedEncodingException; +import java.io.*; import java.lang.reflect.Method; import java.nio.CharBuffer; import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.security.SecureRandom; import java.util.Random; @@ -69,14 +58,26 @@ public static LineReadingInputStream getLogTailReader(File logfile, long byteLim lis.readLine(100000, 200, true); } } catch (IOException e) { - Closer.close(lis); - Closer.close(fis); - throw e; + if (lis != null) { + try { + lis.close(); + } catch (IOException e1) { + Logger.error(FileUtil.class, "Error during close() on "+ lis, e1); + } + } + if (fis != null) { + try { + fis.close(); + } catch (IOException e1) { + Logger.error(FileUtil.class, "Error during close() on "+ fis, e1); + } + } + throw e; } return lis; } - public static enum OperatingSystem { + public enum OperatingSystem { Unknown(false, false, false), // Special-cased in filename sanitising code. MacOS(false, true, true), // OS/X in that it can run scripts. Linux(false, false, true), @@ -94,7 +95,7 @@ public static enum OperatingSystem { }; }; - public static enum CPUArchitecture { + public enum CPUArchitecture { Unknown, X86, X86_64, @@ -148,24 +149,29 @@ private static OperatingSystem detectOperatingSystem() { // TODO: Move to the pr // Please adapt sanitizeFileName when adding new OS. 
- if(name.indexOf("win") >= 0) + if (name.contains("win")) { return OperatingSystem.Windows; + } - if(name.indexOf("mac") >= 0) + if (name.contains("mac")) { return OperatingSystem.MacOS; + } - if(name.indexOf("linux") >= 0) + if (name.contains("linux")) { return OperatingSystem.Linux; - - if(name.indexOf("freebsd") >= 0) + } + + if (name.contains("freebsd")) { return OperatingSystem.FreeBSD; - - if(name.indexOf("unix") >= 0) + } + + if (name.contains("unix")) { return OperatingSystem.GenericUnix; - else if(File.separatorChar == '/') + } else if (File.separatorChar == '/') { return OperatingSystem.GenericUnix; - else if(File.separatorChar == '\\') + } else if (File.separatorChar == '\\') { return OperatingSystem.Windows; + } Logger.error(FileUtil.class, "Unknown operating system:" + name); } catch(Throwable t) { @@ -231,13 +237,10 @@ public static long estimateUsage(File file, long flen) { long blockUsage = roundup_2n(flen, 4096); // Assume 512 byte filename entries, with 100 bytes overhead, for filename overhead (NTFS) String filename = file.getName(); - int nameLength; - try { - nameLength = Math.max(filename.getBytes("UTF-16").length, filename.getBytes("UTF-8").length) + 100; - } catch (UnsupportedEncodingException e) { - // Impossible. - throw new RuntimeException("UTF-16 or UTF-8 charset not supported?!"); - } + int nameLength = 100 + Math.max( + filename.getBytes(StandardCharsets.UTF_16).length, + filename.getBytes(StandardCharsets.UTF_8).length + ); long filenameUsage = roundup_2n(nameLength, 512); // Assume 50 bytes per block tree overhead with 1kB blocks (reiser3 worst case) long extra = (roundup_2n(flen, 1024) / 1024) * 50; @@ -309,27 +312,19 @@ public static StringBuilder readUTF(File file) throws FileNotFoundException, IOE */ public static StringBuilder readUTF(File file, long offset) throws FileNotFoundException, IOException { StringBuilder result = new StringBuilder(); - FileInputStream fis = null; - BufferedInputStream bis = null; - InputStreamReader isr = null; - try { - fis = new FileInputStream(file); + try (FileInputStream fis = new FileInputStream(file)){ skipFully(fis, offset); - bis = new BufferedInputStream(fis); - isr = new InputStreamReader(bis, "UTF-8"); - - char[] buf = new char[4096]; - int length = 0; - - while((length = isr.read(buf)) > 0) { - result.append(buf, 0, length); + try ( + BufferedInputStream bis = new BufferedInputStream(fis); + InputStreamReader isr = new InputStreamReader(bis, StandardCharsets.UTF_8); + ) { + char[] buf = new char[4096]; + int length; + while((length = isr.read(buf)) > 0) { + result.append(buf, 0, length); + } } - - } finally { - Closer.close(isr); - Closer.close(bis); - Closer.close(fis); } return result; } @@ -354,16 +349,12 @@ public static StringBuilder readUTF(InputStream stream) throws IOException { public static StringBuilder readUTF(InputStream stream, long offset) throws IOException { StringBuilder result = new StringBuilder(); skipFully(stream, offset); - InputStreamReader reader = null; - try { - reader = new InputStreamReader(stream, "UTF-8"); + try (InputStreamReader reader = new InputStreamReader(stream, StandardCharsets.UTF_8)) { char[] buf = new char[4096]; - int length = 0; + int length; while((length = reader.read(buf)) > 0) { result.append(buf, 0, length); } - } finally { - Closer.close(reader); } return result; } @@ -380,30 +371,27 @@ public static void skipFully(InputStream is, long skip) throws IOException { } } - public static boolean writeTo(InputStream input, File target) throws FileNotFoundException, 
IOException { - DataInputStream dis = null; - FileOutputStream fos = null; + public static boolean writeTo(InputStream input, File target) throws IOException { + File file = File.createTempFile("temp", ".tmp", target.getParentFile()); - if(logMINOR) + if(logMINOR) { Logger.minor(FileUtil.class, "Writing to "+file+" to be renamed to "+target); + } - try { - dis = new DataInputStream(input); - fos = new FileOutputStream(file); - - int len = 0; + try ( + DataInputStream dis = new DataInputStream(input); + FileOutputStream fos = new FileOutputStream(file) + ) { byte[] buffer = new byte[4096]; + int len; while ((len = dis.read(buffer)) > 0) { fos.write(buffer, 0, len); } - } finally { - if(dis != null) dis.close(); - if(fos != null) fos.close(); } - if(FileUtil.renameTo(file, target)) + if(FileUtil.renameTo(file, target)) { return true; - else { + } else { file.delete(); return false; } @@ -679,25 +667,22 @@ public static boolean removeAll(File wd) { public static void secureDelete(File file) throws IOException { // FIXME somebody who understands these things should have a look at this... - if(!file.exists()) return; + if(!file.exists()) { + return; + } long size = file.length(); if(size > 0) { - RandomAccessFile raf = null; - try { - System.out.println("Securely deleting "+file+" which is of length "+size+" bytes..."); - raf = new RandomAccessFile(file, "rw"); + System.out.println("Securely deleting "+file+" which is of length "+size+" bytes..."); + try (RandomAccessFile raf = new RandomAccessFile(file, "rw")) { // Random data first. raf.seek(0); fill(new RandomAccessFileOutputStream(raf), size); raf.getFD().sync(); - raf.close(); - raf = null; - } finally { - Closer.close(raf); } } - if((!file.delete()) && file.exists()) + if((!file.delete()) && file.exists()) { throw new IOException("Unable to delete file "+file); + } } @Deprecated diff --git a/src/freenet/support/io/TempBucketFactory.java b/src/freenet/support/io/TempBucketFactory.java index a12b93d5add..be9f1b1917c 100644 --- a/src/freenet/support/io/TempBucketFactory.java +++ b/src/freenet/support/io/TempBucketFactory.java @@ -5,12 +5,7 @@ import static java.util.concurrent.TimeUnit.MINUTES; -import java.io.BufferedInputStream; -import java.io.BufferedOutputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; +import java.io.*; import java.lang.ref.WeakReference; import java.security.GeneralSecurityException; import java.util.ArrayList; @@ -127,7 +122,7 @@ public TempBucket(long now, RandomAccessBucket cur) { this.currentBucket = cur; this.creationTime = now; this.osIndex = 0; - this.tbis = new ArrayList(1); + this.tbis = new ArrayList<>(1); if(logMINOR) Logger.minor(TempBucket.class, "Created "+this, new Exception("debug")); } @@ -146,7 +141,11 @@ private synchronized void closeInputStreams(boolean forFree) { is._maybeResetInputStream(); } catch(IOException e) { i.remove(); - Closer.close(is); + try { + is.close(); + } catch (IOException e1) { + Logger.error(this, "Caught "+e+" closing "+is); + } } } } @@ -154,7 +153,7 @@ private synchronized void closeInputStreams(boolean forFree) { /** A blocking method to force-migrate from a RAMBucket to a FileBucket */ public final boolean migrateToDisk() throws IOException { - Bucket toMigrate = null; + Bucket toMigrate; long size; synchronized(this) { if(!isRAMBucket() || hasBeenFreed) @@ -172,11 +171,8 @@ public final boolean migrateToDisk() throws IOException { BucketTools.copyTo(toMigrate, os, size); } else { if(size > 
-                    OutputStream temp = tempFB.getOutputStreamUnbuffered();
-                    try {
-                        BucketTools.copyTo(toMigrate, temp, size);
-                    } finally {
-                        temp.close();
+                    try (OutputStream temp = tempFB.getOutputStreamUnbuffered()) {
+                        BucketTools.copyTo(toMigrate, temp, size);
                     }
                 }
             }
@@ -282,7 +278,7 @@ public final void write(int b) throws IOException {
             }
 
             @Override
-            public final void write(byte b[], int off, int len) throws IOException {
+            public final void write(byte[] b, int off, int len) throws IOException {
                 synchronized(TempBucket.this) {
                     if(hasBeenFreed) throw new IOException("Already freed");
                     long futureSize = currentSize + len;
@@ -351,7 +347,13 @@ public void _maybeResetInputStream() throws IOException {
                 if(idx != osIndex)
                     close();
                 else {
-                    Closer.close(currentIS);
+                    if (currentIS != null) {
+                        try {
+                            currentIS.close();
+                        } catch (IOException e) {
+                            Logger.error(this, "Error during close() on "+ currentIS, e);
+                        }
+                    }
                     currentIS = currentBucket.getInputStreamUnbuffered();
                     long toSkip = index;
                     while(toSkip > 0) {
@@ -372,7 +374,7 @@ public final int read() throws IOException {
             }
 
             @Override
-            public int read(byte b[]) throws IOException {
+            public int read(byte[] b) throws IOException {
                 synchronized(TempBucket.this) {
                     if(hasBeenFreed) throw new IOException("Already freed");
                     return read(b, 0, b.length);
@@ -380,7 +382,7 @@ public int read(byte b[]) throws IOException {
             }
 
             @Override
-            public int read(byte b[], int off, int len) throws IOException {
+            public int read(byte[] b, int off, int len) throws IOException {
                 synchronized(TempBucket.this) {
                     if(hasBeenFreed) throw new IOException("Already freed");
                     int toReturn = currentIS.read(b, off, len);
@@ -416,7 +418,13 @@ public boolean markSupported() {
             @Override
             public final void close() throws IOException {
                 synchronized(TempBucket.this) {
-                    Closer.close(currentIS);
+                    if (currentIS != null) {
+                        try {
+                            currentIS.close();
+                        } catch (IOException e) {
+                            Logger.error(this, "Error during close() on "+ currentIS, e);
+                        }
+                    }
                     tbis.remove(this);
                 }
             }
@@ -448,9 +456,15 @@ public synchronized void free() {
             synchronized(this) {
                 if(hasBeenFreed) return;
                 hasBeenFreed = true;
-
-                Closer.close(os);
-                closeInputStreams(true);
+
+                if (os != null) {
+                    try {
+                        os.close();
+                    } catch (IOException e) {
+                        Logger.error(this, "Error during close() on "+ os, e);
+                    }
+                }
+                closeInputStreams(true);
                 if(isRAMBucket()) {
                     // If it's in memory we must free before removing from the queue.
                     currentBucket.free();
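The TempBucketFactory changes above inline the same null-check/close/log pattern in several places (closeInputStreams, _maybeResetInputStream, the input stream's close(), and free()). Below is a minimal sketch of what that repeated block does, written as a hypothetical stand-alone helper; the name QuietCloser.closeQuietly and the use of System.err instead of freenet.support.Logger are illustrative assumptions, not part of the patch.

// Illustrative only -- not part of this patch. A hypothetical helper that keeps the
// old Closer.close() convenience (null-safe, never throws) while still logging failures.
import java.io.Closeable;
import java.io.IOException;

final class QuietCloser {
    private QuietCloser() {}

    /** Closes the resource if non-null, logging any IOException instead of propagating it. */
    static void closeQuietly(Closeable resource) {
        if (resource == null) {
            return;
        }
        try {
            resource.close();
        } catch (IOException e) {
            // The real code would call Logger.error(...); System.err keeps this sketch self-contained.
            System.err.println("Error during close() on " + resource + ": " + e);
        }
    }
}

The patch removes the shared Closer utility in favour of explicit blocks; a helper like this would simply centralise the logging variant of that pattern if the repetition were a concern.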
diff --git a/src/freenet/tools/CleanupTranslations.java b/src/freenet/tools/CleanupTranslations.java
index 0e0d070eff5..3178dc75d1b 100644
--- a/src/freenet/tools/CleanupTranslations.java
+++ b/src/freenet/tools/CleanupTranslations.java
@@ -9,11 +9,11 @@ import java.io.InputStreamReader;
 import java.io.OutputStreamWriter;
 import java.io.StringWriter;
+import java.nio.charset.StandardCharsets;
 
 import freenet.support.Logger;
 import freenet.support.LoggerHook;
 import freenet.support.SimpleFieldSet;
-import freenet.support.io.Closer;
 
 public class CleanupTranslations {
 
@@ -26,56 +26,64 @@ public static void main(String[] args) throws IOException, LoggerHook.InvalidThr
         File engFile = new File("src/freenet/l10n/freenet.l10n.en.properties");
         SimpleFieldSet english = SimpleFieldSet.readFrom(engFile, false, true);
         File[] translations = new File("src/freenet/l10n").listFiles();
+        if (translations == null) {
+            return;
+        }
         for(File f : translations) {
             String name = f.getName();
-            if(!name.startsWith("freenet.l10n.")) continue;
-            if(name.equals("freenet.1l0n.en.properties")) continue;
-            FileInputStream fis = new FileInputStream(f);
-            InputStreamReader isr = new InputStreamReader(new BufferedInputStream(fis), "UTF-8");
-            BufferedReader br = new BufferedReader(isr);
-            StringWriter sw = new StringWriter();
+            if(!name.startsWith("freenet.l10n.")) {
+                continue;
+            }
+            if(name.equals("freenet.1l0n.en.properties")) {
+                continue;
+            }
             boolean changed = false;
-            while(true) {
-                String line = br.readLine();
-                if(line == null) {
-                    System.err.println("File does not end in End: "+f);
-                    System.exit(4);
-                }
-                int idx = line.indexOf('=');
-                if(idx == -1) {
-                    // Last line
-                    if(!line.equals("End")) {
-                        System.err.println("Line with no equals (file does not end in End???): "+f+" - \""+line+"\"");
-                        System.exit(1);
+            StringWriter sw = new StringWriter();
+            try (
+                FileInputStream fis = new FileInputStream(f);
+                InputStreamReader isr = new InputStreamReader(new BufferedInputStream(fis), StandardCharsets.UTF_8);
+                BufferedReader br = new BufferedReader(isr)
+            ) {
+                while (true) {
+                    String line = br.readLine();
+                    if (line == null) {
+                        System.err.println("File does not end in End: " + f);
+                        System.exit(4);
                     }
-                    sw.append(line+"\n");
-                    line = br.readLine();
-                    if(line != null) {
-                        System.err.println("Content after End: \""+line+"\"");
-                        System.exit(2);
+                    int idx = line.indexOf('=');
+                    if (idx == -1) {
+                        // Last line
+                        if (!line.equals("End")) {
+                            System.err.println("Line with no equals (file does not end in End???): " + f + " - \"" + line + "\"");
+                            System.exit(1);
+                        }
+                        sw.append(line).append("\n");
+                        line = br.readLine();
+                        if (line != null) {
+                            System.err.println("Content after End: \"" + line + "\"");
+                            System.exit(2);
+                        }
+                        break;
                     }
-                    break;
-                }
                 String before = line.substring(0, idx);
-                //String after = line.substring(idx+1);
-                String s = english.get(before);
-                if(s == null) {
-                    System.err.println("Orphaned string: \""+before+"\" in "+f);
-                    changed = true;
-                    continue;
+                    String before = line.substring(0, idx);
+                    //String after = line.substring(idx+1);
+                    String s = english.get(before);
+                    if (s == null) {
+                        System.err.println("Orphaned string: \"" + before + "\" in " + f);
+                        changed = true;
+                        continue;
+                    }
+                    sw.append(line).append("\n");
                 }
-                sw.append(line+"\n");
             }
-            Closer.close(fis);
-            Closer.close(isr);
-            Closer.close(br);
-            if(!changed) continue;
-            FileOutputStream fos = new FileOutputStream(f);
-            OutputStreamWriter osw = new OutputStreamWriter(fos, "UTF-8");
-            try {
+            if(!changed) {
+                continue;
+            }
+            try (
+                FileOutputStream fos = new FileOutputStream(f);
+                OutputStreamWriter osw = new OutputStreamWriter(fos, StandardCharsets.UTF_8)
+            ) {
                 osw.write(sw.toString());
-            } finally {
-                osw.close();
             }
             System.out.println("Rewritten "+f);
         }
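In CleanupTranslations the readers and writers now take StandardCharsets.UTF_8 instead of the charset name "UTF-8". Beyond style, the Charset overloads of InputStreamReader and OutputStreamWriter do not declare the checked UnsupportedEncodingException that the String overloads do, so callers no longer have to handle an exception that cannot occur for a guaranteed charset. A small self-contained sketch (the class name is illustrative only):

// Illustrative only -- not part of this patch.
import java.io.ByteArrayOutputStream;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;

class CharsetOverloadDemo {
    public static void main(String[] args) throws UnsupportedEncodingException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // Old style: the String-based constructor forces the checked exception to be declared or caught.
        OutputStreamWriter legacy = new OutputStreamWriter(out, "UTF-8");
        // New style: no checked exception, and the charset name cannot be mistyped.
        OutputStreamWriter preferred = new OutputStreamWriter(out, StandardCharsets.UTF_8);
    }
}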
diff --git a/test/freenet/crypt/ciphers/RijndaelTest.java b/test/freenet/crypt/ciphers/RijndaelTest.java
index 99e9af1889c..30419ecea0b 100644
--- a/test/freenet/crypt/ciphers/RijndaelTest.java
+++ b/test/freenet/crypt/ciphers/RijndaelTest.java
@@ -9,6 +9,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.security.InvalidKeyException;
 import java.security.NoSuchAlgorithmException;
 import java.util.Arrays;
@@ -25,7 +26,6 @@ import freenet.crypt.CTRBlockCipherTest;
 import freenet.crypt.UnsupportedCipherException;
 import freenet.support.HexUtil;
-import freenet.support.io.Closer;
 
 /**
  * @author sdiz
 */
@@ -1915,7 +1915,7 @@ public void testNonStandardTestVK() throws UnsupportedCipherException {
 //            System.out.println("\t\t\t/* I="+(i+1)+" */");
 //            System.out.println("\t\t\t{ HexUtil.hexToBytes(\""+HexUtil.bytesToHex(TEST_VK256x256[i][0])+"\"),");
 //            System.out.println("\t\t\t\t\tHexUtil.hexToBytes(\""+HexUtil.bytesToHex(cipher)+"\") }, //");
-            assertTrue("ECB_VK KEYSIZE=256 I=" + (i + 1), Arrays.equals(cipher, TEST_VK256x256[i][1]));
+            assertArrayEquals("ECB_VK KEYSIZE=256 I=" + (i + 1), cipher, TEST_VK256x256[i][1]);
         }
     }
 
@@ -1938,12 +1938,19 @@ public void testGladmanTestVectors() throws UnsupportedCipherException, IOExcept
     private void checkGladmanTestVectors(String type) throws UnsupportedCipherException, IOException, NoSuchAlgorithmException, NoSuchPaddingException, InvalidKeyException, IllegalBlockSizeException, BadPaddingException {
         for(int testNumber : GLADMAN_TEST_NUMBERS) {
-            InputStream is = null;
-            try {
-                is = getClass().getResourceAsStream("/freenet/crypt/ciphers/rijndael-gladman-test-data/ecbn"+type+testNumber+".txt");
-                InputStreamReader isr = new InputStreamReader(is, "ISO-8859-1");
-                BufferedReader br = new BufferedReader(isr);
-                for(int i=0;i<7;i++) br.readLine(); // Skip header
+            String fileName = "/freenet/crypt/ciphers/rijndael-gladman-test-data/ecbn" + type + testNumber + ".txt";
+            InputStream is = getClass().getResourceAsStream(fileName);
+            if (is == null) {
+                throw new IllegalStateException("File does not exist: " + fileName);
+            }
+            try (
+                InputStream isToClose = is;
+                InputStreamReader isr = new InputStreamReader(isToClose, StandardCharsets.ISO_8859_1);
+                BufferedReader br = new BufferedReader(isr)
+            ){
+                for (int i=0;i<7;i++) {
+                    br.readLine(); // Skip header
+                }
                 String line = br.readLine();
                 int blockSize = Integer.parseInt(line.substring("BLOCKSIZE=".length()));
                 line = br.readLine();
@@ -1963,18 +1970,22 @@ private void checkGladmanTestVectors(String type) throws UnsupportedCipherExcept
                         test = Integer.parseInt(line.substring(6));
                     } else {
                         byte[] data = HexUtil.hexToBytes(line.substring(6));
-                        if(prefix.equals("PT= ")) {
-                            assertTrue(plaintext == null);
-                            plaintext = data;
-                            assertEquals(plaintext.length, blockSize/8);
-                        } else if(prefix.equals("KEY= ")) {
-                            assertTrue(key == null);
-                            key = data;
-                            assertEquals(key.length, keySize/8);
-                        } else if(prefix.equals("CT= ")) {
-                            assertTrue(ciphertext == null);
-                            ciphertext = data;
-                            assertEquals(ciphertext.length, blockSize/8);
+                        switch (prefix) {
+                            case "PT= ":
+                                assertNull(plaintext);
+                                plaintext = data;
+                                assertEquals(plaintext.length, blockSize / 8);
+                                break;
+                            case "KEY= ":
+                                assertNull(key);
+                                key = data;
+                                assertEquals(key.length, keySize / 8);
+                                break;
+                            case "CT= ":
+                                assertNull(ciphertext);
+                                ciphertext = data;
+                                assertEquals(ciphertext.length, blockSize / 8);
+                                break;
                         }
                         if(plaintext != null && ciphertext != null && key != null) {
                             Rijndael cipher = new Rijndael(keySize, blockSize);
@@ -1983,12 +1994,12 @@ private void checkGladmanTestVectors(String type) throws UnsupportedCipherExcept
                             byte[] copyOfPlaintext = Arrays.copyOf(plaintext, plaintext.length);
                             byte[] output = new byte[blockSize/8];
                             cipher.encipher(copyOfPlaintext, output);
-                            assertTrue(Arrays.equals(output, ciphertext));
+                            assertArrayEquals(output, ciphertext);
                             // Decrypt
                             byte[] copyOfCiphertext = Arrays.copyOf(ciphertext, ciphertext.length);
                             Arrays.fill(output, (byte)0);
                             cipher.decipher(copyOfCiphertext, output);
-                            assertTrue(Arrays.equals(output, plaintext));
+                            assertArrayEquals(output, plaintext);
                             if(blockSize == 128) {
                                 if(keySize == 128 || CTRBlockCipherTest.TEST_JCA) {
                                     // We can test with JCA too.
@@ -1998,12 +2009,12 @@ private void checkGladmanTestVectors(String type) throws UnsupportedCipherExcept
                                     Cipher c = Cipher.getInstance("AES/ECB/NOPADDING");
                                     c.init(Cipher.ENCRYPT_MODE, k);
                                     output = c.doFinal(plaintext);
-                                    assertTrue(Arrays.equals(output, ciphertext));
+                                    assertArrayEquals(output, ciphertext);
                                     // Decrypt.
                                     c.init(Cipher.DECRYPT_MODE, k);
                                     output = c.doFinal(ciphertext);
-                                    assertTrue(Arrays.equals(output, plaintext));
+                                    assertArrayEquals(output, plaintext);
                                 }
                             }
                             // Clear
@@ -2015,8 +2026,6 @@ private void checkGladmanTestVectors(String type) throws UnsupportedCipherExcept
                         }
                     }
                 }
-            } finally {
-                Closer.close(is);
             }
         }
     }
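RijndaelTest now uses assertArrayEquals instead of assertTrue(Arrays.equals(...)). The two are equivalent when the assertion passes; the difference is in the failure output, where assertArrayEquals reports the first mismatching index and the differing values rather than only "expected true". A minimal JUnit 4 sketch (the test class and values are illustrative only, not part of the patch):

// Illustrative only -- not part of this patch.
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertTrue;

import java.util.Arrays;

import org.junit.Test;

public class ArrayAssertionDemoTest {

    @Test
    public void arrayAssertionStyles() {
        byte[] expected = {1, 2, 3};
        byte[] actual = {1, 2, 3};
        // Both pass here; the difference only shows in the failure message when they do not.
        assertTrue("legacy style", Arrays.equals(expected, actual));
        assertArrayEquals("preferred style", expected, actual);
    }
}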
diff --git a/test/freenet/support/compress/Bzip2CompressorTest.java b/test/freenet/support/compress/Bzip2CompressorTest.java
index 858395992e1..97c934036db 100644
--- a/test/freenet/support/compress/Bzip2CompressorTest.java
+++ b/test/freenet/support/compress/Bzip2CompressorTest.java
@@ -17,7 +17,6 @@ import freenet.support.api.BucketFactory;
 import freenet.support.io.ArrayBucket;
 import freenet.support.io.ArrayBucketFactory;
-import freenet.support.io.Closer;
 import freenet.support.io.NullBucket;
 
 /**
@@ -135,21 +134,15 @@ public void testDecompressException() throws IOException {
         Bucket inBucket = new ArrayBucket(compressedData);
         NullBucket outBucket = new NullBucket();
 
-        InputStream decompressorInput = null;
-        OutputStream decompressorOutput = null;
-
-        try {
-            decompressorInput = inBucket.getInputStream();
-            decompressorOutput = outBucket.getOutputStream();
+        try (
+            InputStream decompressorInput = inBucket.getInputStream();
+            OutputStream decompressorOutput = outBucket.getOutputStream()
+        ) {
             Compressor.COMPRESSOR_TYPE.BZIP2.decompress(decompressorInput, decompressorOutput, 4096 + 10, 4096 + 20);
-            decompressorInput.close();
-            decompressorOutput.close();
         } catch (CompressionOutputSizeException e) {
             // expect this
             return;
         } finally {
-            Closer.close(decompressorInput);
-            Closer.close(decompressorOutput);
             inBucket.free();
             outBucket.free();
         }
@@ -158,21 +151,13 @@ public void testDecompressException() throws IOException {
     }
 
     private byte[] doBucketDecompress(byte[] compressedData) throws IOException {
-        ByteArrayInputStream decompressorInput = new ByteArrayInputStream(compressedData);
-        ByteArrayOutputStream decompressorOutput = new ByteArrayOutputStream();
-
-        Compressor.COMPRESSOR_TYPE.BZIP2.decompress(decompressorInput, decompressorOutput, 32768, 32768 * 2);
-
-        byte[] outBuf = decompressorOutput.toByteArray();
-        try {
-            decompressorInput.close();
-            decompressorOutput.close();
-        } finally {
-            Closer.close(decompressorInput);
-            Closer.close(decompressorOutput);
+        try (
+            ByteArrayInputStream decompressorInput = new ByteArrayInputStream(compressedData);
+            ByteArrayOutputStream decompressorOutput = new ByteArrayOutputStream()
+        ) {
+            Compressor.COMPRESSOR_TYPE.BZIP2.decompress(decompressorInput, decompressorOutput, 32768, 32768 * 2);
+            return decompressorOutput.toByteArray();
         }
-
-        return outBuf;
     }
 
     private byte[] doCompress(byte[] uncompressedData) throws IOException {
diff --git a/test/freenet/support/compress/NewLzmaCompressorTest.java b/test/freenet/support/compress/NewLzmaCompressorTest.java
index e4b6fc54ce6..41e477d42ec 100644
--- a/test/freenet/support/compress/NewLzmaCompressorTest.java
+++ b/test/freenet/support/compress/NewLzmaCompressorTest.java
@@ -8,6 +8,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.util.Arrays;
 import java.util.Random;
 
 import org.junit.Test;
@@ -16,7 +17,6 @@ import freenet.support.api.BucketFactory;
 import freenet.support.io.ArrayBucket;
 import freenet.support.io.ArrayBucketFactory;
-import freenet.support.io.Closer;
 import freenet.support.io.NullBucket;
 
 /**
@@ -143,29 +143,21 @@ public void testDecompressException() throws IOException {
         // build 5k array
         byte[] uncompressedData = new byte[5 * 1024];
-        for(int i = 0; i < uncompressedData.length; i++) {
-            uncompressedData[i] = 1;
-        }
+        Arrays.fill(uncompressedData, (byte) 1);
 
         byte[] compressedData = doCompress(uncompressedData);
 
         Bucket inBucket = new ArrayBucket(compressedData);
         NullBucket outBucket = new NullBucket();
 
-        InputStream decompressorInput = null;
-        OutputStream decompressorOutput = null;
-
-        try {
-            decompressorInput = inBucket.getInputStream();
-            decompressorOutput = outBucket.getOutputStream();
+        try (
+            InputStream decompressorInput = inBucket.getInputStream();
+            OutputStream decompressorOutput = outBucket.getOutputStream()
+        ) {
             Compressor.COMPRESSOR_TYPE.LZMA_NEW.decompress(decompressorInput, decompressorOutput, 4096 + 10, 4096 + 20);
-            decompressorInput.close();
-            decompressorOutput.close();
         } catch (CompressionOutputSizeException e) {
             // expect this
             return;
         } finally {
-            Closer.close(decompressorInput);
-            Closer.close(decompressorOutput);
             inBucket.free();
             outBucket.free();
         }
@@ -176,16 +168,16 @@ public void testDecompressException() throws IOException {
     private byte[] doCompress(byte[] uncompressedData) throws IOException {
         Bucket inBucket = new ArrayBucket(uncompressedData);
         BucketFactory factory = new ArrayBucketFactory();
-        Bucket outBucket = null;
-        outBucket = Compressor.COMPRESSOR_TYPE.LZMA_NEW.compress(inBucket, factory, uncompressedData.length, uncompressedData.length * 2 + 64);
+        Bucket outBucket = Compressor.COMPRESSOR_TYPE.LZMA_NEW.compress(inBucket, factory, uncompressedData.length, uncompressedData.length * 2L + 64);
 
-        InputStream in = null;
-        in = outBucket.getInputStream();
-        long size = outBucket.size();
-        byte[] outBuf = new byte[(int) size];
+        byte[] outBuf;
+        try (InputStream in = outBucket.getInputStream()) {
+            long size = outBucket.size();
+            outBuf = new byte[(int) size];
 
-        in.read(outBuf);
+            in.read(outBuf);
+        }
 
         return outBuf;
     }
diff --git a/test/freenet/support/compress/OldLZMACompressorTest.java b/test/freenet/support/compress/OldLZMACompressorTest.java
index 3f785d91529..f037e0ab58a 100644
--- a/test/freenet/support/compress/OldLZMACompressorTest.java
+++ b/test/freenet/support/compress/OldLZMACompressorTest.java
@@ -10,6 +10,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.util.Arrays;
 
 import org.junit.Test;
 
@@ -21,7 +22,6 @@ import freenet.support.compress.Compressor.COMPRESSOR_TYPE;
 import freenet.support.io.ArrayBucket;
 import freenet.support.io.ArrayBucketFactory;
-import freenet.support.io.Closer;
 import freenet.support.io.NullBucket;
 
 /**
@@ -244,33 +244,25 @@ public void testCompressException() throws IOException {
     }
 
     @Test
-    public void testDecompressException() throws IOException, CompressionRatioException {
+    public void testDecompressException() throws IOException {
         // build 5k array
         byte[] uncompressedData = new byte[5 * 1024];
-        for(int i = 0; i < uncompressedData.length; i++) {
-            uncompressedData[i] = 1;
-        }
+        Arrays.fill(uncompressedData, (byte) 1);
 
         byte[] compressedData = doCompress(uncompressedData);
 
         Bucket inBucket = new ArrayBucket(compressedData);
         NullBucket outBucket = new NullBucket();
 
-        InputStream decompressorInput = null;
-        OutputStream decompressorOutput = null;
-
-        try {
-            decompressorInput = inBucket.getInputStream();
-            decompressorOutput = outBucket.getOutputStream();
+        try (
+            InputStream decompressorInput = inBucket.getInputStream();
+            OutputStream decompressorOutput = outBucket.getOutputStream()
+        ) {
             Compressor.COMPRESSOR_TYPE.LZMA.decompress(decompressorInput, decompressorOutput, 4096 + 10, 4096 + 20);
-            decompressorInput.close();
-            decompressorOutput.close();
         } catch (CompressionOutputSizeException e) {
             // expect this
             return;
         } finally {
-            Closer.close(decompressorInput);
-            Closer.close(decompressorOutput);
             inBucket.free();
             outBucket.free();
         }
@@ -278,38 +270,30 @@ public void testDecompressException() throws IOException, CompressionRatioExcept
         //fail("did not throw expected CompressionOutputSizeException");
     }
 
-    private byte[] doCompress(byte[] uncompressedData) throws IOException, CompressionRatioException {
+    private byte[] doCompress(byte[] uncompressedData) throws IOException {
         Bucket inBucket = new ArrayBucket(uncompressedData);
         BucketFactory factory = new ArrayBucketFactory();
-        Bucket outBucket = null;
-        outBucket = Compressor.COMPRESSOR_TYPE.LZMA.compress(inBucket, factory, uncompressedData.length, uncompressedData.length * 2 + 64);
+        Bucket outBucket = COMPRESSOR_TYPE.LZMA.compress(inBucket, factory, uncompressedData.length, uncompressedData.length * 2L + 64);
 
-        InputStream in = null;
-        in = outBucket.getInputStream();
-        long size = outBucket.size();
-        byte[] outBuf = new byte[(int) size];
+        byte[] outBuf;
+        try (InputStream in = outBucket.getInputStream()) {
+            long size = outBucket.size();
+            outBuf = new byte[(int) size];
 
-        in.read(outBuf);
+            in.read(outBuf);
+        }
 
         return outBuf;
     }
 
     private byte[] doBucketDecompress(byte[] compressedData) throws IOException {
-        ByteArrayInputStream decompressorInput = new ByteArrayInputStream(compressedData);
-        ByteArrayOutputStream decompressorOutput = new ByteArrayOutputStream();
-
-        COMPRESSOR_TYPE.LZMA.decompress(decompressorInput, decompressorOutput, 32768, 32768 * 2);
-
-        byte[] outBuf = decompressorOutput.toByteArray();
-        try {
-            decompressorInput.close();
-            decompressorOutput.close();
-        } finally {
-            Closer.close(decompressorInput);
-            Closer.close(decompressorOutput);
+        try (
+            ByteArrayInputStream decompressorInput = new ByteArrayInputStream(compressedData);
+            ByteArrayOutputStream decompressorOutput = new ByteArrayOutputStream();
+        ) {
+            COMPRESSOR_TYPE.LZMA.decompress(decompressorInput, decompressorOutput, 32768, 32768 * 2);
+            return decompressorOutput.toByteArray();
        }
-
-        return outBuf;
     }
 }
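All of the conversions in this diff lean on the same try-with-resources guarantees: resources are closed in reverse declaration order, close() runs whether or not the body throws, and an exception thrown by close() is attached to the primary exception as a suppressed exception instead of replacing it. That is why the removed finally/Closer.close() blocks are no longer needed. A self-contained sketch of that behaviour (the class and resource names are illustrative only, not part of the patch):

// Illustrative only -- not part of this patch. Demonstrates reverse-order closing and
// suppressed exceptions, the behaviour the conversions above rely on.
public class TryWithResourcesDemo {

    static class NoisyResource implements AutoCloseable {
        private final String name;
        NoisyResource(String name) { this.name = name; }
        @Override
        public void close() {
            System.out.println("closing " + name);
        }
    }

    public static void main(String[] args) {
        try (NoisyResource first = new NoisyResource("first");
             NoisyResource second = new NoisyResource("second")) {
            System.out.println("body");
        }
        // Prints: body, closing second, closing first -- resources close in reverse order.

        try (AutoCloseable failing = () -> { throw new IllegalStateException("close failed"); }) {
            throw new RuntimeException("primary failure");
        } catch (Exception e) {
            // The close() failure does not mask the primary exception; it is recorded as suppressed.
            System.out.println(e.getMessage() + " / suppressed: " + e.getSuppressed()[0].getMessage());
        }
    }
}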