diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/analysis/Ctags.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/analysis/Ctags.java index faaafc3bb14..322c5186519 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/analysis/Ctags.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/analysis/Ctags.java @@ -110,7 +110,7 @@ public void close() throws IOException { IOUtils.close(ctagsIn); if (ctags != null) { closing = true; - LOGGER.log(Level.FINE, "Destroying ctags command"); + LOGGER.log(Level.FINEST, "Destroying ctags command"); ctags.destroyForcibly(); } } @@ -178,7 +178,7 @@ private void initialize() throws IOException { /* Add extra command line options for ctags. */ if (CTagsExtraOptionsFile != null) { - LOGGER.log(Level.INFO, "Adding extra options to ctags"); + LOGGER.log(Level.FINEST, "Adding extra options to ctags"); command.add("--options=" + CTagsExtraOptionsFile); } @@ -187,7 +187,7 @@ private void initialize() throws IOException { sb.append(s).append(" "); } String commandStr = sb.toString(); - LOGGER.log(Level.FINE, "Executing ctags command [{0}]", commandStr); + LOGGER.log(Level.FINEST, "Executing ctags command [{0}]", commandStr); processBuilder = new ProcessBuilder(command); diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/analysis/executables/ELFAnalyzer.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/analysis/executables/ELFAnalyzer.java index 840dc07d650..1872f50649c 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/analysis/executables/ELFAnalyzer.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/analysis/executables/ELFAnalyzer.java @@ -108,7 +108,7 @@ public String parseELF(FileChannel fch) throws IOException { ELFHeader eh = new ELFHeader(fmap); if (eh.e_shnum <= 0) { - LOGGER.log(Level.FINE, "Skipping file, no section headers"); + LOGGER.log(Level.FINEST, "Skipping file, no section headers"); return null; } @@ -116,7 +116,7 @@ public String parseELF(FileChannel fch) throws IOException { ELFSection stringSection = new ELFSection(fmap); if (stringSection.sh_size == 0) { - LOGGER.log(Level.FINE, "Skipping file, no section name string table"); + LOGGER.log(Level.FINEST, "Skipping file, no section name string table"); return null; } diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/authorization/AuthorizationFramework.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/authorization/AuthorizationFramework.java index 6826d64e803..cb1d5fbda44 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/authorization/AuthorizationFramework.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/authorization/AuthorizationFramework.java @@ -308,7 +308,7 @@ public void unloadAllPlugins(AuthorizationStack stack) { @Override protected void classLoaded(IAuthorizationPlugin plugin) { if (!loadingStack.setPlugin(plugin)) { - LOGGER.log(Level.INFO, "plugin {0} is not configured in the stack", plugin.getClass().getCanonicalName()); + LOGGER.log(Level.FINEST, "plugin {0} is not configured in the stack", plugin.getClass().getCanonicalName()); } } diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/authorization/AuthorizationPlugin.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/authorization/AuthorizationPlugin.java index b168fa79229..9a8b0fa93a4 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/authorization/AuthorizationPlugin.java +++ 
b/opengrok-indexer/src/main/java/org/opengrok/indexer/authorization/AuthorizationPlugin.java @@ -104,7 +104,7 @@ public synchronized void load(Map parameters) { + "This can cause the authorization to fail always.", getName()); setFailed(); - LOGGER.log(Level.INFO, "[{0}] Plugin \"{1}\" {2} and is {3}.", + LOGGER.log(Level.FINEST, "[{0}] Plugin \"{1}\" {2} and is {3}.", new Object[]{ getFlag().toString().toUpperCase(Locale.ROOT), getName(), @@ -125,7 +125,7 @@ public synchronized void load(Map parameters) { setFailed(); } - LOGGER.log(Level.INFO, "[{0}] Plugin \"{1}\" {2} and is {3}.", + LOGGER.log(Level.FINEST, "[{0}] Plugin \"{1}\" {2} and is {3}.", new Object[]{ getFlag().toString().toUpperCase(Locale.ROOT), getName(), diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/authorization/AuthorizationStack.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/authorization/AuthorizationStack.java index 92d297f3df3..9658beb3e92 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/authorization/AuthorizationStack.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/authorization/AuthorizationStack.java @@ -134,7 +134,7 @@ public void load(Map parameters) { getCurrentSetup().putAll(parameters); getCurrentSetup().putAll(getSetup()); - LOGGER.log(Level.INFO, "[{0}] Stack \"{1}\" is loading.", + LOGGER.log(Level.FINEST, "[{0}] Stack \"{1}\" is loading.", new Object[]{getFlag().toString().toUpperCase(Locale.ROOT), getName()}); @@ -155,7 +155,7 @@ public void load(Map parameters) { setFailed(); } - LOGGER.log(Level.INFO, "[{0}] Stack \"{1}\" is {2}.", + LOGGER.log(Level.FINEST, "[{0}] Stack \"{1}\" is {2}.", new Object[]{ getFlag().toString().toUpperCase(Locale.ROOT), getName(), @@ -192,17 +192,17 @@ public boolean isAllowed(Nameable entity, PluginDecisionPredicate pluginPredicate, PluginSkippingPredicate skippingPredicate) { boolean overallDecision = true; - LOGGER.log(Level.FINER, "Authorization for \"{0}\" in \"{1}\" [{2}]", + LOGGER.log(Level.FINEST, "Authorization for \"{0}\" in \"{1}\" [{2}]", new Object[]{entity.getName(), this.getName(), this.getFlag()}); if (skippingPredicate.shouldSkip(this)) { - LOGGER.log(Level.FINER, "AuthEntity \"{0}\" [{1}] skipping testing of name \"{2}\"", + LOGGER.log(Level.FINEST, "AuthEntity \"{0}\" [{1}] skipping testing of name \"{2}\"", new Object[]{this.getName(), this.getFlag(), entity.getName()}); } else { overallDecision = processStack(entity, pluginPredicate, skippingPredicate); } - LOGGER.log(Level.FINER, "Authorization for \"{0}\" in \"{1}\" [{2}] => {3}", + LOGGER.log(Level.FINEST, "Authorization for \"{0}\" in \"{1}\" [{2}] => {3}", new Object[]{entity.getName(), this.getName(), this.getFlag(), overallDecision ? 
"true" : "false"}); return overallDecision; } @@ -229,18 +229,18 @@ protected boolean processStack(Nameable entity, for (AuthorizationEntity authEntity : getStack()) { if (skippingPredicate.shouldSkip(authEntity)) { - LOGGER.log(Level.FINEST, "AuthEntity \"{0}\" [{1}] skipping testing of name \"{2}\"", + LOGGER.log(Level.FINER, "AuthEntity \"{0}\" [{1}] skipping testing of name \"{2}\"", new Object[]{authEntity.getName(), authEntity.getFlag(), entity.getName()}); continue; } // run the plugin's test method try { - LOGGER.log(Level.FINEST, "AuthEntity \"{0}\" [{1}] testing a name \"{2}\"", + LOGGER.log(Level.FINER, "AuthEntity \"{0}\" [{1}] testing a name \"{2}\"", new Object[]{authEntity.getName(), authEntity.getFlag(), entity.getName()}); boolean pluginDecision = authEntity.isAllowed(entity, pluginPredicate, skippingPredicate); - LOGGER.log(Level.FINEST, "AuthEntity \"{0}\" [{1}] testing a name \"{2}\" => {3}", + LOGGER.log(Level.FINER, "AuthEntity \"{0}\" [{1}] testing a name \"{2}\" => {3}", new Object[]{authEntity.getName(), authEntity.getFlag(), entity.getName(), pluginDecision ? "true" : "false"}); @@ -259,7 +259,7 @@ protected boolean processStack(Nameable entity, } } catch (AuthorizationException ex) { // Propagate up so that proper HTTP error can be given. - LOGGER.log(Level.FINEST, "got authorization exception: " + ex.getMessage()); + LOGGER.log(Level.FINER, "got authorization exception: " + ex.getMessage()); throw ex; } catch (Throwable ex) { LOGGER.log(Level.WARNING, @@ -268,7 +268,7 @@ protected boolean processStack(Nameable entity, entity.getName()), ex); - LOGGER.log(Level.FINEST, "AuthEntity \"{0}\" [{1}] testing a name \"{2}\" => {3}", + LOGGER.log(Level.FINER, "AuthEntity \"{0}\" [{1}] testing a name \"{2}\" => {3}", new Object[]{authEntity.getName(), authEntity.getFlag(), entity.getName(), "false (failed)"}); diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/configuration/Configuration.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/configuration/Configuration.java index 0bd69550593..b994da84d48 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/configuration/Configuration.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/configuration/Configuration.java @@ -1239,7 +1239,7 @@ public void encodeObject(OutputStream out) { } public static Configuration read(File file) throws IOException { - LOGGER.log(Level.INFO, "Reading configuration from {0}", file.getCanonicalPath()); + LOGGER.log(Level.FINEST, "Reading configuration from {0}", file.getCanonicalPath()); try (FileInputStream in = new FileInputStream(file)) { return decodeObject(in); } diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/configuration/RuntimeEnvironment.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/configuration/RuntimeEnvironment.java index 4c04622689b..1ec57e8379b 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/configuration/RuntimeEnvironment.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/configuration/RuntimeEnvironment.java @@ -1647,7 +1647,7 @@ public void applyConfig(String configuration, boolean reindex, boolean interacti */ public void applyConfig(Configuration config, boolean reindex, boolean interactive) { setConfiguration(config, interactive); - LOGGER.log(Level.INFO, "Configuration updated"); + LOGGER.log(Level.FINEST, "Configuration updated"); if (reindex) { // We are assuming that each update of configuration means reindex. 
If dedicated thread is introduced @@ -1751,7 +1751,7 @@ public void refreshSearcherManagerMap() { // so that it cannot produce new IndexSearcher objects. if (!getProjectNames().contains(entry.getKey())) { try { - LOGGER.log(Level.FINE, + LOGGER.log(Level.FINEST, "closing SearcherManager for project" + entry.getKey()); entry.getValue().close(); } catch (IOException ex) { diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/configuration/WatchDogService.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/configuration/WatchDogService.java index 555bdf4f5dd..970406326cb 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/configuration/WatchDogService.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/configuration/WatchDogService.java @@ -70,10 +70,10 @@ public void start(File directory) { stop(); if (directory == null || !directory.isDirectory() || !directory.canRead()) { - LOGGER.log(Level.INFO, "Watch dog cannot be started - invalid directory: {0}", directory); + LOGGER.log(Level.FINEST, "Watch dog cannot be started - invalid directory: {0}", directory); return; } - LOGGER.log(Level.INFO, "Starting watchdog in: {0}", directory); + LOGGER.log(Level.FINEST, "Starting watchdog in: {0}", directory); watchDogThread = new Thread(() -> { try { watchDogWatcher = FileSystems.getDefault().newWatchService(); @@ -89,7 +89,7 @@ public FileVisitResult postVisitDirectory(Path d, IOException exc) throws IOExce } }); - LOGGER.log(Level.INFO, "Watch dog started {0}", directory); + LOGGER.log(Level.FINEST, "Watch dog started {0}", directory); while (!Thread.currentThread().isInterrupted()) { final WatchKey key; try { @@ -117,7 +117,7 @@ public FileVisitResult postVisitDirectory(Path d, IOException exc) throws IOExce LOGGER.log(Level.FINEST, "Watchdog finishing (exiting)", ex); Thread.currentThread().interrupt(); } - LOGGER.log(Level.FINER, "Watchdog finishing (exiting)"); + LOGGER.log(Level.FINEST, "Watchdog finishing (exiting)"); }, "watchDogService"); watchDogThread.start(); } @@ -141,6 +141,6 @@ public void stop() { LOGGER.log(Level.WARNING, "Cannot join WatchDogService thread: ", ex); } } - LOGGER.log(Level.INFO, "Watchdog stoped"); + LOGGER.log(Level.FINEST, "Watchdog stopped"); } } diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/framework/PluginClassLoader.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/framework/PluginClassLoader.java index 1d305bca28f..8a00c0700b1 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/framework/PluginClassLoader.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/framework/PluginClassLoader.java @@ -98,7 +98,7 @@ public boolean accept(File dir, String name) { try (InputStream is = jar.getInputStream(entry)) { byte[] bytes = loadBytes(is); Class c = defineClass(classname, bytes, 0, bytes.length); - LOGGER.log(Level.FINE, "Class \"{0}\" found in file \"{1}\"", + LOGGER.log(Level.FINEST, "Class \"{0}\" found in file \"{1}\"", new Object[]{ classname, f.getAbsolutePath() diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/framework/PluginFramework.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/framework/PluginFramework.java index e6586a54447..9b2215f815c 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/framework/PluginFramework.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/framework/PluginFramework.java @@ -358,7 +358,7 @@ public final void reload() { return; } - LOGGER.log(Level.INFO, "Plugins are being reloaded from {0}", 
pluginDirectory.getAbsolutePath()); + LOGGER.log(Level.FINEST, "Plugins are being reloaded from {0}", pluginDirectory.getAbsolutePath()); // trashing out the old instance of the loaded enables us // to reload the stack at runtime diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/history/AccuRevRepository.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/history/AccuRevRepository.java index 1d542036d77..b2da65ef340 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/history/AccuRevRepository.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/history/AccuRevRepository.java @@ -347,7 +347,7 @@ else if (line.startsWith("Top")) { // from the path known by Accurev) if (Files.isSymbolicLink(given)) { - LOGGER.log(Level.INFO,"{0} is symbolic link.", wsPath); + LOGGER.log(Level.FINEST,"{0} is symbolic link.", wsPath); // When we know that the two paths DO NOT point to the // same place (that is, the given path is deeper into @@ -372,7 +372,7 @@ else if (line.startsWith("Top")) { } if (relRoot.length() > 0) { - LOGGER.log(Level.INFO,"Source root relative to workspace root by: {0}", relRoot); + LOGGER.log(Level.FINEST,"Source root relative to workspace root by: {0}", relRoot); } } } diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/history/FileHistoryCache.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/history/FileHistoryCache.java index 4582f1139aa..0875443ce3a 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/history/FileHistoryCache.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/history/FileHistoryCache.java @@ -387,7 +387,7 @@ private void finishStore(Repository repository, String latestRev) { repository.getDirectoryName()); } else { storeLatestCachedRevision(repository, latestRev); - LOGGER.log(Level.FINE, + LOGGER.log(Level.FINEST, "Done storing history for repository {0}", repository.getDirectoryName()); } @@ -417,7 +417,7 @@ public void store(History history, Repository repository) return; } - LOGGER.log(Level.FINE, + LOGGER.log(Level.FINEST, "Storing history for repository {0}", new Object[] {repository.getDirectoryName()}); @@ -491,7 +491,7 @@ public void store(History history, Repository repository) fileHistoryCount++; } - LOGGER.log(Level.FINE, "Stored history for {0} files", fileHistoryCount); + LOGGER.log(Level.FINEST, "Stored history for {0} files", fileHistoryCount); if (!handleRenamedFiles) { finishStore(repository, latestRev); @@ -560,7 +560,7 @@ public void store(History history, Repository repository) } catch (InterruptedException ex) { LOGGER.log(Level.SEVERE, "latch exception ",ex); } - LOGGER.log(Level.FINE, "Stored history for {0} renamed files", + LOGGER.log(Level.FINEST, "Stored history for {0} renamed files", renamedFileHistoryCount.intValue()); finishStore(repository, latestRev); } @@ -817,7 +817,7 @@ public void clearFile(String path) { } if (parent.delete()) { - LOGGER.log(Level.FINE, "Removed empty history cache dir:{0}", + LOGGER.log(Level.FINEST, "Removed empty history cache dir:{0}", parent.getAbsolutePath()); } } diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/history/HistoryEntry.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/history/HistoryEntry.java index 5b3d0a92697..f771e090a7d 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/history/HistoryEntry.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/history/HistoryEntry.java @@ -89,22 +89,22 @@ public String getLine() { public void dump() { - 
LOGGER.log(Level.FINE, "HistoryEntry : revision = {0}", revision); - LOGGER.log(Level.FINE, "HistoryEntry : tags = {0}", tags); - LOGGER.log(Level.FINE, "HistoryEntry : date = {0}", date); - LOGGER.log(Level.FINE, "HistoryEntry : author = {0}", author); - LOGGER.log(Level.FINE, "HistoryEntry : active = {0}", (active ? + LOGGER.log(Level.FINEST, "HistoryEntry : revision = {0}", revision); + LOGGER.log(Level.FINEST, "HistoryEntry : tags = {0}", tags); + LOGGER.log(Level.FINEST, "HistoryEntry : date = {0}", date); + LOGGER.log(Level.FINEST, "HistoryEntry : author = {0}", author); + LOGGER.log(Level.FINEST, "HistoryEntry : active = {0}", (active ? "True" : "False")); String[] lines = message.toString().split("\n"); String separator = "="; for (String line : lines) { - LOGGER.log(Level.FINE, "HistoryEntry : message {0} {1}", + LOGGER.log(Level.FINEST, "HistoryEntry : message {0} {1}", new Object[]{separator, line}); separator = ">"; } separator = "="; for (String file : files) { - LOGGER.log(Level.FINE, "HistoryEntry : files {0} {1}", + LOGGER.log(Level.FINEST, "HistoryEntry : files {0} {1}", new Object[]{separator, file}); separator = ">"; } diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/history/HistoryGuru.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/history/HistoryGuru.java index 9d72f94375c..20b88820ee6 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/history/HistoryGuru.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/history/HistoryGuru.java @@ -519,7 +519,7 @@ public void updateRepositories() { String type = repository.getClass().getSimpleName(); if (repository.isWorking()) { - LOGGER.info(String.format("Update %s repository in %s", + LOGGER.finest(String.format("Update %s repository in %s", type, path)); try { @@ -550,7 +550,7 @@ public void updateRepositories(Collection paths) { String type = repository.getClass().getSimpleName(); if (repository.isWorking()) { - LOGGER.info(String.format("Update %s repository in %s", type, + LOGGER.finest(String.format("Update %s repository in %s", type, repository.getDirectoryName())); try { @@ -576,7 +576,7 @@ private void createCache(Repository repository, String sinceRevision) { String type = repository.getClass().getSimpleName(); if (!repository.isHistoryEnabled()) { - LOGGER.log(Level.INFO, + LOGGER.log(Level.FINEST, "Skipping history cache creation of {0} repository in {1} and its subdirectories", new Object[]{type, path}); return; @@ -585,7 +585,7 @@ private void createCache(Repository repository, String sinceRevision) { if (repository.isWorking()) { Statistics elapsed = new Statistics(); - LOGGER.log(Level.INFO, "Creating historycache for {0} ({1}) {2} renamed file handling", + LOGGER.log(Level.FINEST, "Creating historycache for {0} ({1}) {2} renamed file handling", new Object[]{path, type, repository.isHandleRenamedFiles() ? 
"with" : "without"}); try { @@ -631,7 +631,7 @@ private void createCacheReal(Collection repositories) { } } - LOGGER.log(Level.INFO, "Creating historycache for {0} repositories", + LOGGER.log(Level.FINEST, "Creating historycache for {0} repositories", repos2process.size()); final CountDownLatch latch = new CountDownLatch(repos2process.size()); for (final Map.Entry entry : repos2process.entrySet()) { @@ -711,7 +711,7 @@ public List clearCache(Collection repositories) throws HistoryEx try { cache.clear(r); clearedRepos.add(r.getDirectoryName()); - LOGGER.log(Level.INFO, + LOGGER.log(Level.FINEST, "History cache for {0} cleared.", r.getDirectoryName()); } catch (HistoryException e) { LOGGER.log(Level.WARNING, @@ -924,7 +924,7 @@ public void invalidateRepositories(Collection repos, b Collections.synchronizedMap(new HashMap<>(repos.size())); Statistics elapsed = new Statistics(); - LOGGER.log(Level.FINE, "invalidating {0} repositories", repos.size()); + LOGGER.log(Level.FINEST, "invalidating {0} repositories", repos.size()); /* * getRepository() below does various checks of the repository diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/history/RazorHistoryParser.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/history/RazorHistoryParser.java index f1566f24a15..34f6cba1238 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/history/RazorHistoryParser.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/history/RazorHistoryParser.java @@ -201,7 +201,7 @@ private void dumpEntry(HistoryEntry entry) { } private void parseDebug(String message) { - LOGGER.log(Level.FINE, "RazorHistoryParser: " + message ); + LOGGER.log(Level.FINEST, "RazorHistoryParser: " + message ); } private void parseProblem(String message) { diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/history/Repository.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/history/Repository.java index 5b7bec4841b..dce840c765a 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/history/Repository.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/history/Repository.java @@ -368,7 +368,7 @@ final void createCache(HistoryCache cache, String sinceRevision) // this way. Just give up and return. if (!hasHistoryForDirectories()) { LOGGER.log( - Level.INFO, + Level.FINEST, "Skipping creation of history cache for {0}, since retrieval " + "of history for directories is not implemented for this " + "repository type.", getDirectoryName()); diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/history/SubversionHistoryParser.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/history/SubversionHistoryParser.java index ec814db93d4..146d1689d82 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/history/SubversionHistoryParser.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/history/SubversionHistoryParser.java @@ -105,7 +105,7 @@ public void endElement(String uri, String localName, String qname) throws SAXExc // so intern them to reduce the memory footprint. 
entry.addFile(path.intern()); } else { - LOGGER.log(Level.FINER, "Skipping file outside repository: " + s); + LOGGER.log(Level.FINEST, "Skipping file outside repository: " + s); } } else if ("msg".equals(qname)) { entry.setMessage(s); diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/index/DefaultIndexChangedListener.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/index/DefaultIndexChangedListener.java index d7cd2fcfbf6..80cde4c49e7 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/index/DefaultIndexChangedListener.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/index/DefaultIndexChangedListener.java @@ -48,11 +48,11 @@ public void fileAdd(String path, String analyzer) { @Override public void fileRemove(String path) { - LOGGER.log(Level.FINE, "Remove file:{0}", path); + LOGGER.log(Level.FINEST, "Remove file:{0}", path); } @Override public void fileUpdate(String path) { - LOGGER.log(Level.FINE, "Update: {0}", path); + LOGGER.log(Level.FINEST, "Update: {0}", path); } @Override @@ -64,6 +64,6 @@ public void fileAdded(String path, String analyzer) { @Override public void fileRemoved(String path) { - LOGGER.log(Level.FINER, "Removed file:{0}", path); + LOGGER.log(Level.FINEST, "Removed file:{0}", path); } } diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/index/IndexDatabase.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/index/IndexDatabase.java index 5bd36c17b06..529383da650 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/index/IndexDatabase.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/index/IndexDatabase.java @@ -486,13 +486,13 @@ public void update() throws IOException { IndexDownArgs args = new IndexDownArgs(); Statistics elapsed = new Statistics(); - LOGGER.log(Level.INFO, "Starting traversal of directory {0}", dir); + LOGGER.log(Level.FINER, "Starting traversal of directory {0}", dir); indexDown(sourceRoot, dir, args); showFileCount(dir, args, elapsed); args.cur_count = 0; elapsed = new Statistics(); - LOGGER.log(Level.INFO, "Starting indexing of directory {0}", dir); + LOGGER.log(Level.FINER, "Starting indexing of directory {0}", dir); indexParallel(dir, args); elapsed.report(LOGGER, String.format("Done indexing of directory %s", dir)); @@ -613,7 +613,7 @@ public void optimize() throws IOException { try { Statistics elapsed = new Statistics(); String projectDetail = this.project != null ? 
" for project " + project.getName() : ""; - LOGGER.log(Level.INFO, "Optimizing the index{0}", projectDetail); + LOGGER.log(Level.FINEST, "Optimizing the index{0}", projectDetail); Analyzer analyzer = new StandardAnalyzer(); IndexWriterConfig conf = new IndexWriterConfig(analyzer); conf.setOpenMode(OpenMode.CREATE_OR_APPEND); @@ -623,7 +623,7 @@ public void optimize() throws IOException { elapsed.report(LOGGER, String.format("Done optimizing index%s", projectDetail)); synchronized (lock) { if (dirtyFile.exists() && !dirtyFile.delete()) { - LOGGER.log(Level.FINE, "Failed to remove \"dirty-file\": {0}", + LOGGER.log(Level.FINEST, "Failed to remove \"dirty-file\": {0}", dirtyFile.getAbsolutePath()); } dirty = false; @@ -664,7 +664,7 @@ private void setDirty() { try { if (!dirty) { if (!dirtyFile.createNewFile() && !dirtyFile.exists()) { - LOGGER.log(Level.FINE, + LOGGER.log(Level.FINEST, "Failed to create \"dirty-file\": {0}", dirtyFile.getAbsolutePath()); } @@ -837,12 +837,12 @@ private boolean accept(File file, String[] outLocalRelPath) { if (!includedNames.isEmpty() && // the filter should not affect directory names (!(file.isDirectory() || includedNames.match(file)))) { - LOGGER.log(Level.FINER, "not including {0}", absolutePath); + LOGGER.log(Level.FINEST, "not including {0}", absolutePath); return false; } if (ignoredNames.ignore(file)) { - LOGGER.log(Level.FINER, "ignoring {0}", absolutePath); + LOGGER.log(Level.FINEST, "ignoring {0}", absolutePath); return false; } @@ -858,7 +858,7 @@ private boolean accept(File file, String[] outLocalRelPath) { if (!absolutePath.equals(canonical.getPath()) && !acceptSymlink(absolute, canonical, outLocalRelPath)) { - LOGGER.log(Level.FINE, "Skipped symlink ''{0}'' -> ''{1}''", + LOGGER.log(Level.FINEST, "Skipped symlink ''{0}'' -> ''{1}''", new Object[]{absolutePath, canonical}); return false; } @@ -889,7 +889,7 @@ private boolean accept(File file, String[] outLocalRelPath) { RuntimeEnvironment env = RuntimeEnvironment.getInstance(); boolean res = !env.isIndexVersionedFilesOnly(); if (!res) { - LOGGER.log(Level.FINER, "not accepting unversioned {0}", + LOGGER.log(Level.FINEST, "not accepting unversioned {0}", absolutePath); } return res; @@ -913,7 +913,7 @@ private boolean accept(File parent, File file, String[] outLocalRelPath) { File f1 = parent.getCanonicalFile(); File f2 = file.getCanonicalFile(); if (f1.equals(f2)) { - LOGGER.log(Level.INFO, "Skipping links to itself...: {0} {1}", + LOGGER.log(Level.FINEST, "Skipping links to itself...: {0} {1}", new Object[]{parent.getAbsolutePath(), file.getAbsolutePath()}); return false; } @@ -922,7 +922,7 @@ private boolean accept(File parent, File file, String[] outLocalRelPath) { File t1 = f1; while ((t1 = t1.getParentFile()) != null) { if (f2.equals(t1)) { - LOGGER.log(Level.INFO, "Skipping links to parent...: {0} {1}", + LOGGER.log(Level.FINEST, "Skipping links to parent...: {0} {1}", new Object[]{parent.getAbsolutePath(), file.getAbsolutePath()}); return false; } @@ -1658,7 +1658,7 @@ private void finishWriting() throws IOException { hasPendingCommit = true; int n = completer.complete(); - LOGGER.log(Level.FINE, "completed {0} object(s)", n); + LOGGER.log(Level.FINEST, "completed {0} object(s)", n); // Just before commit(), reset the `hasPendingCommit' flag, // since after commit() is called, there is no need for @@ -1691,7 +1691,7 @@ private boolean checkSettings(File file, project.getTabSize() : 0; Integer actTabSize = settings.getTabSize(); if (actTabSize != null && !actTabSize.equals(reqTabSize)) { - 
LOGGER.log(Level.FINE, "Tabsize mismatch: {0}", path); + LOGGER.log(Level.FINEST, "Tabsize mismatch: {0}", path); return false; } @@ -1702,7 +1702,7 @@ private boolean checkSettings(File file, // Read a limited-fields version of the document. Document doc = reader.document(postsIter.docID(), CHECK_FIELDS); if (doc == null) { - LOGGER.log(Level.FINER, "No Document: {0}", path); + LOGGER.log(Level.FINEST, "No Document: {0}", path); continue; } @@ -1738,7 +1738,7 @@ private boolean checkSettings(File file, * selection of analyzer or return a value to indicate the * analyzer is now mis-matched. */ - LOGGER.log(Level.FINER, "Guru version mismatch: {0}", path); + LOGGER.log(Level.FINEST, "Guru version mismatch: {0}", path); fa = getAnalyzerFor(file, path); fileTypeName = fa.getFileTypeName(); @@ -1771,7 +1771,7 @@ private boolean checkSettings(File file, break; } if (n < 1) { - LOGGER.log(Level.FINER, "Missing index Documents: {0}", path); + LOGGER.log(Level.FINEST, "Missing index Documents: {0}", path); return false; } diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/index/IndexVersion.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/index/IndexVersion.java index 6a52de5a40c..8692a878d6f 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/index/IndexVersion.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/index/IndexVersion.java @@ -66,13 +66,13 @@ public IndexVersionException(String s) { public static void check(List subFilesList) throws Exception { RuntimeEnvironment env = RuntimeEnvironment.getInstance(); File indexRoot = new File(env.getDataRootPath(), IndexDatabase.INDEX_DIR); - LOGGER.log(Level.FINE, "Checking for Lucene index version mismatch in {0}", + LOGGER.log(Level.FINEST, "Checking for Lucene index version mismatch in {0}", indexRoot); if (!subFilesList.isEmpty()) { // Assumes projects are enabled. for (String projectName : subFilesList) { - LOGGER.log(Level.FINER, + LOGGER.log(Level.FINEST, "Checking Lucene index version in project {0}", projectName); checkDir(new File(indexRoot, projectName)); @@ -80,13 +80,13 @@ public static void check(List subFilesList) throws Exception { } else { if (env.isProjectsEnabled()) { for (String projectName : env.getProjects().keySet()) { - LOGGER.log(Level.FINER, + LOGGER.log(Level.FINEST, "Checking Lucene index version in project {0}", projectName); checkDir(new File(indexRoot, projectName)); } } else { - LOGGER.log(Level.FINER, "Checking Lucene index version in {0}", + LOGGER.log(Level.FINEST, "Checking Lucene index version in {0}", indexRoot); checkDir(indexRoot); } diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/index/Indexer.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/index/Indexer.java index 51ec969b05b..b23d12586a4 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/index/Indexer.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/index/Indexer.java @@ -312,7 +312,7 @@ public static void main(String[] argv) { } } - LOGGER.log(Level.INFO, "Indexer version {0} ({1})", + LOGGER.log(Level.FINEST, "Indexer version {0} ({1})", new Object[]{Info.getVersion(), Info.getRevision()}); // Create history cache first. 
@@ -886,9 +886,9 @@ private static void configureFileAnalyzer(String fileSpec, String analyzer) { */ public static void writeConfigToFile(RuntimeEnvironment env, String filename) throws IOException { if (filename != null) { - LOGGER.log(Level.INFO, "Writing configuration to {0}", filename); + LOGGER.log(Level.FINEST, "Writing configuration to {0}", filename); env.writeConfiguration(new File(filename)); - LOGGER.info("Done..."); + LOGGER.finest("Done..."); } } @@ -982,7 +982,7 @@ public void prepareIndexer(RuntimeEnvironment env, } if (searchRepositories) { - LOGGER.log(Level.INFO, "Scanning for repositories..."); + LOGGER.log(Level.FINEST, "Scanning for repositories..."); Statistics stats = new Statistics(); env.setRepositories(env.getSourceRootPath()); stats.report(LOGGER, String.format("Done scanning for repositories, found %d repositories", @@ -992,14 +992,14 @@ public void prepareIndexer(RuntimeEnvironment env, if (createHistoryCache) { // Even if history is disabled globally, it can be enabled for some repositories. if (repositories != null && !repositories.isEmpty()) { - LOGGER.log(Level.INFO, "Generating history cache for repositories: " + + LOGGER.log(Level.FINEST, "Generating history cache for repositories: " + repositories.stream().collect(Collectors.joining(","))); HistoryGuru.getInstance().createCache(repositories); - LOGGER.info("Done..."); + LOGGER.finest("Done..."); } else { - LOGGER.log(Level.INFO, "Generating history cache for all repositories ..."); + LOGGER.log(Level.FINEST, "Generating history cache for all repositories ..."); HistoryGuru.getInstance().createCache(); - LOGGER.info("Done..."); + LOGGER.finest("Done..."); } } @@ -1024,7 +1024,7 @@ public void doIndexerExecution(final boolean update, List subFiles, throws IOException { Statistics elapsed = new Statistics(); RuntimeEnvironment env = RuntimeEnvironment.getInstance(); - LOGGER.info("Starting indexing"); + LOGGER.finest("Starting indexing"); IndexerParallelizer parallelizer = env.getIndexerParallelizer(); final CountDownLatch latch; @@ -1092,7 +1092,7 @@ public void run() { // Wait forever for the executors to finish. 
try { - LOGGER.info("Waiting for the executors to finish"); + LOGGER.finest("Waiting for the executors to finish"); latch.await(999, TimeUnit.DAYS); } catch (InterruptedException exp) { LOGGER.log(Level.WARNING, "Received interrupt while waiting" + @@ -1102,13 +1102,13 @@ public void run() { } public void refreshSearcherManagers(RuntimeEnvironment env, List projects, String host) { - LOGGER.log(Level.INFO, "Refreshing searcher managers to: {0}", host); + LOGGER.log(Level.FINEST, "Refreshing searcher managers to: {0}", host); env.signalTorefreshSearcherManagers(projects, host); } public void sendToConfigHost(RuntimeEnvironment env, String host) { - LOGGER.log(Level.INFO, "Sending configuration to: {0}", host); + LOGGER.log(Level.FINEST, "Sending configuration to: {0}", host); try { env.writeConfiguration(host); } catch (IOException ex) { @@ -1116,7 +1116,7 @@ public void sendToConfigHost(RuntimeEnvironment env, String host) { "Failed to send configuration to %s " + "(is web application server running with opengrok deployed?)", host), ex); } - LOGGER.info("Configuration update routine done, check log output for errors."); + LOGGER.finest("Configuration update routine done, check log output for errors."); } private static void pauseToAwaitProfiler() { diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/index/PendingFileCompleter.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/index/PendingFileCompleter.java index 284894cf00a..2ad2f8b433a 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/index/PendingFileCompleter.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/index/PendingFileCompleter.java @@ -167,11 +167,11 @@ public boolean add(PendingFileRenaming e) { */ public int complete() throws IOException { int numDeletions = completeDeletions(); - LOGGER.log(Level.FINE, "deleted {0} file(s)", numDeletions); + LOGGER.log(Level.FINEST, "deleted {0} file(s)", numDeletions); int numRenamings = completeRenamings(); - LOGGER.log(Level.FINE, "renamed {0} file(s)", numRenamings); + LOGGER.log(Level.FINEST, "renamed {0} file(s)", numRenamings); int numLinkages = completeLinkages(); - LOGGER.log(Level.FINE, "affirmed links for {0} path(s)", numLinkages); + LOGGER.log(Level.FINEST, "affirmed links for {0} path(s)", numLinkages); return numDeletions + numRenamings + numLinkages; } @@ -331,7 +331,7 @@ private void doDelete(PendingFileDeletionExec del) throws IOException { private void doDelete(File f) { if (f.delete()) { - LOGGER.log(Level.FINER, "Deleted obsolete file: {0}", f.getPath()); + LOGGER.log(Level.FINEST, "Deleted obsolete file: {0}", f.getPath()); } else if (f.exists()) { LOGGER.log(Level.WARNING, "Failed to delete obsolete file: {0}", f.getPath()); @@ -471,7 +471,7 @@ private void tryDeleteParents(List dels) { private void tryDeleteDirectory(File dir) { if (dir.delete()) { - LOGGER.log(Level.FINE, "Removed empty parent dir: {0}", + LOGGER.log(Level.FINEST, "Removed empty parent dir: {0}", dir.getAbsolutePath()); } } diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/search/context/OGKUnifiedHighlighter.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/search/context/OGKUnifiedHighlighter.java index e8161de6d43..4cf7a884c24 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/search/context/OGKUnifiedHighlighter.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/search/context/OGKUnifiedHighlighter.java @@ -285,7 +285,7 @@ private String getRepoFileContent(String repoRelPath, String storedU) throws 
IOException { if (storedU == null) { - LOGGER.log(Level.FINE, "Missing U[UID] for: {0}", + LOGGER.log(Level.FINEST, "Missing U[UID] for: {0}", repoRelPath); return null; } @@ -293,7 +293,7 @@ private String getRepoFileContent(String repoRelPath, String storedU) String repoAbsPath = env.getSourceRootPath() + repoRelPath; File repoAbsFile = new File(repoAbsPath); if (!repoAbsFile.exists()) { - LOGGER.log(Level.FINE, "Missing file: {0}", repoAbsPath); + LOGGER.log(Level.FINEST, "Missing file: {0}", repoAbsPath); return null; } @@ -305,7 +305,7 @@ private String getRepoFileContent(String repoRelPath, String storedU) BytesRef buid = new BytesRef(uid); BytesRef storedBuid = new BytesRef(storedU); if (storedBuid.compareTo(buid) != 0) { - LOGGER.log(Level.FINE, "Last-modified differs for: {0}", + LOGGER.log(Level.FINEST, "Last-modified differs for: {0}", repoRelPath); return null; } diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/util/CtagsUtil.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/util/CtagsUtil.java index 11734dd3075..459ddca79e1 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/util/CtagsUtil.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/util/CtagsUtil.java @@ -49,7 +49,7 @@ public static boolean validate(String ctagsBinary) { return false; } - LOGGER.log(Level.INFO, "Using ctags: {0}", output.trim()); + LOGGER.log(Level.FINEST, "Using ctags: {0}", output.trim()); return true; } diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/util/Executor.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/util/Executor.java index ce3a64bc4ae..28df72bfbce 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/util/Executor.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/util/Executor.java @@ -165,7 +165,7 @@ public int exec(final boolean reportExceptions, StreamHandler handler) { dir_str = cwd.toString(); } - LOGGER.log(Level.FINE, + LOGGER.log(Level.FINEST, "Executing command {0} in directory {1}", new Object[] {cmd_str,dir_str}); @@ -217,7 +217,7 @@ public void run() { ret = process.waitFor(); - LOGGER.log(Level.FINE, + LOGGER.log(Level.FINEST, "Finished command {0} in directory {1}", new Object[] {cmd_str,dir_str}); @@ -383,7 +383,7 @@ public static void registerErrorHandler() { UncaughtExceptionHandler dueh = Thread.getDefaultUncaughtExceptionHandler(); if (dueh == null) { - LOGGER.log(Level.FINE, "Installing default uncaught exception handler"); + LOGGER.log(Level.FINEST, "Installing default uncaught exception handler"); Thread.setDefaultUncaughtExceptionHandler(new UncaughtExceptionHandler() { @Override public void uncaughtException(Thread t, Throwable e) { diff --git a/opengrok-indexer/src/main/java/org/opengrok/indexer/util/Statistics.java b/opengrok-indexer/src/main/java/org/opengrok/indexer/util/Statistics.java index 9969a4cff12..b1efaa1f0ee 100644 --- a/opengrok-indexer/src/main/java/org/opengrok/indexer/util/Statistics.java +++ b/opengrok-indexer/src/main/java/org/opengrok/indexer/util/Statistics.java @@ -38,17 +38,17 @@ public Statistics() { public void report(Logger log, String msg) { long stopTime = System.currentTimeMillis(); String time_str = StringUtils.getReadableTime(stopTime - startTime); - log.log(Level.INFO, msg + " (took {0})", time_str); + log.log(Level.FINEST, msg + " (took {0})", time_str); } public void report(Logger log) { long stopTime = System.currentTimeMillis() - startTime; - log.log(Level.INFO, "Total time: {0}", getReadableTime(stopTime)); + 
log.log(Level.FINEST, "Total time: {0}", getReadableTime(stopTime)); System.gc(); Runtime r = Runtime.getRuntime(); long mb = 1024L * 1024; - log.log(Level.INFO, "Final Memory: {0}M/{1}M", + log.log(Level.FINEST, "Final Memory: {0}M/{1}M", new Object[]{(r.totalMemory() - r.freeMemory()) / mb, r.totalMemory() / mb}); } diff --git a/opengrok-web/src/main/java/org/opengrok/web/AuthorizationFilter.java b/opengrok-web/src/main/java/org/opengrok/web/AuthorizationFilter.java index 927ca59170a..5fdf8eac384 100644 --- a/opengrok-web/src/main/java/org/opengrok/web/AuthorizationFilter.java +++ b/opengrok-web/src/main/java/org/opengrok/web/AuthorizationFilter.java @@ -68,11 +68,11 @@ public void doFilter(ServletRequest sr, ServletResponse sr1, FilterChain fc) thr Project p = config.getProject(); if (p != null && !config.isAllowed(p)) { if (httpReq.getRemoteUser() != null) { - LOGGER.log(Level.INFO, "Access denied for user ''{0}'' for URI: {1}", + LOGGER.log(Level.FINER, "Access denied for user ''{0}'' for URI: {1}", new Object[]{httpReq.getRemoteUser(), httpReq.getRequestURI()}); } else { - LOGGER.log(Level.INFO, "Access denied for URI: {0}", httpReq.getRequestURI()); + LOGGER.log(Level.FINER, "Access denied for URI: {0}", httpReq.getRequestURI()); } /* diff --git a/opengrok-web/src/main/java/org/opengrok/web/WebappListener.java b/opengrok-web/src/main/java/org/opengrok/web/WebappListener.java index 39714e532b6..9582b4f64f4 100644 --- a/opengrok-web/src/main/java/org/opengrok/web/WebappListener.java +++ b/opengrok-web/src/main/java/org/opengrok/web/WebappListener.java @@ -63,7 +63,7 @@ public void contextInitialized(final ServletContextEvent servletContextEvent) { ServletContext context = servletContextEvent.getServletContext(); RuntimeEnvironment env = RuntimeEnvironment.getInstance(); - LOGGER.log(Level.INFO, "Starting webapp with version {0} ({1})", + LOGGER.log(Level.FINEST, "Starting webapp with version {0} ({1})", new Object[]{Info.getVersion(), Info.getRevision()}); String config = context.getInitParameter("CONFIGURATION"); diff --git a/opengrok-web/src/main/java/org/opengrok/web/api/v1/controller/ProjectsController.java b/opengrok-web/src/main/java/org/opengrok/web/api/v1/controller/ProjectsController.java index f5d21ac5192..1ec5a6891c1 100644 --- a/opengrok-web/src/main/java/org/opengrok/web/api/v1/controller/ProjectsController.java +++ b/opengrok-web/src/main/java/org/opengrok/web/api/v1/controller/ProjectsController.java @@ -204,7 +204,7 @@ public void deleteProjectData(@PathParam("project") String projectName) throws H public void deleteHistoryCache(@PathParam("project") String projectName) throws HistoryException { Project project = disableProject(projectName); - logger.log(Level.INFO, "deleting history cache for project {0}", projectName); + logger.log(Level.FINER, "deleting history cache for project {0}", projectName); List repos = env.getProjectRepositoriesMap().get(project); diff --git a/opengrok-web/src/main/java/org/opengrok/web/api/v1/controller/SuggesterController.java b/opengrok-web/src/main/java/org/opengrok/web/api/v1/controller/SuggesterController.java index 22b81cd2477..bfebf0b49aa 100644 --- a/opengrok-web/src/main/java/org/opengrok/web/api/v1/controller/SuggesterController.java +++ b/opengrok-web/src/main/java/org/opengrok/web/api/v1/controller/SuggesterController.java @@ -112,7 +112,7 @@ public Result getSuggestions(@Valid @BeanParam final SuggesterQueryData data) th modifyDataBasedOnConfiguration(suggesterData, config); if (!satisfiesConfiguration(suggesterData, config)) 
{ - logger.log(Level.FINER, "Suggester request with data {0} does not satisfy configuration settings", data); + logger.log(Level.FINEST, "Suggester request with data {0} does not satisfy configuration settings", data); throw new WebApplicationException(Response.Status.NOT_FOUND); } diff --git a/opengrok-web/src/main/java/org/opengrok/web/api/v1/suggester/provider/service/impl/SuggesterServiceImpl.java b/opengrok-web/src/main/java/org/opengrok/web/api/v1/suggester/provider/service/impl/SuggesterServiceImpl.java index c0ddacb040d..a54c324667c 100644 --- a/opengrok-web/src/main/java/org/opengrok/web/api/v1/suggester/provider/service/impl/SuggesterServiceImpl.java +++ b/opengrok-web/src/main/java/org/opengrok/web/api/v1/suggester/provider/service/impl/SuggesterServiceImpl.java @@ -156,7 +156,7 @@ private List getNamedIndexReaders( /** {@inheritDoc} */ @Override public void refresh() { - logger.log(Level.FINE, "Refreshing suggester for new configuration {0}", env.getSuggesterConfig()); + logger.log(Level.FINEST, "Refreshing suggester for new configuration {0}", env.getSuggesterConfig()); lock.writeLock().lock(); try { // close and init from scratch because many things may have changed in the configuration @@ -178,7 +178,7 @@ public void rebuild() { lock.readLock().lock(); try { if (suggester == null) { - logger.log(Level.FINE, "Cannot perform rebuild because suggester is not initialized"); + logger.log(Level.FINEST, "Cannot perform rebuild because suggester is not initialized"); return; } suggester.rebuild(getAllProjectIndexDirs()); @@ -203,7 +203,7 @@ public void rebuild(final String project) { lock.readLock().lock(); try { if (suggester == null) { - logger.log(Level.FINE, "Cannot rebuild {0} because suggester is not initialized", project); + logger.log(Level.FINEST, "Cannot rebuild {0} because suggester is not initialized", project); return; } suggester.rebuild(Collections.singleton(getNamedIndexDir(p))); @@ -223,7 +223,7 @@ public void delete(final String project) { lock.readLock().lock(); try { if (suggester == null) { - logger.log(Level.FINE, "Cannot remove {0} because suggester is not initialized", project); + logger.log(Level.FINEST, "Cannot remove {0} because suggester is not initialized", project); return; } suggester.remove(Collections.singleton(project)); @@ -272,7 +272,7 @@ public List> getPopularityData( lock.readLock().lock(); try { if (suggester == null) { - logger.log(Level.FINE, "Cannot retrieve popularity data because suggester is not initialized"); + logger.log(Level.FINEST, "Cannot retrieve popularity data because suggester is not initialized"); return Collections.emptyList(); } return suggester.getSearchCounts(project, field, page, pageSize); @@ -284,7 +284,7 @@ public List> getPopularityData( private void initSuggester() { SuggesterConfig suggesterConfig = env.getSuggesterConfig(); if (!suggesterConfig.isEnabled()) { - logger.log(Level.INFO, "Suggester disabled"); + logger.log(Level.FINEST, "Suggester disabled"); return; } @@ -293,7 +293,7 @@ private void initSuggester() { if (rebuildParalleismLevel == 0) { rebuildParalleismLevel = 1; } - logger.log(Level.FINER, "Suggester rebuild parallelism level: " + rebuildParalleismLevel); + logger.log(Level.FINEST, "Suggester rebuild parallelism level: " + rebuildParalleismLevel); suggester = new Suggester(suggesterDir, suggesterConfig.getMaxResults(), Duration.ofSeconds(suggesterConfig.getBuildTerminationTime()), @@ -340,17 +340,17 @@ private void scheduleRebuild() { cancelScheduledRebuild(); if 
(!env.getSuggesterConfig().isAllowMostPopular()) { // no need to rebuild - logger.log(Level.INFO, "Suggester rebuild not scheduled"); + logger.log(Level.FINEST, "Suggester rebuild not scheduled"); return; } Duration timeToNextRebuild = getTimeToNextRebuild(); if (timeToNextRebuild == null) { - logger.log(Level.INFO, "Suggester rebuild not scheduled"); + logger.log(Level.FINEST, "Suggester rebuild not scheduled"); return; } - logger.log(Level.INFO, "Scheduling suggester rebuild in {0}", timeToNextRebuild); + logger.log(Level.FINEST, "Scheduling suggester rebuild in {0}", timeToNextRebuild); future = instance.scheduler.schedule(instance.getRebuildAllProjectsRunnable(), timeToNextRebuild.toMillis(), TimeUnit.MILLISECONDS); diff --git a/plugins/src/opengrok/auth/plugin/LdapFilterPlugin.java b/plugins/src/opengrok/auth/plugin/LdapFilterPlugin.java index cc8befc8258..feb633813a8 100644 --- a/plugins/src/opengrok/auth/plugin/LdapFilterPlugin.java +++ b/plugins/src/opengrok/auth/plugin/LdapFilterPlugin.java @@ -62,7 +62,7 @@ public void load(Map parameters) { if ((ldapFilter = (String) parameters.get(FILTER_PARAM)) == null) { throw new NullPointerException("Missing param [" + FILTER_PARAM + "] in the setup"); } - LOGGER.log(Level.FINE, "LdapFilter plugin loaded"); + LOGGER.log(Level.FINEST, "LdapFilter plugin loaded"); } @Override diff --git a/plugins/src/opengrok/auth/plugin/LdapUserPlugin.java b/plugins/src/opengrok/auth/plugin/LdapUserPlugin.java index 2e40e1fc099..eb5e663f1eb 100644 --- a/plugins/src/opengrok/auth/plugin/LdapUserPlugin.java +++ b/plugins/src/opengrok/auth/plugin/LdapUserPlugin.java @@ -85,7 +85,7 @@ public void load(Map parameters) { } attributes = attributesVal.split(","); - LOGGER.log(Level.FINE, "LdapUser plugin loaded with objectclass={0}, " + + LOGGER.log(Level.FINEST, "LdapUser plugin loaded with objectclass={0}, " + "attributes={1}", new Object[]{objectClass, String.join(", ", attributes)}); } @@ -109,7 +109,7 @@ protected String getFilter(User user) { Matcher matcher = usernameCnPattern.matcher(user.getUsername()); if (matcher.find()) { commonName = matcher.group(1); - LOGGER.log(Level.FINEST, "extracted common name {0} from {1}", + LOGGER.log(Level.FINER, "extracted common name {0} from {1}", new Object[]{commonName, user.getUsername()}); } else { throw new AuthorizationException(String.format("cannot get common name out of %s", diff --git a/plugins/src/opengrok/auth/plugin/UserPlugin.java b/plugins/src/opengrok/auth/plugin/UserPlugin.java index 2592a54095e..a5139700334 100644 --- a/plugins/src/opengrok/auth/plugin/UserPlugin.java +++ b/plugins/src/opengrok/auth/plugin/UserPlugin.java @@ -74,7 +74,7 @@ public void load(Map parameters) { DECODER_CLASS_PARAM, UserPlugin.class.getName())); } - LOGGER.log(Level.INFO, "loading decoder: {0}", decoder_name); + LOGGER.log(Level.FINEST, "loading decoder: {0}", decoder_name); try { decoder = getDecoder(decoder_name); } catch (ClassNotFoundException|NoSuchMethodException|IllegalAccessException| diff --git a/plugins/src/opengrok/auth/plugin/ldap/LdapServer.java b/plugins/src/opengrok/auth/plugin/ldap/LdapServer.java index d0ee8204391..2bae6371726 100644 --- a/plugins/src/opengrok/auth/plugin/ldap/LdapServer.java +++ b/plugins/src/opengrok/auth/plugin/ldap/LdapServer.java @@ -134,10 +134,10 @@ public synchronized boolean isWorking() { * @return the new connection or null */ private synchronized LdapContext connect() { - LOGGER.log(Level.INFO, "Server {0} connecting", this.url); + LOGGER.log(Level.FINER, "Server {0} connecting", 
this.url); if (errorTimestamp > 0 && errorTimestamp + interval > System.currentTimeMillis()) { - LOGGER.log(Level.INFO, "LDAP server {0} is down", this.url); + LOGGER.log(Level.FINER, "LDAP server {0} is down", this.url); close(); return null; } @@ -159,7 +159,7 @@ private synchronized LdapContext connect() { ctx = new InitialLdapContext(env, null); ctx.reconnect(null); ctx.setRequestControls(null); - LOGGER.log(Level.INFO, "Connected to LDAP server {0}", env.get(Context.PROVIDER_URL)); + LOGGER.log(Level.FINER, "Connected to LDAP server {0}", env.get(Context.PROVIDER_URL)); errorTimestamp = 0; } catch (NamingException ex) { LOGGER.log(Level.INFO, "LDAP server {0} is not responding", env.get(Context.PROVIDER_URL)); @@ -207,7 +207,7 @@ public NamingEnumeration search(String name, String filter, Search } if (reconnected) { - LOGGER.log(Level.INFO, "LDAP server {0} reconnect", env.get(Context.PROVIDER_URL)); + LOGGER.log(Level.FINEST, "LDAP server {0} reconnect", env.get(Context.PROVIDER_URL)); close(); if ((ctx = connect()) == null) { throw new CommunicationException(String.format("LDAP server \"%s\" cannot reconnect", diff --git a/plugins/src/opengrok/auth/plugin/util/RestfulClient.java b/plugins/src/opengrok/auth/plugin/util/RestfulClient.java index 25a34942a34..9f69687c71a 100644 --- a/plugins/src/opengrok/auth/plugin/util/RestfulClient.java +++ b/plugins/src/opengrok/auth/plugin/util/RestfulClient.java @@ -52,7 +52,7 @@ public static int postIt(String URI, String input) { try { Client client = ClientBuilder.newClient(); - LOGGER.log(Level.FINEST, "sending REST POST request to {0}: {1}", + LOGGER.log(Level.FINE, "sending REST POST request to {0}: {1}", new Object[]{URI, input}); Response response = client.target(URI) .request(MediaType.APPLICATION_JSON) diff --git a/plugins/test/opengrok/auth/plugin/UserPluginTest.java b/plugins/test/opengrok/auth/plugin/UserPluginTest.java index 70e0ab3e8e3..985e6f5f9cf 100644 --- a/plugins/test/opengrok/auth/plugin/UserPluginTest.java +++ b/plugins/test/opengrok/auth/plugin/UserPluginTest.java @@ -56,7 +56,7 @@ public void testNoUser() { @Test public void testUser() { - HttpServletRequest req; + HttpServletRequest req = null; Assert.assertTrue(plugin.isAllowed(req = createRequest("007"), new Group())); Assert.assertEquals("007", ((User) req.getAttribute(UserPlugin.REQUEST_ATTR)).getUsername()); Assert.assertTrue(plugin.isAllowed(req = createRequest("008"), new Project())); @@ -69,7 +69,7 @@ public void testUser() { @Test public void testTimeoutedUser() { - HttpServletRequest req; + HttpServletRequest req = null; Assert.assertFalse(plugin.isAllowed(req = createRequest("007", true), new Group())); Assert.assertNull(req.getAttribute(UserPlugin.REQUEST_ATTR)); Assert.assertFalse(plugin.isAllowed(req = createRequest("008", true), new Project())); diff --git a/suggester/src/main/java/org/opengrok/suggest/Suggester.java b/suggester/src/main/java/org/opengrok/suggest/Suggester.java index 3ed2c93ca8b..8b816e3bf75 100644 --- a/suggester/src/main/java/org/opengrok/suggest/Suggester.java +++ b/suggester/src/main/java/org/opengrok/suggest/Suggester.java @@ -140,7 +140,7 @@ public Suggester( */ public void init(final Collection luceneIndexes) { if (luceneIndexes == null || luceneIndexes.isEmpty()) { - logger.log(Level.INFO, "No index directories found, exiting..."); + logger.log(Level.FINEST, "No index directories found, exiting..."); return; } if (!projectsEnabled && luceneIndexes.size() > 1) { @@ -148,7 +148,7 @@ public void init(final Collection luceneIndexes) { } 
synchronized (lock) { - logger.log(Level.INFO, "Initializing suggester"); + logger.log(Level.FINEST, "Initializing suggester"); ExecutorService executor = Executors.newWorkStealingPool(rebuildParallelismLevel); @@ -165,7 +165,7 @@ private void submitInitIfIndexExists(final ExecutorService executorService, fina if (indexExists(indexDir.path)) { executorService.submit(getInitRunnable(indexDir)); } else { - logger.log(Level.FINE, "Index in {0} directory does not exist, skipping...", indexDir); + logger.log(Level.FINEST, "Index in {0} directory does not exist, skipping...", indexDir); } } catch (IOException e) { logger.log(Level.WARNING, "Could not check if index exists", e); @@ -176,7 +176,7 @@ private Runnable getInitRunnable(final NamedIndexDir indexDir) { return () -> { try { Instant start = Instant.now(); - logger.log(Level.FINE, "Initializing {0}", indexDir); + logger.log(Level.FINEST, "Initializing {0}", indexDir); SuggesterProjectData wfst = new SuggesterProjectData(FSDirectory.open(indexDir.path), getSuggesterDir(indexDir.name), allowMostPopular, allowedFields); @@ -188,7 +188,7 @@ private Runnable getInitRunnable(final NamedIndexDir indexDir) { } Duration d = Duration.between(start, Instant.now()); - logger.log(Level.FINE, "Finished initialization of {0}, took {1}", new Object[] {indexDir, d}); + logger.log(Level.FINEST, "Finished initialization of {0}, took {1}", new Object[] {indexDir, d}); } catch (Exception e) { logger.log(Level.SEVERE, "Could not initialize suggester data for " + indexDir, e); } @@ -213,7 +213,7 @@ private void shutdownAndAwaitTermination(final ExecutorService executorService, executorService.shutdown(); try { executorService.awaitTermination(awaitTerminationTime.toMillis(), TimeUnit.MILLISECONDS); - logger.log(Level.INFO, logMessageOnSuccess); + logger.log(Level.FINEST, logMessageOnSuccess); } catch (InterruptedException e) { logger.log(Level.SEVERE, "Interrupted while building suggesters", e); Thread.currentThread().interrupt(); @@ -226,12 +226,12 @@ private void shutdownAndAwaitTermination(final ExecutorService executorService, */ public void rebuild(final Collection indexDirs) { if (indexDirs == null || indexDirs.isEmpty()) { - logger.log(Level.INFO, "Not rebuilding suggester data because no index directories were specified"); + logger.log(Level.FINEST, "Not rebuilding suggester data because no index directories were specified"); return; } synchronized (lock) { - logger.log(Level.INFO, "Rebuilding the following suggesters: {0}", indexDirs); + logger.log(Level.FINEST, "Rebuilding the following suggesters: {0}", indexDirs); ExecutorService executor = Executors.newWorkStealingPool(rebuildParallelismLevel); @@ -252,11 +252,11 @@ private Runnable getRebuildRunnable(final SuggesterProjectData data) { return () -> { try { Instant start = Instant.now(); - logger.log(Level.FINE, "Rebuilding {0}", data); + logger.log(Level.FINEST, "Rebuilding {0}", data); data.rebuild(); Duration d = Duration.between(start, Instant.now()); - logger.log(Level.FINE, "Rebuild of {0} finished, took {1}", new Object[] {data, d}); + logger.log(Level.FINEST, "Rebuild of {0} finished, took {1}", new Object[] {data, d}); } catch (Exception e) { logger.log(Level.SEVERE, "Could not rebuild suggester", e); } @@ -273,7 +273,7 @@ public void remove(final Iterable names) { } synchronized (lock) { - logger.log(Level.INFO, "Removing following suggesters: {0}", names); + logger.log(Level.FINEST, "Removing following suggesters: {0}", names); for (String suggesterName : names) { SuggesterProjectData 
collection = projectData.get(suggesterName); @@ -329,7 +329,7 @@ private Suggestions prefixLookup( List results = readers.parallelStream().flatMap(namedIndexReader -> { SuggesterProjectData data = projectData.get(namedIndexReader.name); if (data == null) { - logger.log(Level.FINE, "{0} not yet initialized", namedIndexReader.name); + logger.log(Level.FINER, "{0} not yet initialized", namedIndexReader.name); partialResult.value = true; return Stream.empty(); } @@ -497,7 +497,7 @@ public List> getSearchCounts( ) { SuggesterProjectData data = projectData.get(project); if (data == null) { - logger.log(Level.FINE, "Cannot retrieve search counts because suggester data for project {0} was not found", + logger.log(Level.FINEST, "Cannot retrieve search counts because suggester data for project {0} was not found", project); return Collections.emptyList(); } @@ -549,7 +549,7 @@ public Void call() { SuggesterProjectData data = projectData.get(namedIndexReader.name); if (data == null) { - logger.log(Level.FINE, "{0} not yet initialized", namedIndexReader.name); + logger.log(Level.FINEST, "{0} not yet initialized", namedIndexReader.name); return null; } boolean gotLock = data.tryLock(); diff --git a/suggester/src/main/java/org/opengrok/suggest/SuggesterProjectData.java b/suggester/src/main/java/org/opengrok/suggest/SuggesterProjectData.java index e34f2642744..48041cd8103 100644 --- a/suggester/src/main/java/org/opengrok/suggest/SuggesterProjectData.java +++ b/suggester/src/main/java/org/opengrok/suggest/SuggesterProjectData.java @@ -192,7 +192,7 @@ private void loadStoredWFSTs() throws IOException { WFSTCompletionLookup WFST = loadStoredWFST(WFSTfile); lookups.put(field, WFST); } else { - logger.log(Level.INFO, "Missing WFST file for {0} field in {1}, creating a new one", + logger.log(Level.FINEST, "Missing WFST file for {0} field in {1}, creating a new one", new Object[] {field, suggesterDir}); WFSTCompletionLookup lookup = build(indexReader, field); @@ -334,7 +334,7 @@ private double getAverageLength(final String field) { if (averageLengths.containsKey(field)) { return averageLengths.get(field); } - logger.log(Level.FINE, "Could not determine average length for field {0}, using default one", field); + logger.log(Level.FINEST, "Could not determine average length for field {0}, using default one", field); return AVERAGE_LENGTH_DEFAULT; } @@ -513,7 +513,7 @@ public List> getSearchCountsSorted(final String field, try { PopularityMap map = searchCountMaps.get(field); if (map == null) { - logger.log(Level.FINE, "No search count map initialized for field {0}", field); + logger.log(Level.FINEST, "No search count map initialized for field {0}", field); return Collections.emptyList(); }
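
Not part of the patch: every hunk above applies the same pattern, demoting messages from INFO, FINE, or FINER to FINEST (occasionally FINER), so they stop appearing under the default java.util.logging configuration, which emits only INFO and above. Below is a minimal, self-contained sketch of that filtering behavior, assuming nothing beyond the JDK; the class and logger names are illustrative and are not taken from OpenGrok.

import java.util.logging.ConsoleHandler;
import java.util.logging.Level;
import java.util.logging.Logger;

public class LevelDemotionSketch {
    private static final Logger LOGGER = Logger.getLogger(LevelDemotionSketch.class.getName());

    public static void main(String[] args) {
        // Default JUL setup (root level INFO): the first record prints, the second is dropped,
        // which is the effect the demotions in this patch rely on.
        LOGGER.log(Level.INFO, "still visible at the default level");
        LOGGER.log(Level.FINEST, "dropped unless FINEST is enabled");

        // To get the demoted messages back, raise verbosity on both the logger and a handler
        // (in a real deployment this would normally be done via logging.properties).
        ConsoleHandler handler = new ConsoleHandler();
        handler.setLevel(Level.FINEST);
        LOGGER.addHandler(handler);
        LOGGER.setUseParentHandlers(false); // avoid duplicate output via the root console handler
        LOGGER.setLevel(Level.FINEST);

        LOGGER.log(Level.FINEST, "now visible: {0}", "logger and handler both at FINEST");
    }
}

With that in mind, anyone who still needs the demoted indexer or suggester messages can enable FINEST for the relevant logger names in their logging configuration rather than reverting this change.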