diff --git a/solr/core/src/java/org/apache/solr/cloud/CreateCollectionCmd.java b/solr/core/src/java/org/apache/solr/cloud/CreateCollectionCmd.java index a1bb70e36ab9..95e6811add67 100644 --- a/solr/core/src/java/org/apache/solr/cloud/CreateCollectionCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/CreateCollectionCmd.java @@ -65,7 +65,6 @@ import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA; import static org.apache.solr.common.params.CommonAdminParams.ASYNC; import static org.apache.solr.common.params.CommonParams.NAME; -import static org.apache.solr.common.util.StrUtils.formatString; public class CreateCollectionCmd implements Cmd { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -200,15 +199,15 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList resul Map requestMap = new HashMap<>(); - log.debug(formatString("Creating SolrCores for new collection {0}, shardNames {1} , replicationFactor : {2}", - collectionName, shardNames, repFactor)); + log.debug("Creating SolrCores for new collection {}, shardNames {} , replicationFactor : {}", + collectionName, shardNames, repFactor); Map coresToCreate = new LinkedHashMap<>(); for (Map.Entry e : positionVsNodes.entrySet()) { ReplicaAssigner.Position position = e.getKey(); String nodeName = e.getValue(); String coreName = collectionName + "_" + position.shard + "_replica" + (position.index + 1); - log.debug(formatString("Creating core {0} as part of shard {1} of collection {2} on {3}" - , coreName, position.shard, collectionName, nodeName)); + log.debug("Creating core {} as part of shard {} of collection {} on {}" + , coreName, position.shard, collectionName, nodeName); String baseUrl = zkStateReader.getBaseUrlForNodeName(nodeName); @@ -307,12 +306,12 @@ String getConfigName(String coll, ZkNodeProps message) throws KeeperException, I } public static void createCollectionZkNode(SolrZkClient zkClient, String collection, Map params) { - log.debug("Check for collection zkNode:" + collection); + log.debug("Check for collection zkNode: {}", collection); String collectionPath = ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection; try { if (!zkClient.exists(collectionPath, true)) { - log.debug("Creating collection in ZooKeeper:" + collection); + log.debug("Creating collection in ZooKeeper: {}", collection); try { Map collectionProps = new HashMap<>(); diff --git a/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java b/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java index e7ac5e5fd161..9cb73e7b6fee 100644 --- a/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java +++ b/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java @@ -301,7 +301,7 @@ TreeSet fetchZkChildren(Watcher watcher) throws InterruptedException, Ke for (String childName : childNames) { // Check format if (!childName.regionMatches(0, PREFIX, 0, PREFIX.length())) { - LOG.debug("Found child node with improper name: " + childName); + LOG.debug("Found child node with improper name: {}", childName); continue; } orderedChildren.add(childName); diff --git a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java index bdbeca9d568c..e1dac3b41791 100644 --- a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java +++ b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java @@ -462,7 +462,9 @@ void runLeaderProcess(boolean weAreReplacement, int 
pauseBeforeStart) throws Kee try (SolrCore core = cc.getCore(coreName)) { if (core == null) { - log.debug("SolrCore not found:" + coreName + " in " + cc.getLoadedCoreNames()); + if (log.isDebugEnabled()) { + log.debug("SolrCore not found:" + coreName + " in " + cc.getLoadedCoreNames()); + } return; } @@ -720,9 +722,11 @@ private boolean shouldIBeLeader(ZkNodeProps leaderProps, SolrCore core, boolean log.debug("My last published State was Active, it's okay to be the leader."); return true; } - log.debug("My last published State was " - + core.getCoreDescriptor().getCloudDescriptor().getLastPublished() - + ", I won't be the leader."); + if (log.isDebugEnabled()) { + log.debug("My last published State was " + + core.getCoreDescriptor().getCloudDescriptor().getLastPublished() + + ", I won't be the leader."); + } // TODO: and if no one is a good candidate? return false; diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerAutoReplicaFailoverThread.java b/solr/core/src/java/org/apache/solr/cloud/OverseerAutoReplicaFailoverThread.java index ea09eeff2137..db8b381ec74c 100644 --- a/solr/core/src/java/org/apache/solr/cloud/OverseerAutoReplicaFailoverThread.java +++ b/solr/core/src/java/org/apache/solr/cloud/OverseerAutoReplicaFailoverThread.java @@ -123,7 +123,9 @@ public void run() { this.thread = Thread.currentThread(); while (!this.isClosed) { // work loop - log.debug("do " + this.getClass().getSimpleName() + " work loop"); + if (log.isDebugEnabled()) { + log.debug("do " + this.getClass().getSimpleName() + " work loop"); + } // every n, look at state and make add / remove calls @@ -165,7 +167,8 @@ private void doWork() { lastClusterStateVersion = clusterState.getZkClusterStateVersion(); Map collections = clusterState.getCollectionsMap(); for (Map.Entry entry : collections.entrySet()) { - log.debug("look at collection={}", entry.getKey()); + String collectionName = entry.getKey(); + log.debug("look at collection={}", collectionName); DocCollection docCollection = entry.getValue(); if (!docCollection.getAutoAddReplicas()) { log.debug("Collection {} is not setup to use autoAddReplicas, skipping..", docCollection.getName()); @@ -175,7 +178,7 @@ private void doWork() { log.debug("Skipping collection because it has no defined replicationFactor, name={}", docCollection.getName()); continue; } - log.debug("Found collection, name={} replicationFactor={}", entry.getKey(), docCollection.getReplicationFactor()); + log.debug("Found collection, name={} replicationFactor={}", collectionName, docCollection.getReplicationFactor()); Collection slices = docCollection.getSlices(); for (Slice slice : slices) { @@ -189,7 +192,7 @@ private void doWork() { if (downReplicas.size() > 0 && goodReplicas < docCollection.getReplicationFactor()) { // badReplicaMap.put(collection, badReplicas); - processBadReplicas(entry.getKey(), downReplicas); + processBadReplicas(collectionName, downReplicas); } else if (goodReplicas > docCollection.getReplicationFactor()) { log.debug("There are too many replicas"); } @@ -215,9 +218,9 @@ private void processBadReplicas(final String collection, final Collection counts = new HashMap<>(); Set unsuitableHosts = new HashSet<>(); diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java index 2c083051626d..5e03c4cabd69 100644 --- a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java +++ 
b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java @@ -216,7 +216,7 @@ public OverseerCollectionMessageHandler(ZkStateReader zkStateReader, String myId @Override @SuppressWarnings("unchecked") public SolrResponse processMessage(ZkNodeProps message, String operation) { - log.debug("OverseerCollectionMessageHandler.processMessage : "+ operation + " , "+ message.toString()); + log.debug("OverseerCollectionMessageHandler.processMessage : {} , {}", operation, message); NamedList results = new NamedList(); try { @@ -910,9 +910,11 @@ private void processResponse(NamedList results, Throwable e, String nodeName, So @SuppressWarnings("unchecked") private void waitForAsyncCallsToComplete(Map requestMap, NamedList results) { - for (String k:requestMap.keySet()) { - log.debug("I am Waiting for :{}/{}", k, requestMap.get(k)); - results.add(requestMap.get(k), waitForCoreAdminAsyncCallToComplete(k, requestMap.get(k))); + for (Map.Entry entry : requestMap.entrySet()) { + String key = entry.getKey(); + String value = entry.getValue(); + log.debug("I am Waiting for :{}/{}", key, value); + results.add(key, waitForCoreAdminAsyncCallToComplete(key, value)); } } diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java index 4dffb21905ee..16272074e978 100644 --- a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java +++ b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java @@ -271,7 +271,7 @@ public void run() { String operation = message.getStr(Overseer.QUEUE_OPERATION); OverseerMessageHandler.Lock lock = messageHandler.lockTask(message, taskBatch); if (lock == null) { - log.debug("Exclusivity check failed for [{}]", message.toString()); + log.debug("Exclusivity check failed for [{}]", message); //we may end crossing the size of the MAX_BLOCKED_TASKS. 
They are fine if (blockedTasks.size() < MAX_BLOCKED_TASKS) blockedTasks.put(head.getId(), head); @@ -291,7 +291,7 @@ public void run() { Thread.currentThread().interrupt(); continue; } - log.debug(messageHandler.getName() + ": Get the message id:" + head.getId() + " message:" + message.toString()); + log.debug("{}: Get the message id:{} message:{}", messageHandler.getName(), head.getId(), message); Runner runner = new Runner(messageHandler, message, operation, head, lock); tpe.execute(runner); @@ -566,14 +566,14 @@ private boolean isSuccessful() { private void printTrackingMaps() { if (log.isDebugEnabled()) { synchronized (runningTasks) { - log.debug("RunningTasks: {}", runningTasks.toString()); + log.debug("RunningTasks: " + runningTasks); } - log.debug("BlockedTasks: {}", blockedTasks.keySet().toString()); + log.debug("BlockedTasks: " + blockedTasks.keySet()); synchronized (completedTasks) { - log.debug("CompletedTasks: {}", completedTasks.keySet().toString()); + log.debug("CompletedTasks: " + completedTasks.keySet()); } synchronized (runningZKTasks) { - log.debug("RunningZKTasks: {}", runningZKTasks.toString()); + log.debug("RunningZKTasks: " + runningZKTasks); } } } diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskQueue.java b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskQueue.java index 92e34cfe4989..3c733b8c4c81 100644 --- a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskQueue.java +++ b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskQueue.java @@ -67,8 +67,9 @@ public boolean containsTaskWithRequestId(String requestIdKey, String requestId) if (data != null) { ZkNodeProps message = ZkNodeProps.load(data); if (message.containsKey(requestIdKey)) { - LOG.debug(">>>> {}", message.get(requestIdKey)); - if(message.get(requestIdKey).equals(requestId)) return true; + Object requestIdInMessage = message.get(requestIdKey); + LOG.debug(">>>> {}", requestIdInMessage); + if(requestIdInMessage.equals(requestId)) return true; } } } catch (KeeperException.NoNodeException e) { diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java index b337bd022cae..ac81e6ce82d6 100644 --- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java +++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java @@ -473,7 +473,7 @@ private void registerAllCoresAsDown( final String coreZkNodeName = descriptor.getCloudDescriptor().getCoreNodeName(); try { - log.debug("calling waitForLeaderToSeeDownState for coreZkNodeName={} collection={} shard={}", new Object[]{coreZkNodeName, collection, slice}); + log.debug("calling waitForLeaderToSeeDownState for coreZkNodeName={} collection={} shard={}", coreZkNodeName, collection, slice); waitForLeaderToSeeDownState(descriptor, coreZkNodeName); } catch (Exception e) { SolrException.log(log, "", e); @@ -903,7 +903,7 @@ public String register(String coreName, final CoreDescriptor desc, boolean recov String leaderUrl = getLeader(cloudDesc, leaderVoteWait + 600000); String ourUrl = ZkCoreNodeProps.getCoreUrl(baseUrl, coreName); - log.debug("We are " + ourUrl + " and leader is " + leaderUrl); + log.debug("We are {} and leader is {}", ourUrl, leaderUrl); boolean isLeader = leaderUrl.equals(ourUrl); try (SolrCore core = cc.getCore(desc.getName())) { @@ -1173,7 +1173,7 @@ public void publish(final CoreDescriptor cd, final Replica.State state, boolean try { String collection = cd.getCloudDescriptor().getCollectionName(); - log.debug("publishing state={}", state.toString()); 
+ log.debug("publishing state={}", state); // System.out.println(Thread.currentThread().getStackTrace()[3]); Integer numShards = cd.getCloudDescriptor().getNumShards(); if (numShards == null) { // XXX sys prop hack @@ -1519,8 +1519,8 @@ private ZkCoreNodeProps waitForLeaderToSeeDownState( } if (lirState != null) { - log.debug("Replica " + myCoreNodeName + - " is already in leader-initiated recovery, so not waiting for leader to see down state."); + log.debug("Replica {}" + + " is already in leader-initiated recovery, so not waiting for leader to see down state.", myCoreNodeName); } else { log.info("Replica " + myCoreNodeName + @@ -1587,7 +1587,7 @@ private ZkCoreNodeProps waitForLeaderToSeeDownState( public static void linkConfSet(SolrZkClient zkClient, String collection, String confSetName) throws KeeperException, InterruptedException { String path = ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection; - log.debug("Load collection config from:" + path); + log.debug("Load collection config from: {}", path); byte[] data; try { data = zkClient.getData(path, null, null, true); @@ -2023,7 +2023,7 @@ public void updateLeaderInitiatedRecoveryState(String collection, String shardId zkClient.makePath(znodePath, znodeData, retryOnConnLoss); } } - log.debug("Wrote {} to {}", state.toString(), znodePath); + log.debug("Wrote {} to {}", state, znodePath); } catch (Exception exc) { if (exc instanceof SolrException) { throw (SolrException) exc; @@ -2120,7 +2120,7 @@ public void addOnReconnectListener(OnReconnect listener) { if (listener != null) { synchronized (reconnectListeners) { reconnectListeners.add(listener); - log.debug("Added new OnReconnect listener "+listener); + log.debug("Added new OnReconnect listener {}", listener); } } } @@ -2135,7 +2135,7 @@ public void removeOnReconnectListener(OnReconnect listener) { wasRemoved = reconnectListeners.remove(listener); } if (wasRemoved) { - log.debug("Removed OnReconnect listener "+listener); + log.debug("Removed OnReconnect listener {}", listener); } else { log.warn("Was asked to remove OnReconnect listener "+listener+ ", but remove operation did not find it in the list of registered listeners."); diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkSolrResourceLoader.java b/solr/core/src/java/org/apache/solr/cloud/ZkSolrResourceLoader.java index 5f32ef2127c5..f769ac074718 100644 --- a/solr/core/src/java/org/apache/solr/cloud/ZkSolrResourceLoader.java +++ b/solr/core/src/java/org/apache/solr/cloud/ZkSolrResourceLoader.java @@ -100,7 +100,7 @@ public InputStream openResource(String resource) throws IOException { // Retry in case of session expiry try { Thread.sleep(1000); - log.debug("Sleeping for 1s before retrying fetching resource=" + resource); + log.debug("Sleeping for 1s before retrying fetching resource={}", resource); } catch (InterruptedException ie) { Thread.currentThread().interrupt(); throw new IOException("Could not load resource=" + resource, ie); diff --git a/solr/core/src/java/org/apache/solr/cloud/overseer/ClusterStateMutator.java b/solr/core/src/java/org/apache/solr/cloud/overseer/ClusterStateMutator.java index 9002c1969dd3..8f15cf0f9bbc 100644 --- a/solr/core/src/java/org/apache/solr/cloud/overseer/ClusterStateMutator.java +++ b/solr/core/src/java/org/apache/solr/cloud/overseer/ClusterStateMutator.java @@ -50,7 +50,7 @@ public ClusterStateMutator(ZkStateReader zkStateReader) { public ZkWriteCommand createCollection(ClusterState clusterState, ZkNodeProps message) { String cName = message.getStr(NAME); - log.debug("building a new 
cName: " + cName); + log.debug("building a new cName: {}", cName); if (clusterState.hasCollection(cName)) { log.warn("Collection {} already exists. exit", cName); return ZkStateWriter.NO_OP; diff --git a/solr/core/src/java/org/apache/solr/cloud/overseer/NodeMutator.java b/solr/core/src/java/org/apache/solr/cloud/overseer/NodeMutator.java index 55fd3efb618c..478561a4712a 100644 --- a/solr/core/src/java/org/apache/solr/cloud/overseer/NodeMutator.java +++ b/solr/core/src/java/org/apache/solr/cloud/overseer/NodeMutator.java @@ -41,7 +41,7 @@ public List downNode(ClusterState clusterState, ZkNodeProps mess List zkWriteCommands = new ArrayList<>(); String nodeName = message.getStr(ZkStateReader.NODE_NAME_PROP); - log.debug("DownNode state invoked for node: " + nodeName); + log.debug("DownNode state invoked for node: {}", nodeName); Map collections = clusterState.getCollectionsMap(); for (Map.Entry entry : collections.entrySet()) { @@ -59,7 +59,7 @@ public List downNode(ClusterState clusterState, ZkNodeProps mess for (Replica replica : replicas) { String rNodeName = replica.getNodeName(); if (rNodeName.equals(nodeName)) { - log.debug("Update replica state for " + replica + " to " + Replica.State.DOWN.toString()); + log.debug("Update replica state for {} to {}", replica, Replica.State.DOWN); Map props = replica.shallowCopy(); props.put(ZkStateReader.STATE_PROP, Replica.State.DOWN.toString()); Replica newReplica = new Replica(replica.getName(), props); diff --git a/solr/core/src/java/org/apache/solr/cloud/overseer/ReplicaMutator.java b/solr/core/src/java/org/apache/solr/cloud/overseer/ReplicaMutator.java index f03eeeb4e3a5..e3c45d8d1b3d 100644 --- a/solr/core/src/java/org/apache/solr/cloud/overseer/ReplicaMutator.java +++ b/solr/core/src/java/org/apache/solr/cloud/overseer/ReplicaMutator.java @@ -237,7 +237,7 @@ private ZkWriteCommand updateState(final ClusterState prevState, ZkNodeProps mes coreNodeName = ClusterStateMutator.getAssignedCoreNodeName(collection, message.getStr(ZkStateReader.NODE_NAME_PROP), message.getStr(ZkStateReader.CORE_NAME_PROP)); if (coreNodeName != null) { - log.debug("node=" + coreNodeName + " is already registered"); + log.debug("node={} is already registered", coreNodeName); } else { // if coreNodeName is null, auto assign one coreNodeName = Assign.assignNode(collection); @@ -251,7 +251,7 @@ private ZkWriteCommand updateState(final ClusterState prevState, ZkNodeProps mes //get shardId from ClusterState sliceName = ClusterStateMutator.getAssignedId(collection, coreNodeName); if (sliceName != null) { - log.debug("shard=" + sliceName + " is already registered"); + log.debug("shard={} is already registered", sliceName); } } if (sliceName == null) { @@ -259,7 +259,7 @@ private ZkWriteCommand updateState(final ClusterState prevState, ZkNodeProps mes if (collectionExists) { // use existing numShards numShards = collection.getSlices().size(); - log.debug("Collection already exists with " + ZkStateReader.NUM_SHARDS_PROP + "=" + numShards); + log.debug("Collection already exists with {}={}", ZkStateReader.NUM_SHARDS_PROP, numShards); } sliceName = Assign.assignShard(collection, numShards); log.info("Assigning new node to shard shard=" + sliceName); diff --git a/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java index e710063f606a..3235e56b8f47 100644 --- a/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java +++ 
b/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java @@ -156,12 +156,12 @@ public void doneWithDirectory(Directory directory) throws IOException { @Override public void close() throws IOException { synchronized (this) { - log.debug("Closing {} - {} directories currently being tracked", this.getClass().getSimpleName(), byDirectoryCache.size()); + final String className = this.getClass().getSimpleName(); + log.debug("Closing {} - {} directories currently being tracked", className, byDirectoryCache.size()); this.closed = true; Collection values = byDirectoryCache.values(); for (CacheValue val : values) { - log.debug("Closing {} - currently tracking: {}", - this.getClass().getSimpleName(), val); + log.debug("Closing {} - currently tracking: {}", className, val); try { // if there are still refs out, we have to wait for them assert val.refCnt > -1 : val.refCnt; @@ -189,7 +189,7 @@ public void close() throws IOException { try { for (CacheValue v : val.closeEntries) { assert v.refCnt == 0 : val.refCnt; - log.debug("Closing directory when closing factory: " + v.path); + log.debug("Closing directory when closing factory: {}", v.path); boolean cl = closeCacheValue(v); if (cl) { closedDirs.add(v); @@ -201,7 +201,7 @@ public void close() throws IOException { } for (CacheValue val : removeEntries) { - log.debug("Removing directory after core close: " + val.path); + log.debug("Removing directory after core close: {}", val.path); try { removeDirectory(val); } catch (Exception e) { @@ -224,7 +224,7 @@ private void removeFromCache(CacheValue v) { // be sure this is called with the this sync lock // returns true if we closed the cacheValue, false if it will be closed later private boolean closeCacheValue(CacheValue cacheValue) { - log.debug("looking to close {} {}", cacheValue.path, cacheValue.closeEntries.toString()); + log.debug("looking to close {} {}", cacheValue.path, cacheValue.closeEntries); List listeners = closeListeners.remove(cacheValue.directory); if (listeners != null) { for (CloseListener listener : listeners) { @@ -268,7 +268,7 @@ private boolean closeCacheValue(CacheValue cacheValue) { for (CacheValue val : cacheValue.removeEntries) { if (!val.deleteAfterCoreClose) { - log.debug("Removing directory before core close: " + val.path); + log.debug("Removing directory before core close: {}", val.path); try { removeDirectory(val); } catch (Exception e) { @@ -295,10 +295,10 @@ private void close(CacheValue val) { log.debug("Closing directory, CoreContainer#isShutdown={}", coreContainer != null ? 
coreContainer.isShutDown() : "null"); try { if (coreContainer != null && coreContainer.isShutDown() && val.directory instanceof ShutdownAwareDirectory) { - log.debug("Closing directory on shutdown: " + val.path); + log.debug("Closing directory on shutdown: {}", val.path); ((ShutdownAwareDirectory) val.directory).closeOnShutdown(); } else { - log.debug("Closing directory: " + val.path); + log.debug("Closing directory: {}", val.path); val.directory.close(); } assert ObjectReleaseTracker.release(val.directory); @@ -418,9 +418,10 @@ public void release(Directory directory) throws IOException { throw new IllegalArgumentException("Unknown directory: " + directory + " " + byDirectoryCache); } - log.debug("Releasing directory: " + cacheValue.path + " " + (cacheValue.refCnt - 1) + " " + cacheValue.doneWithDir); cacheValue.refCnt--; + + log.debug("Releasing directory: {} {} {}", cacheValue.path, cacheValue.refCnt, cacheValue.doneWithDir); assert cacheValue.refCnt >= 0 : cacheValue.refCnt; diff --git a/solr/core/src/java/org/apache/solr/core/Config.java b/solr/core/src/java/org/apache/solr/core/Config.java index 11a381e3937e..199f40690b22 100644 --- a/solr/core/src/java/org/apache/solr/core/Config.java +++ b/solr/core/src/java/org/apache/solr/core/Config.java @@ -265,7 +265,7 @@ public Node getNode(String path, Document doc, boolean errIfMissing) { if (errIfMissing) { throw new RuntimeException(name + " missing "+path); } else { - log.debug(name + " missing optional " + path); + log.debug("{} missing optional {}", name, path); return null; } } @@ -274,7 +274,7 @@ public Node getNode(String path, Document doc, boolean errIfMissing) { name + " contains more than one value for config path: " + path); } Node nd = nodes.item(0); - log.trace(name + ":" + path + "=" + nd); + log.trace("{}:{}={}", name, path, nd); return nd; } catch (XPathExpressionException e) { @@ -299,12 +299,12 @@ public NodeList getNodeList(String path, boolean errIfMissing) { if (errIfMissing) { throw new RuntimeException(name + " missing "+path); } else { - log.debug(name + " missing optional " + path); + log.debug("{} missing optional {}", name, path); return null; } } - log.trace(name + ":" + path + "=" + nodeList); + log.trace("{}:{}={}", name, path, nodeList); return nodeList; } catch (XPathExpressionException e) { @@ -385,8 +385,7 @@ public String getVal(String path, boolean errIfMissing) { if (nd==null) return null; String txt = DOMUtil.getText(nd); - - log.debug(name + ' '+path+'='+txt); + log.debug("{} {}={}", name, path, txt); return txt; /****** diff --git a/solr/core/src/java/org/apache/solr/core/ConfigSetProperties.java b/solr/core/src/java/org/apache/solr/core/ConfigSetProperties.java index 77cd2725cf66..ee07ca76d023 100644 --- a/solr/core/src/java/org/apache/solr/core/ConfigSetProperties.java +++ b/solr/core/src/java/org/apache/solr/core/ConfigSetProperties.java @@ -51,7 +51,7 @@ public static NamedList readFromResourceLoader(SolrResourceLoader loader, String try { reader = new InputStreamReader(loader.openResource(name), StandardCharsets.UTF_8); } catch (SolrResourceNotFoundException ex) { - log.debug("Did not find ConfigSet properties, assuming default properties: " + ex.getMessage()); + log.debug("Did not find ConfigSet properties, assuming default properties: {}", ex.getMessage()); return null; } catch (Exception ex) { throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to load reader for ConfigSet properties: " + name, ex); diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java 
b/solr/core/src/java/org/apache/solr/core/CoreContainer.java index 3c3aaa570f4e..96993ed5ca07 100644 --- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java +++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java @@ -215,7 +215,9 @@ public PluginBag getRequestHandlers() { } { - log.debug("New CoreContainer " + System.identityHashCode(this)); + if (log.isDebugEnabled()) { + log.debug("New CoreContainer " + System.identityHashCode(this)); + } } /** @@ -322,11 +324,11 @@ private synchronized void initializeAuthenticationPlugin(Map aut } if (pluginClassName != null) { - log.debug("Authentication plugin class obtained from security.json: "+pluginClassName); + log.debug("Authentication plugin class obtained from security.json: {}", pluginClassName); } else if (System.getProperty(AUTHENTICATION_PLUGIN_PROP) != null) { pluginClassName = System.getProperty(AUTHENTICATION_PLUGIN_PROP); - log.debug("Authentication plugin class obtained from system property '" + - AUTHENTICATION_PLUGIN_PROP + "': " + pluginClassName); + log.debug("Authentication plugin class obtained from system property '{}': {}", + AUTHENTICATION_PLUGIN_PROP, pluginClassName); } else { log.debug("No authentication plugin used."); } @@ -730,9 +732,7 @@ public void shutdown() { } } catch (InterruptedException e) { Thread.currentThread().interrupt(); - if (log.isDebugEnabled()) { - log.debug("backgroundCloser thread was interrupted before finishing"); - } + log.debug("backgroundCloser thread was interrupted before finishing"); } } // Now clear all the cores that are being operated upon. @@ -817,7 +817,7 @@ protected SolrCore registerCore(CoreDescriptor cd, SolrCore core, boolean regist if( core == null ) { throw new RuntimeException( "Can not register a null core." ); } - + if (isShutDown) { core.close(); throw new IllegalStateException("This CoreContainer has been closed"); @@ -834,14 +834,14 @@ protected SolrCore registerCore(CoreDescriptor cd, SolrCore core, boolean regist coreInitFailures.remove(cd.getName()); if( old == null || old == core) { - log.debug( "registering core: " + cd.getName() ); + log.debug( "registering core: {}", cd.getName() ); if (registerInZk) { zkSys.registerInZk(core, false, skipRecovery); } return null; } else { - log.debug( "replacing core: " + cd.getName() ); + log.debug( "replacing core: {}", cd.getName() ); old.close(); if (registerInZk) { zkSys.registerInZk(core, false, skipRecovery); @@ -1142,7 +1142,7 @@ public void reload(String name) { SolrCore core = solrCores.getCoreFromAnyList(name, false); if (core != null) { // The underlying core properties files may have changed, we don't really know. So we have a (perhaps) stale - // CoreDescriptor we need to reload it if it's out there. + // CoreDescriptor we need to reload it if it's out there. CorePropertiesLocator cpl = new CorePropertiesLocator(null); CoreDescriptor cd = cpl.reload(this, core.getCoreDescriptor()); if (cd == null) cd = core.getCoreDescriptor(); @@ -1331,10 +1331,10 @@ public SolrCore getCore(String name) { // This is a bit of awkwardness where SolrCloud and transient cores don't play nice together. For transient cores, // we have to allow them to be created at any time there hasn't been a core load failure (use reload to cure that). // But for TestConfigSetsAPI.testUploadWithScriptUpdateProcessor, this needs to _not_ try to load the core if - // the core is null and there was an error. 
If you change this, be sure to run both TestConfiSetsAPI and // TestLazyCores if (desc == null || zkSys.getZkController() != null) return null; - + // This will put an entry in pending core ops if the core isn't loaded core = solrCores.waitAddPendingCoreOps(name); diff --git a/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java b/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java index 1747fa2ca12e..e3fcddd68be4 100644 --- a/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java +++ b/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java @@ -228,7 +228,7 @@ public CoreDescriptor(String name, Path instanceDir, Map corePro cloudDesc = null; } - log.debug("Created CoreDescriptor: " + coreProperties); + log.debug("Created CoreDescriptor: {}", coreProperties); } /** diff --git a/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java b/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java index c69770c72fd3..bdd9463131ef 100644 --- a/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java +++ b/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java @@ -86,10 +86,10 @@ public void init(NamedList args) { "Invalid compressionMode: '" + compressionModeStr + "'. Value must be one of " + Arrays.toString(Mode.values())); } - log.debug("Using compressionMode: " + compressionMode); + log.debug("Using compressionMode: {}", compressionMode); } else { compressionMode = SOLR_DEFAULT_COMPRESSION_MODE; - log.debug("Using default compressionMode: " + compressionMode); + log.debug("Using default compressionMode: {}", compressionMode); } codec = new Lucene70Codec(compressionMode) { @Override diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java index d3a1df114dc0..d645ac3be5ad 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrCore.java +++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java @@ -393,7 +393,7 @@ public String getNewIndexDir() { } } if (!result.equals(lastNewIndexDir)) { - log.debug("New index directory detected: old="+lastNewIndexDir + " new=" + result); + log.debug("New index directory detected: old={} new={}", lastNewIndexDir, result); } lastNewIndexDir = result; return result; @@ -746,8 +746,10 @@ void initIndex(boolean passOnPreviousState, boolean reload) throws IOException { // Create the index if it doesn't exist. if(!indexExists) { - log.debug(logid + "Solr index directory '" + new File(indexDir) + "' doesn't exist." - + " Creating new index..."); + if (log.isDebugEnabled()) { + log.debug(logid + "Solr index directory '" + new File(indexDir) + "' doesn't exist." + + " Creating new index..."); + } SolrIndexWriter writer = SolrIndexWriter.create(this, "SolrCore.initIndex", indexDir, getDirectoryFactory(), true, getLatestSchema(), solrConfig.indexConfig, solrDelPolicy, codec); @@ -1050,7 +1052,7 @@ public void seedVersionBuckets() { /** Set UpdateLog to buffer updates if the slice is in construction. */ private void bufferUpdatesIfConstructing(CoreDescriptor coreDescriptor) { - + if (coreContainer != null && coreContainer.isZooKeeperAware()) { if (reqHandlers.get("/get") == null) { log.warn("WARNING: RealTimeGetHandler is not registered at /get. 
" + @@ -1381,9 +1383,9 @@ private StatsCache initStatsCache() { if (pluginInfo != null && pluginInfo.className != null && pluginInfo.className.length() > 0) { cache = createInitInstance(pluginInfo, StatsCache.class, null, LocalStatsCache.class.getName()); - log.debug("Using statsCache impl: " + cache.getClass().getName()); + log.debug("Using statsCache impl: {}", cache.getClass().getName()); } else { - log.debug("Using default statsCache cache: " + LocalStatsCache.class.getName()); + log.debug("Using default statsCache cache: {}", LocalStatsCache.class.getName()); cache = new LocalStatsCache(); } return cache; @@ -1842,9 +1844,12 @@ public RefCounted getSearcher() { */ public IndexFingerprint getIndexFingerprint(SolrIndexSearcher searcher, LeafReaderContext ctx, long maxVersion) throws IOException { + final boolean isDebug = log.isDebugEnabled(); IndexReader.CacheHelper cacheHelper = ctx.reader().getReaderCacheHelper(); if (cacheHelper == null) { - log.debug("Cannot cache IndexFingerprint as reader does not support caching. searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion); + if (isDebug) { + log.debug("Cannot cache IndexFingerprint as reader does not support caching. searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion); + } return IndexFingerprint.getFingerprint(searcher, ctx, maxVersion); } @@ -1855,7 +1860,9 @@ public IndexFingerprint getIndexFingerprint(SolrIndexSearcher searcher, LeafRead // documents were deleted from segment for which fingerprint was cached // if (f == null || (f.getMaxInHash() > maxVersion) || (f.getNumDocs() != ctx.reader().numDocs())) { - log.debug("IndexFingerprint cache miss for searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion); + if (isDebug) { + log.debug("IndexFingerprint cache miss for searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion); + } f = IndexFingerprint.getFingerprint(searcher, ctx, maxVersion); // cache fingerprint for the segment only if all the versions in the segment are included in the fingerprint if (f.getMaxVersionEncountered() == f.getMaxInHash()) { @@ -1863,10 +1870,12 @@ public IndexFingerprint getIndexFingerprint(SolrIndexSearcher searcher, LeafRead perSegmentFingerprintCache.put(cacheHelper.getKey(), f); } - } else { + } else if (isDebug) { log.debug("IndexFingerprint cache hit for searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion); } - log.debug("Cache Size: {}, Segments Size:{}", perSegmentFingerprintCache.size(), searcher.getTopReaderContext().leaves().size()); + if (isDebug) { + log.debug("Cache Size: {}, Segments Size:{}", perSegmentFingerprintCache.size(), searcher.getTopReaderContext().leaves().size()); + } return f; } @@ -2010,7 +2019,7 @@ public RefCounted openNewSearcher(boolean updateHandlerReope // but log a message about it to minimize confusion newestSearcher.incref(); - log.debug("SolrIndexSearcher has not changed - not re-opening: " + newestSearcher.get().getName()); + log.debug("SolrIndexSearcher has not changed - not re-opening: {}", newestSearcher.get().getName()); return newestSearcher; } // ELSE: open a new searcher against the old reader... 
@@ -2448,7 +2457,7 @@ private void registerSearcher(RefCounted newSearcherHolder) { public void closeSearcher() { - log.debug(logid+"Closing main searcher on request."); + log.debug("{}Closing main searcher on request.", logid); synchronized (searcherLock) { if (realtimeSearcher != null) { realtimeSearcher.decref(); @@ -2985,7 +2994,7 @@ private static boolean checkStale(SolrZkClient zkClient, String zkPath, int cur return false; } if (stat.getVersion() > currentVersion) { - log.debug(zkPath+" is stale will need an update from {} to {}", currentVersion,stat.getVersion()); + log.debug("{} is stale will need an update from {} to {}", zkPath, currentVersion, stat.getVersion()); return true; } return false; diff --git a/solr/core/src/java/org/apache/solr/core/SolrDeletionPolicy.java b/solr/core/src/java/org/apache/solr/core/SolrDeletionPolicy.java index 34482cd14d48..bdbc883a3f23 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrDeletionPolicy.java +++ b/solr/core/src/java/org/apache/solr/core/SolrDeletionPolicy.java @@ -150,7 +150,7 @@ private void updateCommits(List commits) { synchronized (this) { long maxCommitAgeTimeStamp = -1L; IndexCommit newest = commits.get(commits.size() - 1); - log.debug("newest commit generation = " + newest.getGeneration()); + log.debug("newest commit generation = {}", newest.getGeneration()); int singleSegKept = (newest.getSegmentCount() == 1) ? 1 : 0; int totalKept = 1; diff --git a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java index 2e679cf6d4d4..5211b792124f 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java +++ b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java @@ -252,7 +252,7 @@ private static URLClassLoader addURLsToClassLoader(final URLClassLoader oldLoade allURLs.addAll(Arrays.asList(oldLoader.getURLs())); allURLs.addAll(urls); for (URL url : urls) { - log.debug("Adding '{}' to classloader", url.toString()); + log.debug("Adding '{}' to classloader", url); } ClassLoader oldParent = oldLoader.getParent(); @@ -546,7 +546,7 @@ public Class findClass(String cname, Class expectedType, Str for (String subpackage : subpackages) { try { String name = base + '.' + subpackage + newName; - log.trace("Trying class name " + name); + log.trace("Trying class name {}", name); return clazz = Class.forName(name,true,classLoader).asSubclass(expectedType); } catch (ClassNotFoundException e1) { // ignore... assume first exception is best. 
@@ -776,9 +776,9 @@ public static Path locateSolrHome() { home = (String)c.lookup("java:comp/env/"+project+"/home"); logOnceInfo("home_using_jndi", "Using JNDI solr.home: "+home ); } catch (NoInitialContextException e) { - log.debug("JNDI not configured for "+project+" (NoInitialContextEx)"); + log.debug("JNDI not configured for {} (NoInitialContextEx)", project); } catch (NamingException e) { - log.debug("No /"+project+"/home in JNDI"); + log.debug("No /{}/home in JNDI", project); } catch( RuntimeException ex ) { log.warn("Odd RuntimeException while testing for JNDI: " + ex.getMessage()); } diff --git a/solr/core/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java index fd568dfbd36d..47761e8c2b90 100644 --- a/solr/core/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java @@ -104,7 +104,7 @@ public void init(NamedList args) { } catch (IllegalArgumentException ex) { // Other implementations will likely throw this exception since "reuse-instance" // is implementation specific. - log.debug("Unable to set the 'reuse-instance' property for the input factory: " + inputFactory); + log.debug("Unable to set the 'reuse-instance' property for the input factory: {}", inputFactory); } } diff --git a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java index 96e505a86d97..b8ff47b026f4 100644 --- a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java +++ b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java @@ -958,9 +958,10 @@ private long downloadIndexFiles(boolean downloadCompleteIndex, Directory indexDi for (Map file : filesToDownload) { String filename = (String) file.get(NAME); long size = (Long) file.get(SIZE); - CompareResult compareResult = compareFile(indexDir, filename, size, (Long) file.get(CHECKSUM)); + Long checksum = (Long) file.get(CHECKSUM); + CompareResult compareResult = compareFile(indexDir, filename, size, checksum); boolean alwaysDownload = filesToAlwaysDownloadIfNoChecksums(filename, size, compareResult); - LOG.debug("Downloading file={} size={} checksum={} alwaysDownload={}", filename, size, file.get(CHECKSUM), alwaysDownload); + LOG.debug("Downloading file={} size={} checksum={} alwaysDownload={}", filename, size, checksum, alwaysDownload); if (!compareResult.equal || downloadCompleteIndex || alwaysDownload) { dirFileFetcher = new DirectoryFileFetcher(tmpIndexDir, file, (String) file.get(NAME), FILE, latestGeneration); diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java index 94ff1893928b..111d47be7d7a 100644 --- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java @@ -701,7 +701,7 @@ private void getFileList(SolrParams solrParams, SolrQueryResponse rsp) { if (confFileNameAlias.size() < 1 || core.getCoreContainer().isZooKeeperAware()) return; - LOG.debug("Adding config files to list: " + includeConfFiles); + LOG.debug("Adding config files to list: {}", includeConfFiles); //if configuration files need to be included get their details rsp.add(CONF_FILES, getConfFileInfoFromCache(confFileNameAlias, confFileInfoCache)); } diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java 
b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java index eceb4b73693f..14f309d65f17 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java @@ -136,7 +136,7 @@ private void doEdit(SolrQueryRequest req, SolrQueryResponse rsp, String path, fi return; } } - log.debug("Security edit operation failed {} time(s)" + count); + log.debug("Security edit operation failed {} time(s)", count); } throw new SolrException(SERVER_ERROR, "Failed to persist security config after 3 attempts. Giving up"); } diff --git a/solr/core/src/java/org/apache/solr/handler/component/FacetComponent.java b/solr/core/src/java/org/apache/solr/handler/component/FacetComponent.java index 80cca1577a57..1949a47672ee 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/FacetComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/FacetComponent.java @@ -868,11 +868,14 @@ private void removeQueryFacetsUnderLimits(ResponseBuilder rb) { // The int minCount = rb.req.getParams().getInt(FacetParams.FACET_MINCOUNT, 0); boolean replace = false; + boolean isTraceEnabled = log.isTraceEnabled(); for (Map.Entry ent : query_facets.entrySet()) { if (ent.getValue().count >= minCount) { newQueryFacets.put(ent.getKey(), ent.getValue()); } else { - log.trace("Removing facetQuery/key: " + ent.getKey() + "/" + ent.getValue().toString() + " mincount=" + minCount); + if (isTraceEnabled) { + log.trace("Removing facetQuery/key: " + ent.getKey() + "/" + ent.getValue().toString() + " mincount=" + minCount); + } replace = true; } } @@ -1541,11 +1544,14 @@ long maxPossible(int shardNum) { public void respectMinCount(long minCount) { HashMap newOne = new HashMap<>(); boolean replace = false; + boolean isTraceEnabled = log.isTraceEnabled(); for (Map.Entry ent : counts.entrySet()) { if (ent.getValue().count >= minCount) { newOne.put(ent.getKey(), ent.getValue()); } else { - log.trace("Removing facet/key: " + ent.getKey() + "/" + ent.getValue().toString() + " mincount=" + minCount); + if (isTraceEnabled) { + log.trace("Removing facet/key: " + ent.getKey() + "/" + ent.getValue().toString() + " mincount=" + minCount); + } replace = true; } } diff --git a/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java b/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java index fd9d37d4aad7..e498d295906e 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java @@ -112,7 +112,7 @@ public void process(ResponseBuilder rb) throws IOException { Entry idToQuery = idToQueryIt.next(); String s = idToQuery.getValue().toString(); - log.debug("MLT Query:" + s); + log.debug("MLT Query:{}", s); temp.add(idToQuery.getKey(), idToQuery.getValue().toString()); } @@ -135,7 +135,7 @@ public void process(ResponseBuilder rb) throws IOException { public void handleResponses(ResponseBuilder rb, ShardRequest sreq) { if ((sreq.purpose & ShardRequest.PURPOSE_GET_TOP_IDS) != 0 && rb.req.getParams().getBool(COMPONENT_NAME, false)) { - log.debug("ShardRequest.response.size: " + sreq.responses.size()); + log.debug("ShardRequest.response.size: {}", sreq.responses.size()); for (ShardResponse r : sreq.responses) { if (r.getException() != null) { // This should only happen in case of using shards.tolerant=true. 
Omit this ShardResponse @@ -143,11 +143,10 @@ public void handleResponses(ResponseBuilder rb, ShardRequest sreq) { } NamedList moreLikeThisReponse = (NamedList) r.getSolrResponse() .getResponse().get("moreLikeThis"); - log.debug("ShardRequest.response.shard: " + r.getShard()); + log.debug("ShardRequest.response.shard: {}", r.getShard()); if (moreLikeThisReponse != null) { for (Entry entry : moreLikeThisReponse) { - log.debug("id: \"" + entry.getKey() + "\" Query: \"" - + entry.getValue() + "\""); + log.debug("id: \"{}\" Query: \"{}\"", entry.getKey(), entry.getValue()); ShardRequest s = buildShardQuery(rb, (String) entry.getValue(), entry.getKey()); rb.addRequest(this, s); @@ -156,7 +155,7 @@ public void handleResponses(ResponseBuilder rb, ShardRequest sreq) { } } - if ((sreq.purpose & ShardRequest.PURPOSE_GET_MLT_RESULTS) != 0) { + if (log.isDebugEnabled() && ((sreq.purpose & ShardRequest.PURPOSE_GET_MLT_RESULTS) != 0)) { for (ShardResponse r : sreq.responses) { log.debug("MLT Query returned: " + r.getSolrResponse().getResponse().toString()); @@ -180,7 +179,7 @@ public void finishStage(ResponseBuilder rb) { for (ShardRequest sreq : rb.finished) { if ((sreq.purpose & ShardRequest.PURPOSE_GET_MLT_RESULTS) != 0) { for (ShardResponse r : sreq.responses) { - log.debug("ShardRequest.response.shard: " + r.getShard()); + log.debug("ShardRequest.response.shard: {}", r.getShard()); String key = r.getShardRequest().params .get(MoreLikeThisComponent.DIST_DOC_ID); SolrDocumentList shardDocList = (SolrDocumentList) r.getSolrResponse().getResponse().get("response"); @@ -208,7 +207,7 @@ public void finishStage(ResponseBuilder rb) { mergedDocList = mergeSolrDocumentList(mergedDocList, shardDocList, mltcount, keyName); } - log.debug("Adding docs for key: " + key); + log.debug("Adding docs for key: {}", key); tempResults.put(key, mergedDocList); } } diff --git a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java index 4f2c933073c6..c0ceddb8d5f5 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java @@ -760,7 +760,7 @@ public static SolrDocument toSolrDoc(SolrInputDocument sdoc, IndexSchema schema) out.add(f); } } else { - log.debug("Don't know how to handle field " + f); + log.debug("Don't know how to handle field {}", f); } } diff --git a/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java b/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java index 926fc008c0a2..8b732da62189 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java @@ -185,12 +185,12 @@ private void initComponents() { dbgCmp = (DebugComponent) comp; } else { components.add(comp); - log.debug("Adding component:"+comp); + log.debug("Adding component:{}", comp); } } if (makeDebugLast == true && dbgCmp != null){ components.add(dbgCmp); - log.debug("Adding debug component:" + dbgCmp); + log.debug("Adding debug component:{}", dbgCmp); } this.components = components; } diff --git a/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java b/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java index a07aff24a200..9356e60b052a 100644 --- a/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java +++ 
b/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java @@ -105,7 +105,7 @@ public XMLLoader init(SolrParams args) { } catch (IllegalArgumentException ex) { // Other implementations will likely throw this exception since "reuse-instance" // isimplementation specific. - log.debug("Unable to set the 'reuse-instance' property for the input chain: " + inputFactory); + log.debug("Unable to set the 'reuse-instance' property for the input chain: {}", inputFactory); } // Init SAX parser (for XSL): @@ -116,7 +116,7 @@ public XMLLoader init(SolrParams args) { xsltCacheLifetimeSeconds = XSLT_CACHE_DEFAULT; if(args != null) { xsltCacheLifetimeSeconds = args.getInt(XSLT_CACHE_PARAM,XSLT_CACHE_DEFAULT); - log.debug("xsltCacheLifetimeSeconds=" + xsltCacheLifetimeSeconds); + log.debug("xsltCacheLifetimeSeconds={}", xsltCacheLifetimeSeconds); } return this; } @@ -263,7 +263,7 @@ void processUpdate(SolrQueryRequest req, UpdateRequestProcessor processor, XMLSt throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unexpected tag without an tag surrounding it."); } } else if (UpdateRequestHandler.COMMIT.equals(currTag) || UpdateRequestHandler.OPTIMIZE.equals(currTag)) { - log.trace("parsing " + currTag); + log.trace("parsing {}", currTag); CommitUpdateCommand cmd = new CommitUpdateCommand(req, UpdateRequestHandler.OPTIMIZE.equals(currTag)); ModifiableSolrParams mp = new ModifiableSolrParams(); diff --git a/solr/core/src/java/org/apache/solr/logging/LogWatcher.java b/solr/core/src/java/org/apache/solr/logging/LogWatcher.java index c5105902826a..ff02fc882ec8 100644 --- a/solr/core/src/java/org/apache/solr/logging/LogWatcher.java +++ b/solr/core/src/java/org/apache/solr/logging/LogWatcher.java @@ -150,7 +150,7 @@ private static LogWatcher createWatcher(LogWatcherConfig config, SolrResourceLoa try { slf4jImpl = StaticLoggerBinder.getSingleton().getLoggerFactoryClassStr(); - log.debug("SLF4J impl is " + slf4jImpl); + log.debug("SLF4J impl is {}", slf4jImpl); if (fname == null) { if ("org.slf4j.impl.Log4jLoggerFactory".equals(slf4jImpl)) { fname = "Log4j"; diff --git a/solr/core/src/java/org/apache/solr/rest/ManagedResourceStorage.java b/solr/core/src/java/org/apache/solr/rest/ManagedResourceStorage.java index af3f0c68bf09..cfb27994a333 100644 --- a/solr/core/src/java/org/apache/solr/rest/ManagedResourceStorage.java +++ b/solr/core/src/java/org/apache/solr/rest/ManagedResourceStorage.java @@ -109,7 +109,7 @@ public static StorageIO newStorageIO(String collection, SolrResourceLoader resou } else { if (zkClient != null) { String znodeBase = "/configs/"+zkConfigName; - log.debug("Setting up ZooKeeper-based storage for the RestManager with znodeBase: "+znodeBase); + log.debug("Setting up ZooKeeper-based storage for the RestManager with znodeBase: {}", znodeBase); storageIO = new ManagedResourceStorage.ZooKeeperStorageIO(zkClient, znodeBase); } else { storageIO = new FileStorageIO(); diff --git a/solr/core/src/java/org/apache/solr/rest/RestManager.java b/solr/core/src/java/org/apache/solr/rest/RestManager.java index 04eb23613c9f..93ba5b73c16e 100644 --- a/solr/core/src/java/org/apache/solr/rest/RestManager.java +++ b/solr/core/src/java/org/apache/solr/rest/RestManager.java @@ -611,7 +611,7 @@ public void init(SolrResourceLoader loader, StorageIO storageIO) throws SolrException { - log.debug("Initializing RestManager with initArgs: "+initArgs); + log.debug("Initializing RestManager with initArgs: {}", initArgs); if (storageIO == null) throw new IllegalArgumentException( diff --git 
a/solr/core/src/java/org/apache/solr/schema/BinaryField.java b/solr/core/src/java/org/apache/solr/schema/BinaryField.java index a0e7d051a13f..7fecbc6da9dd 100644 --- a/solr/core/src/java/org/apache/solr/schema/BinaryField.java +++ b/solr/core/src/java/org/apache/solr/schema/BinaryField.java @@ -82,7 +82,7 @@ public ByteBuffer toObject(IndexableField f) { public IndexableField createField(SchemaField field, Object val) { if (val == null) return null; if (!field.stored()) { - log.trace("Ignoring unstored binary field: " + field); + log.trace("Ignoring unstored binary field: {}", field); return null; } byte[] buf = null; diff --git a/solr/core/src/java/org/apache/solr/schema/CurrencyField.java b/solr/core/src/java/org/apache/solr/schema/CurrencyField.java index 7b27c3f46c8a..3f8fda495127 100644 --- a/solr/core/src/java/org/apache/solr/schema/CurrencyField.java +++ b/solr/core/src/java/org/apache/solr/schema/CurrencyField.java @@ -785,7 +785,7 @@ public boolean reload() throws SolrException { InputStream is = null; Map> tmpRates = new HashMap<>(); try { - log.debug("Reloading exchange rates from file "+this.currencyConfigFile); + log.debug("Reloading exchange rates from file {}", this.currencyConfigFile); is = loader.openResource(currencyConfigFile); javax.xml.parsers.DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); diff --git a/solr/core/src/java/org/apache/solr/schema/EnumField.java b/solr/core/src/java/org/apache/solr/schema/EnumField.java index 3127262548a0..faf8043ed313 100644 --- a/solr/core/src/java/org/apache/solr/schema/EnumField.java +++ b/solr/core/src/java/org/apache/solr/schema/EnumField.java @@ -381,8 +381,7 @@ public IndexableField createField(SchemaField field, Object value) { final boolean docValues = field.hasDocValues(); if (!indexed && !stored && !docValues) { - if (log.isTraceEnabled()) - log.trace("Ignoring unindexed/unstored field: " + field); + log.trace("Ignoring unindexed/unstored field: {}", field); return null; } final Integer intValue = stringValueToIntValue(value.toString()); diff --git a/solr/core/src/java/org/apache/solr/schema/FieldType.java b/solr/core/src/java/org/apache/solr/schema/FieldType.java index 07eb866c7c46..00e8c92194ce 100644 --- a/solr/core/src/java/org/apache/solr/schema/FieldType.java +++ b/solr/core/src/java/org/apache/solr/schema/FieldType.java @@ -264,8 +264,7 @@ public String toString() { */ public IndexableField createField(SchemaField field, Object value) { if (!field.indexed() && !field.stored()) { - if (log.isTraceEnabled()) - log.trace("Ignoring unindexed/unstored field: " + field); + log.trace("Ignoring unindexed/unstored field: {}", field); return null; } diff --git a/solr/core/src/java/org/apache/solr/schema/FieldTypePluginLoader.java b/solr/core/src/java/org/apache/solr/schema/FieldTypePluginLoader.java index f332934abdec..3ab6683af1b1 100644 --- a/solr/core/src/java/org/apache/solr/schema/FieldTypePluginLoader.java +++ b/solr/core/src/java/org/apache/solr/schema/FieldTypePluginLoader.java @@ -169,7 +169,7 @@ protected void init(FieldType plugin, Node node) throws Exception { protected FieldType register(String name, FieldType plugin) throws Exception { - log.trace("fieldtype defined: " + plugin ); + log.trace("fieldtype defined: {}", plugin ); return fieldTypes.put( name, plugin ); } diff --git a/solr/core/src/java/org/apache/solr/schema/IndexSchema.java b/solr/core/src/java/org/apache/solr/schema/IndexSchema.java index 3de59eea3683..3e260575aea3 100644 --- 
a/solr/core/src/java/org/apache/solr/schema/IndexSchema.java +++ b/solr/core/src/java/org/apache/solr/schema/IndexSchema.java @@ -648,7 +648,7 @@ protected synchronized Map loadFields(Document document, XPath x NamedNodeMap attrs = node.getAttributes(); String name = DOMUtil.getAttr(attrs, NAME, "field definition"); - log.trace("reading field def "+name); + log.trace("reading field def {}", name); String type = DOMUtil.getAttr(attrs, TYPE, "field " + name); FieldType ft = fieldTypes.get(type); @@ -671,13 +671,13 @@ protected synchronized Map loadFields(Document document, XPath x + f.getName() + "' [[["+old.toString()+"]]] and [[["+f.toString()+"]]]"; throw new SolrException(ErrorCode.SERVER_ERROR, msg ); } - log.debug("field defined: " + f); + log.debug("field defined: {}", f); if( f.getDefaultValue() != null ) { - log.debug(name+" contains default value: " + f.getDefaultValue()); + log.debug("{} contains default value: {}", name, f.getDefaultValue()); fieldsWithDefaultValue.add( f ); } if (f.isRequired()) { - log.debug(name+" is required in this schema"); + log.debug("{} is required in this schema", name); requiredFields.add(f); } } else if (node.getNodeName().equals(DYNAMIC_FIELD)) { @@ -710,7 +710,9 @@ protected static DynamicField[] dynamicFieldListToSortedArray(List DynamicField[] dFields = dynamicFieldList.toArray(new DynamicField[dynamicFieldList.size()]); Arrays.sort(dFields); - log.trace("Dynamic Field Ordering:" + Arrays.toString(dFields)); + if (log.isTraceEnabled()) { + log.trace("Dynamic Field Ordering:" + Arrays.toString(dFields)); + } return dFields; } @@ -811,9 +813,9 @@ public void registerDynamicFields(SchemaField... fields) { List dynFields = new ArrayList<>(asList(dynamicFields)); for (SchemaField field : fields) { if (isDuplicateDynField(dynFields, field)) { - log.debug("dynamic field already exists: dynamic field: [" + field.getName() + "]"); + log.debug("dynamic field already exists: dynamic field: [{}]", field.getName()); } else { - log.debug("dynamic field creation for schema field: " + field.getName()); + log.debug("dynamic field creation for schema field: {}", field.getName()); addDynamicFieldNoDupCheck(dynFields, field); } } @@ -822,7 +824,7 @@ public void registerDynamicFields(SchemaField... 
fields) { private void addDynamicFieldNoDupCheck(List dFields, SchemaField f) { dFields.add(new DynamicField(f)); - log.debug("dynamic field defined: " + f); + log.debug("dynamic field defined: {}", f); } protected boolean isDuplicateDynField(List dFields, SchemaField f) { @@ -846,8 +848,10 @@ public void registerCopyField( String source, String dest ) { * @see SolrCoreAware */ public void registerCopyField(String source, String dest, int maxChars) { - log.debug(COPY_FIELD + " " + SOURCE + "='" + source + "' " + DESTINATION + "='" + dest - + "' " + MAX_CHARS + "=" + maxChars); + if (log.isDebugEnabled()) { + log.debug(COPY_FIELD + " " + SOURCE + "='" + source + "' " + DESTINATION + "='" + dest + + "' " + MAX_CHARS + "=" + maxChars); + } DynamicField destDynamicField = null; SchemaField destSchemaField = fields.get(dest); @@ -975,7 +979,7 @@ private void registerDynamicCopyField( DynamicCopy dcopy ) { temp[temp.length -1] = dcopy; dynamicCopyFields = temp; } - log.trace("Dynamic Copy Field:" + dcopy); + log.trace("Dynamic Copy Field:{}", dcopy); } static SimilarityFactory readSimilarity(SolrResourceLoader loader, Node node) { diff --git a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java index 9bc888dd767b..f4d620f31140 100644 --- a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java +++ b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java @@ -401,7 +401,7 @@ public ManagedIndexSchema addFields(Collection newFields, newSchema.fields.put(newField.getName(), newField); if (null != newField.getDefaultValue()) { - log.debug(newField.getName() + " contains default value: " + newField.getDefaultValue()); + log.debug("{} contains default value: {}", newField.getName(), newField.getDefaultValue()); newSchema.fieldsWithDefaultValue.add(newField); } if (newField.isRequired()) { @@ -501,7 +501,7 @@ public ManagedIndexSchema deleteFields(Collection names) { SchemaField replacementField = SchemaField.create(fieldName, replacementFieldType, replacementArgs); newSchema.fields.put(fieldName, replacementField); if (null != replacementField.getDefaultValue()) { - log.debug(replacementField.getName() + " contains default value: " + replacementField.getDefaultValue()); + log.debug("{} contains default value: {}", replacementField.getName(), replacementField.getDefaultValue()); newSchema.fieldsWithDefaultValue.add(replacementField); } if (replacementField.isRequired()) { @@ -966,7 +966,7 @@ public ManagedIndexSchema addFieldTypes(List fieldTypeList, boolean p if (i > 0) fieldTypeNames.append(", "); fieldTypeNames.append(fieldTypeList.get(i).typeName); } - log.debug("Added field types: {}", fieldTypeNames.toString()); + log.debug("Added field types: " + fieldTypeNames); } } else { // this is unlikely to happen as most errors are handled as exceptions in the persist code @@ -1067,7 +1067,7 @@ public ManagedIndexSchema replaceFieldType(String typeName, String replacementCl SchemaField replacementField = SchemaField.create(fieldName, replacementFieldType, oldField.getArgs()); replacementFields.add(replacementField); // Save the new field to be added after iteration is finished if (null != replacementField.getDefaultValue()) { - log.debug(replacementField.getName() + " contains default value: " + replacementField.getDefaultValue()); + log.debug("{} contains default value: {}", replacementField.getName(), replacementField.getDefaultValue()); newSchema.fieldsWithDefaultValue.add(replacementField); } if 
(replacementField.isRequired()) { diff --git a/solr/core/src/java/org/apache/solr/schema/OpenExchangeRatesOrgProvider.java b/solr/core/src/java/org/apache/solr/schema/OpenExchangeRatesOrgProvider.java index 2d16108d2004..f04be6445b87 100644 --- a/solr/core/src/java/org/apache/solr/schema/OpenExchangeRatesOrgProvider.java +++ b/solr/core/src/java/org/apache/solr/schema/OpenExchangeRatesOrgProvider.java @@ -139,7 +139,7 @@ public Set listAvailableCurrencies() { public boolean reload() throws SolrException { InputStream ratesJsonStream = null; try { - log.debug("Reloading exchange rates from "+ratesFileLocation); + log.debug("Reloading exchange rates from {}", ratesFileLocation); try { ratesJsonStream = (new URL(ratesFileLocation)).openStream(); } catch (Exception e) { @@ -172,7 +172,7 @@ public void init(Map params) throws SolrException { refreshInterval = 60; log.warn("Specified refreshInterval was too small. Setting to 60 minutes which is the update rate of openexchangerates.org"); } - log.debug("Initialized with rates="+ratesFileLocation+", refreshInterval="+refreshInterval+"."); + log.debug("Initialized with rates={}, refreshInterval={}.", ratesFileLocation, refreshInterval); refreshIntervalSeconds = refreshInterval * 60; } catch (SolrException e1) { throw e1; diff --git a/solr/core/src/java/org/apache/solr/schema/PointField.java b/solr/core/src/java/org/apache/solr/schema/PointField.java index cad3c7e9706e..1e380c6cacef 100644 --- a/solr/core/src/java/org/apache/solr/schema/PointField.java +++ b/solr/core/src/java/org/apache/solr/schema/PointField.java @@ -197,9 +197,7 @@ protected boolean isFieldUsed(SchemaField field) { boolean docValues = field.hasDocValues(); if (!indexed && !stored && !docValues) { - if (log.isTraceEnabled()) { - log.trace("Ignoring unindexed/unstored field: " + field); - } + log.trace("Ignoring unindexed/unstored field: {}", field); return false; } return true; diff --git a/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java b/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java index d2dc811d02ad..f6ee81130548 100644 --- a/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java +++ b/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java @@ -168,8 +168,7 @@ public String toFormattedString(Field f) throws IOException { */ public static org.apache.lucene.document.FieldType createFieldType(SchemaField field) { if (!field.indexed() && !field.stored()) { - if (LOG.isTraceEnabled()) - LOG.trace("Ignoring unindexed/unstored field: " + field); + LOG.trace("Ignoring unindexed/unstored field: {}", field); return null; } org.apache.lucene.document.FieldType newType = new org.apache.lucene.document.FieldType(); diff --git a/solr/core/src/java/org/apache/solr/schema/TrieField.java b/solr/core/src/java/org/apache/solr/schema/TrieField.java index e7a33bdb64f3..a12bc02ccdc9 100644 --- a/solr/core/src/java/org/apache/solr/schema/TrieField.java +++ b/solr/core/src/java/org/apache/solr/schema/TrieField.java @@ -561,8 +561,7 @@ public IndexableField createField(SchemaField field, Object value) { boolean docValues = field.hasDocValues(); if (!indexed && !stored && !docValues) { - if (log.isTraceEnabled()) - log.trace("Ignoring unindexed/unstored field: " + field); + log.trace("Ignoring unindexed/unstored field: {}", field); return null; } diff --git a/solr/core/src/java/org/apache/solr/schema/ZkIndexSchemaReader.java b/solr/core/src/java/org/apache/solr/schema/ZkIndexSchemaReader.java index e7194045095d..1d2c409db8c5 100644 --- 
a/solr/core/src/java/org/apache/solr/schema/ZkIndexSchemaReader.java +++ b/solr/core/src/java/org/apache/solr/schema/ZkIndexSchemaReader.java @@ -57,7 +57,7 @@ public ZkIndexSchemaReader(ManagedIndexSchemaFactory managedIndexSchemaFactory, public void preClose(SolrCore core) { CoreContainer cc = core.getCoreContainer(); if (cc.isZooKeeperAware()) { - log.debug("Removing ZkIndexSchemaReader OnReconnect listener as core "+core.getName()+" is shutting down."); + log.debug("Removing ZkIndexSchemaReader OnReconnect listener as core {} is shutting down.", core.getName()); ZkIndexSchemaReader.this.isRemoved = true; cc.getZkController().removeOnReconnectListener(ZkIndexSchemaReader.this); } diff --git a/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java b/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java index ffcc99d0e319..99efb8d75305 100644 --- a/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java +++ b/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java @@ -136,7 +136,7 @@ public TermStatistics termStatistics(SolrIndexSearcher localSearcher, Term term, throws IOException { TermStats termStats = termStatsCache.get(term.toString()); if (termStats == null) { - LOG.debug("## Missing global termStats info: {}, using local", term.toString()); + LOG.debug("## Missing global termStats info: {}, using local", term); return localSearcher.localTermStatistics(term, context); } else { return termStats.toTermStatistics(); diff --git a/solr/core/src/java/org/apache/solr/search/stats/LocalStatsCache.java b/solr/core/src/java/org/apache/solr/search/stats/LocalStatsCache.java index 2eb3fc0e35af..410fee18db00 100644 --- a/solr/core/src/java/org/apache/solr/search/stats/LocalStatsCache.java +++ b/solr/core/src/java/org/apache/solr/search/stats/LocalStatsCache.java @@ -38,7 +38,7 @@ public class LocalStatsCache extends StatsCache { @Override public StatsSource get(SolrQueryRequest req) { - LOG.debug("## GET {}", req.toString()); + LOG.debug("## GET {}", req); return new LocalStatsSource(); } @@ -49,31 +49,33 @@ public void init(PluginInfo info) { // by returning null we don't create additional round-trip request. 
@Override public ShardRequest retrieveStatsRequest(ResponseBuilder rb) { - LOG.debug("## RDR {}", rb.req.toString()); + LOG.debug("## RDR {}", rb.req); return null; } @Override public void mergeToGlobalStats(SolrQueryRequest req, List responses) { - LOG.debug("## MTGD {}", req.toString()); - for (ShardResponse r : responses) { - LOG.debug(" - {}", r); + if (LOG.isDebugEnabled()) { + LOG.debug("## MTGD " + req); + for (ShardResponse r : responses) { + LOG.debug(" - " + r); + } } } @Override public void returnLocalStats(ResponseBuilder rb, SolrIndexSearcher searcher) { - LOG.debug("## RLD {}", rb.req.toString()); + LOG.debug("## RLD {}", rb.req); } @Override public void receiveGlobalStats(SolrQueryRequest req) { - LOG.debug("## RGD {}", req.toString()); + LOG.debug("## RGD {}", req); } @Override public void sendGlobalStats(ResponseBuilder rb, ShardRequest outgoing) { - LOG.debug("## SGD {}", outgoing.toString()); + LOG.debug("## SGD {}", outgoing); } } diff --git a/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java b/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java index 42d22ca8e835..992c50507af4 100644 --- a/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java +++ b/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java @@ -221,7 +221,7 @@ private void putParamOptional(Map params, String internalParamNa @Override public boolean doAuthenticate(ServletRequest req, ServletResponse rsp, FilterChain chain) throws Exception { - log.debug("Request to authenticate using kerberos: "+req); + log.debug("Request to authenticate using kerberos: {}", req); final HttpServletResponse frsp = (HttpServletResponse)rsp; diff --git a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java index 4f6bae0aead1..012ae8f82655 100644 --- a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java +++ b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java @@ -335,7 +335,7 @@ protected void init() throws Exception { return; // we are done with a valid handler } } - log.debug("no handler or core retrieved for " + path + ", follow through..."); + log.debug("no handler or core retrieved for {}, follow through...", path); action = PASSTHROUGH; } @@ -493,7 +493,9 @@ public Action call() throws IOException { if (headers != null) { for (Map.Entry e : headers.entrySet()) response.setHeader(e.getKey(), e.getValue()); } - log.debug("USER_REQUIRED "+req.getHeader("Authorization")+" "+ req.getUserPrincipal()); + if (log.isDebugEnabled()) { + log.debug("USER_REQUIRED " + req.getHeader("Authorization") + " " + req.getUserPrincipal()); + } } if (!(authResponse.statusCode == HttpStatus.SC_ACCEPTED) && !(authResponse.statusCode == HttpStatus.SC_OK)) { log.info("USER_REQUIRED auth header {} context : {} ", req.getHeader("Authorization"), context); diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java index 24bcf3dc38a4..16ed2aefdf98 100644 --- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java +++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java @@ -133,7 +133,9 @@ public SolrDispatchFilter() { @Override public void init(FilterConfig config) throws ServletException { - log.trace("SolrDispatchFilter.init(): {}", this.getClass().getClassLoader()); + if (log.isTraceEnabled()) { + log.trace("SolrDispatchFilter.init(): " + this.getClass().getClassLoader()); + } 
SolrRequestParsers.fileCleaningTracker = new SolrFileCleaningTracker(); @@ -168,7 +170,7 @@ public void init(FilterConfig config) throws ServletException extraProperties); this.httpClient = cores.getUpdateShardHandler().getHttpClient(); setupJvmMetrics(); - log.debug("user.dir=" + System.getProperty("user.dir")); + log.debug("user.dir={}", System.getProperty("user.dir")); } catch( Throwable t ) { // catch this so our filter still works diff --git a/solr/core/src/java/org/apache/solr/spelling/DirectSolrSpellChecker.java b/solr/core/src/java/org/apache/solr/spelling/DirectSolrSpellChecker.java index 15fee72c3b2b..7b6c759d9066 100644 --- a/solr/core/src/java/org/apache/solr/spelling/DirectSolrSpellChecker.java +++ b/solr/core/src/java/org/apache/solr/spelling/DirectSolrSpellChecker.java @@ -174,7 +174,7 @@ public void build(SolrCore core, SolrIndexSearcher searcher) throws IOException @Override public SpellingResult getSuggestions(SpellingOptions options) throws IOException { - LOG.debug("getSuggestions: " + options.tokens); + LOG.debug("getSuggestions: {}", options.tokens); SpellingResult result = new SpellingResult(); float accuracy = (options.accuracy == Float.MIN_VALUE) ? checker.getAccuracy() : options.accuracy; diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/SolrSuggester.java b/solr/core/src/java/org/apache/solr/spelling/suggest/SolrSuggester.java index 267d9ad196ec..d9f21dfbeef8 100644 --- a/solr/core/src/java/org/apache/solr/spelling/suggest/SolrSuggester.java +++ b/solr/core/src/java/org/apache/solr/spelling/suggest/SolrSuggester.java @@ -230,7 +230,7 @@ public File getStoreFile() { /** Returns suggestions based on the {@link SuggesterOptions} passed */ public SuggesterResult getSuggestions(SuggesterOptions options) throws IOException { - LOG.debug("getSuggestions: " + options.token); + LOG.debug("getSuggestions: {}", options.token); if (lookup == null) { LOG.info("Lookup is null - invoke suggest.build first"); return EMPTY_RESULT; diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/Suggester.java b/solr/core/src/java/org/apache/solr/spelling/suggest/Suggester.java index d585fed72f38..b9cde3df0020 100644 --- a/solr/core/src/java/org/apache/solr/spelling/suggest/Suggester.java +++ b/solr/core/src/java/org/apache/solr/spelling/suggest/Suggester.java @@ -197,7 +197,7 @@ public void reload(SolrCore core, SolrIndexSearcher searcher) throws IOException @Override public SpellingResult getSuggestions(SpellingOptions options) throws IOException { - LOG.debug("getSuggestions: " + options.tokens); + LOG.debug("getSuggestions: {}", options.tokens); if (lookup == null) { LOG.info("Lookup is null - invoke spellchecker.build first"); return EMPTY_RESULT; diff --git a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java index 72d48aea9bdf..75825471e576 100644 --- a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java +++ b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java @@ -248,7 +248,9 @@ public IndexInput clone() { @Override public void sync(Collection names) throws IOException { - LOG.debug("Sync called on {}", Arrays.toString(names.toArray())); + if (LOG.isDebugEnabled()) { + LOG.debug("Sync called on " + Arrays.toString(names.toArray())); + } } @Override diff --git a/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java b/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java index bc2afa879c34..6b99b783016b 100644 --- 
a/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java +++ b/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java @@ -192,14 +192,14 @@ private void changeWriter(SolrCore core, boolean rollback, boolean openNewWriter if (iw != null) { if (!rollback) { try { - log.debug("Closing old IndexWriter... core=" + coreName); + log.debug("Closing old IndexWriter... core={}", coreName); iw.close(); } catch (Exception e) { SolrException.log(log, "Error closing old IndexWriter. core=" + coreName, e); } } else { try { - log.debug("Rollback old IndexWriter... core=" + coreName); + log.debug("Rollback old IndexWriter... core={}", coreName); iw.rollback(); } catch (Exception e) { SolrException.log(log, "Error rolling back old IndexWriter. core=" + coreName, e); diff --git a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java index dd179f22ade5..87e45afd6214 100644 --- a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java +++ b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java @@ -809,7 +809,7 @@ public UpdateLog getUpdateLog() { @Override public void close() throws IOException { - log.debug("closing " + this); + log.debug("closing {}", this); commitTracker.close(); softCommitTracker.close(); diff --git a/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java b/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java index d484e85b13e1..b14d19f8ee3b 100644 --- a/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java +++ b/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java @@ -114,7 +114,7 @@ private SolrIndexConfig(SolrConfig solrConfig) { public SolrIndexConfig(SolrConfig solrConfig, String prefix, SolrIndexConfig def) { if (prefix == null) { prefix = "indexConfig"; - log.debug("Defaulting to prefix \""+prefix+"\" for index configuration"); + log.debug("Defaulting to prefix \"{}\" for index configuration", prefix); } if (def == null) { diff --git a/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java b/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java index 049d2286b759..62d78e4ae753 100644 --- a/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java +++ b/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java @@ -109,7 +109,7 @@ public SolrIndexWriter(String name, Directory d, IndexWriterConfig conf) throws this.infoStream = conf.getInfoStream(); this.directory = d; numOpens.incrementAndGet(); - log.debug("Opened Writer " + name); + log.debug("Opened Writer {}", name); // no metrics mergeTotals = false; mergeDetails = false; @@ -121,7 +121,7 @@ private SolrIndexWriter(SolrCore core, String name, String path, Directory direc setOpenMode(create ? IndexWriterConfig.OpenMode.CREATE : IndexWriterConfig.OpenMode.APPEND). 
setIndexDeletionPolicy(delPolicy).setCodec(codec) ); - log.debug("Opened Writer " + name); + log.debug("Opened Writer {}", name); this.name = name; infoStream = getConfig().getInfoStream(); this.directory = directory; @@ -275,7 +275,7 @@ protected void doAfterFlush() throws IOException { @Override public void close() throws IOException { - log.debug("Closing Writer " + name); + log.debug("Closing Writer {}", name); try { super.close(); } catch (Throwable t) { @@ -290,7 +290,7 @@ public void close() throws IOException { @Override public void rollback() throws IOException { - log.debug("Rollback Writer " + name); + log.debug("Rollback Writer {}", name); try { super.rollback(); } catch (Throwable t) { diff --git a/solr/core/src/java/org/apache/solr/update/UpdateLog.java b/solr/core/src/java/org/apache/solr/update/UpdateLog.java index c50add4a45e3..63422960c4dc 100644 --- a/solr/core/src/java/org/apache/solr/update/UpdateLog.java +++ b/solr/core/src/java/org/apache/solr/update/UpdateLog.java @@ -914,8 +914,8 @@ private synchronized List getEntryFromTLog(long lookupPointer, long lookupVersio // This can happen when trying to deserialize the entry at position lookupPointer, // but from a different tlog than the one containing the desired entry. // Just ignore the exception, so as to proceed to the next tlog. - log.debug("Exception reading the log (this is expected, don't worry)=" + lookupLog + ", for version=" + lookupVersion + - ". This can be ignored."); + log.debug("Exception reading the log (this is expected, don't worry)={}, for version={}. " + + "This can be ignored.", lookupLog, lookupVersion); } if (obj != null && obj instanceof List) { @@ -2031,7 +2031,7 @@ protected Long seedBucketsWithHighestVersion(SolrIndexSearcher newSearcher, Vers } log.debug("Took {}ms to seed version buckets with highest version {}", - timer.getTime(), String.valueOf(highestVersion)); + timer.getTime(), highestVersion); return highestVersion; } diff --git a/solr/core/src/java/org/apache/solr/update/processor/AllValuesOrNoneFieldMutatingUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/AllValuesOrNoneFieldMutatingUpdateProcessor.java index 1d56a62d485e..b51ce1be3d67 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/AllValuesOrNoneFieldMutatingUpdateProcessor.java +++ b/solr/core/src/java/org/apache/solr/update/processor/AllValuesOrNoneFieldMutatingUpdateProcessor.java @@ -84,7 +84,7 @@ protected final SolrInputField mutate(final SolrInputField srcField) { final Object destVal = mutateValue(srcVal); if (SKIP_FIELD_VALUE_LIST_SINGLETON == destVal) { log.debug("field '{}' {} value '{}' is not mutable, so no values will be mutated", - new Object[] { srcField.getName(), srcVal.getClass().getSimpleName(), srcVal }); + srcField.getName(), srcVal.getClass().getSimpleName(), srcVal); return srcField; } if (DELETE_VALUE_SINGLETON == destVal) { diff --git a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java index cb1b2fb68c6c..ea33a57c59fd 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java +++ b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java @@ -1182,7 +1182,9 @@ protected boolean versionAdd(AddUpdateCommand cmd) throws IOException { Long lastVersion = vinfo.lookupVersion(cmd.getIndexedId()); if (lastVersion != null && Math.abs(lastVersion) >= versionOnUpdate) { // This update is a 
repeat, or was reordered. We need to drop this update. - log.debug("Dropping add update due to version {}", idBytes.utf8ToString()); + if (log.isDebugEnabled()) { + log.debug("Dropping add update due to version " + idBytes.utf8ToString()); + } return true; } diff --git a/solr/core/src/java/org/apache/solr/update/processor/DocBasedVersionConstraintsProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/DocBasedVersionConstraintsProcessorFactory.java index a8b331c84299..c8ac8dcfa588 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/DocBasedVersionConstraintsProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/DocBasedVersionConstraintsProcessorFactory.java @@ -343,9 +343,7 @@ private boolean isVersionNewEnough(BytesRef indexedDocId, return true; } if (ignoreOldUpdates) { - if (log.isDebugEnabled()) { - log.debug("Dropping update since user version is not high enough: " + newUserVersion + "; old user version=" + oldUserVersion); - } + log.debug("Dropping update since user version is not high enough: {}; old user version={}", newUserVersion, oldUserVersion); // log.info("VERSION returning false (dropping update)" ); return false; } else { diff --git a/solr/core/src/java/org/apache/solr/update/processor/RegexpBoostProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/RegexpBoostProcessor.java index 3f41a940ebde..1c0d40dcac27 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/RegexpBoostProcessor.java +++ b/solr/core/src/java/org/apache/solr/update/processor/RegexpBoostProcessor.java @@ -140,7 +140,7 @@ private List initBoostEntries(InputStream is) throws IOException { String regexp = fields[0]; String boost = fields[1]; newBoostEntries.add(new BoostEntry(Pattern.compile(regexp), Double.parseDouble(boost))); - log.debug("Read regexp " + regexp + " with boost " + boost); + log.debug("Read regexp {} with boost {}", regexp, boost); } else { log.warn("Malformed config input line: " + line + " (expected 2 fields, got " + fields.length + " fields). Skipping entry."); continue; diff --git a/solr/core/src/java/org/apache/solr/update/processor/URLClassifyProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/URLClassifyProcessor.java index 0844b6023fca..d9342ae724b3 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/URLClassifyProcessor.java +++ b/solr/core/src/java/org/apache/solr/update/processor/URLClassifyProcessor.java @@ -125,7 +125,7 @@ public void processAdd(AddUpdateCommand command) throws IOException { if (canonicalUrlFieldname != null) { document.setField(canonicalUrlFieldname, getCanonicalUrl(normalizedURL)); } - log.debug(document.toString()); + log.debug("{}", document); } catch (MalformedURLException | URISyntaxException e) { log.warn("cannot get the normalized url for \"" + url + "\" due to " + e.getMessage()); } diff --git a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java index 0ed626cb0fc3..be7bd8a2f570 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java +++ b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java @@ -117,7 +117,7 @@ public void init(PluginInfo info) { (null != info.name ? info.name : "") + "\"" + (info.isDefault() ? 
" (default)" : ""); - log.debug("creating " + infomsg); + log.debug("creating {}", infomsg); // wrap in an ArrayList so we know we know we can do fast index lookups // and that add(int,Object) is supported @@ -154,7 +154,7 @@ public void init(PluginInfo info) { distrib.init(new NamedList()); list.add(runIndex, distrib); - log.debug("inserting DistributedUpdateProcessorFactory into " + infomsg); + log.debug("inserting DistributedUpdateProcessorFactory into {}", infomsg); } chain = list; @@ -237,7 +237,7 @@ public static UpdateRequestProcessorChain constructChain(UpdateRequestProcessorC //port-processor is tried to be inserted before RunUpdateProcessor insertBefore(urps, post, RunUpdateProcessorFactory.class, urps.size() - 1); UpdateRequestProcessorChain result = new UpdateRequestProcessorChain(urps, core); - if (log.isInfoEnabled()) { + if (log.isDebugEnabled()) { ArrayList names = new ArrayList<>(urps.size()); for (UpdateRequestProcessorFactory urp : urps) names.add(urp.getClass().getSimpleName()); log.debug("New dynamic chain constructed : " + StrUtils.join(names, '>')); diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java index 7be76af6ed15..a3f6e8143168 100644 --- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java +++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java @@ -182,7 +182,7 @@ public Option[] getOptions() { protected void runImpl(CommandLine cli) throws Exception { String zkHost = cli.getOptionValue("zkHost", ZK_HOST); - log.debug("Connecting to Solr cluster: " + zkHost); + log.debug("Connecting to Solr cluster: {}", zkHost); try (CloudSolrClient cloudSolrClient = new CloudSolrClient.Builder().withZkHost(zkHost).build()) { String collection = cli.getOptionValue("collection"); @@ -489,7 +489,7 @@ private static List> findToolClassesInPackage(String packageName) { } } catch (Exception e) { // safe to squelch this as it's just looking for tools to run - log.debug("Failed to find Tool impl classes in "+packageName+" due to: "+e); + log.debug("Failed to find Tool impl classes in {} due to: {}", packageName, e); } return toolClasses; } @@ -1137,7 +1137,7 @@ protected void runCloudTool(CloudSolrClient cloudSolrClient, CommandLine cli) th if (collection == null) throw new IllegalArgumentException("Must provide a collection to run a healthcheck against!"); - log.debug("Running healthcheck for "+collection); + log.debug("Running healthcheck for {}", collection); ZkStateReader zkStateReader = cloudSolrClient.getZkStateReader(); diff --git a/solr/core/src/java/org/apache/solr/util/plugin/AbstractPluginLoader.java b/solr/core/src/java/org/apache/solr/util/plugin/AbstractPluginLoader.java index d0b8785fff3a..a5d05a2591d3 100644 --- a/solr/core/src/java/org/apache/solr/util/plugin/AbstractPluginLoader.java +++ b/solr/core/src/java/org/apache/solr/util/plugin/AbstractPluginLoader.java @@ -138,7 +138,7 @@ public T load( SolrResourceLoader loader, NodeList nodes ) { List info = new ArrayList<>(); T defaultPlugin = null; - + final boolean logDebug = log.isDebugEnabled(); if (nodes !=null ) { for (int i=0; i
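
Editor's note: the hunks above apply two recurring logging idioms rather than any functional change. The sketch below is illustrative only and is not part of the patch; the class `LoggingIdiomsExample`, its `example` method, and its parameters are hypothetical, but the two calls echo the DefaultSolrCoreState and HdfsDirectory hunks above. SLF4J's parameterized form defers message formatting (and the arguments' toString()) until the level is known to be enabled, while an explicit isDebugEnabled() guard is still used where merely building the argument is expensive.

    import java.lang.invoke.MethodHandles;
    import java.util.Arrays;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Hypothetical class, for illustration of the patch's logging idioms only.
    class LoggingIdiomsExample {
      private static final Logger log =
          LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

      void example(String coreName, String[] names) {
        // Idiom 1: parameterized logging. The "{}" placeholder means the message
        // is only assembled if DEBUG is enabled, so no guard is needed.
        log.debug("Closing old IndexWriter... core={}", coreName);

        // Idiom 2: explicit guard. Here the argument itself (Arrays.toString over
        // the whole array) is costly to compute, so the statement is wrapped and
        // plain concatenation inside the guard is acceptable.
        if (log.isDebugEnabled()) {
          log.debug("Sync called on " + Arrays.toString(names));
        }
      }
    }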