diff --git a/etc/bankdefs/hipo4/dc.json b/etc/bankdefs/hipo4/dc.json index e8dd22f348..616528e41b 100644 --- a/etc/bankdefs/hipo4/dc.json +++ b/etc/bankdefs/hipo4/dc.json @@ -6,6 +6,7 @@ "info": "reconstructed hits using DC wire positions", "entries": [ {"name":"id", "type":"S", "info":"id of the hit"}, + {"name": "indexTDC", "type":"S", "info":"index in the bank DC::TDC"}, {"name":"status", "type":"S", "info":"id of the hit"}, {"name":"sector", "type":"B", "info":"DC sector"}, {"name":"superlayer", "type":"B", "info":"DC superlayer (1...6)"}, diff --git a/reconstruction/dc/src/main/java/org/jlab/rec/dc/banks/HitReader.java b/reconstruction/dc/src/main/java/org/jlab/rec/dc/banks/HitReader.java index 6f45c09806..faa907a8cf 100644 --- a/reconstruction/dc/src/main/java/org/jlab/rec/dc/banks/HitReader.java +++ b/reconstruction/dc/src/main/java/org/jlab/rec/dc/banks/HitReader.java @@ -44,7 +44,7 @@ public class HitReader { private int run = 0; private long tiTimeStamp = 0; private DataEvent event = null; - + private IndexedTable tt = null; private IndexedTable reverseTT = null; private IndexedTable dcrbjitters = null; @@ -54,7 +54,8 @@ public class HitReader { private IndexedTable docares = null; private IndexedTable time2dist = null; private IndexedTable t0s = null; - + + private int numTDCBankRows = -1; private List _DCHits; private List _HBHits; //hit-based tracking hit information private List _TBHits; //time-based tracking hit information @@ -263,6 +264,7 @@ private void fetch_DCHits(Clas12NoiseAnalysis noiseAnalysis, RawDataBank bankFiltered = new RawDataBank(bankNames.getTdcBank(), rawBankOrders); bankFiltered.read(event); + this.set_NumTDCBankRows(bankFiltered.rows()); for (int i = 0; i < bankFiltered.rows(); i++) { int sector = bankFiltered.getByte("sector", i); int layer = (bankFiltered.getByte("layer", i)-1)%6 + 1; @@ -339,6 +341,7 @@ private void fetch_DCHits(Clas12NoiseAnalysis noiseAnalysis, hit.calc_CellSize(detector); double posError = hit.get_CellSize() 
/ Math.sqrt(12.); hit.set_DocaErr(posError); + hit.set_IndexTDC(index); this._DCHits.add(hit); } } @@ -624,6 +627,7 @@ private void read_NNHits() { tPars[2] = (double)bankAI.getFloat("phi", j); tPars[3] = (double)bankAI.getByte("id", j); + aimatch.clear(); for (int k = 0; k < 6; k++) { aimatch.put(Ids[k], tPars); } @@ -915,4 +919,20 @@ public List get_DCHits(int sectorSelect) { return list; } } + + /** + * + * @param num # of rows in DC::TDC bank + */ + public void set_NumTDCBankRows(int num){ + this.numTDCBankRows = num; + } + + /** + * + * @return # of rows in DC::TDC bank + */ + public int get_NumTDCBankRows(){ + return numTDCBankRows; + } } diff --git a/reconstruction/dc/src/main/java/org/jlab/rec/dc/banks/RecoBankWriter.java b/reconstruction/dc/src/main/java/org/jlab/rec/dc/banks/RecoBankWriter.java index 3be9e11d45..529be90740 100644 --- a/reconstruction/dc/src/main/java/org/jlab/rec/dc/banks/RecoBankWriter.java +++ b/reconstruction/dc/src/main/java/org/jlab/rec/dc/banks/RecoBankWriter.java @@ -36,21 +36,65 @@ public RecoBankWriter(Banks names) { public void updateListsWithClusterInfo(List fhits, List clusters) { - + ArrayList rmHits = new ArrayList(); + ArrayList addHits = new ArrayList(); for (int i = 0; i < clusters.size(); i++) { clusters.get(i).set_Id(i + 1); for (int j = 0; j < clusters.get(i).size(); j++) { clusters.get(i).get(j).set_AssociatedClusterID(clusters.get(i).get_Id()); + addHits.add(clusters.get(i).get(j)); for (int k = 0; k < fhits.size(); k++) { if (fhits.get(k).get_Id() == clusters.get(i).get(j).get_Id()) { - fhits.remove(k); - fhits.add(clusters.get(i).get(j)); - + rmHits.add(fhits.get(k)); + } } } } + + fhits.removeAll(rmHits); + fhits.addAll(addHits); + } + + public DataBank fillHitsBank(DataEvent event, List hitlist) { + String name = bankNames.getHitsBank(); + + int rejCnt = 0; + for (int i = 0; i < hitlist.size(); i++) { +// if (hitlist.get(i).get_Id() == -1 /*|| hitlist.get(i).get_Id()==0*/) { //PASS1 + if (hitlist.get(i).get_Id() 
== -1 || hitlist.get(i).get_Id()==0) { + rejCnt++; + } + } + DataBank bank = event.createBank(name, hitlist.size()-rejCnt); + rejCnt=0; + for (int i = 0; i < hitlist.size(); i++) { +// if (hitlist.get(i).get_Id() == -1 /*|| hitlist.get(i).get_Id()==0*/) { //PASS1 + if (hitlist.get(i).get_Id() == -1 || hitlist.get(i).get_Id()==0) { + rejCnt++; + continue; } + bank.setShort("id", i-rejCnt, (short) hitlist.get(i).get_Id()); + bank.setShort("indexTDC", i-rejCnt, (short) hitlist.get(i).get_IndexTDC()); + bank.setShort("status", i-rejCnt, (short) hitlist.get(i).get_QualityFac()); + bank.setByte("superlayer", i-rejCnt, (byte) hitlist.get(i).get_Superlayer()); + bank.setByte("layer", i-rejCnt, (byte) hitlist.get(i).get_Layer()); + bank.setByte("sector", i-rejCnt, (byte) hitlist.get(i).get_Sector()); + bank.setShort("wire", i-rejCnt, (short) hitlist.get(i).get_Wire()); + bank.setFloat("docaError", i-rejCnt, (float) hitlist.get(i).get_DocaErr()); + bank.setFloat("trkDoca", i-rejCnt, (float) hitlist.get(i).get_ClusFitDoca()); + bank.setFloat("LocX", i-rejCnt, (float) hitlist.get(i).get_lX()); + bank.setFloat("LocY", i-rejCnt, (float) hitlist.get(i).get_lY()); + bank.setFloat("X", i-rejCnt, (float) hitlist.get(i).get_X()); + bank.setFloat("Z", i-rejCnt, (float) hitlist.get(i).get_Z()); + bank.setByte("LR", i-rejCnt, (byte) hitlist.get(i).get_LeftRightAmb()); + bank.setShort("clusterID", i-rejCnt, (short) hitlist.get(i).get_AssociatedClusterID()); + bank.setInt("TDC",i-rejCnt,hitlist.get(i).get_TDC()); + bank.setByte("jitter",i, (byte) hitlist.get(i).getJitter()); + + } + + return bank; } @@ -829,6 +873,7 @@ public List createRawHitList(List hits) { hit.get_Layer(), hit.get_Wire(), hit.get_TDC(), hit.getJitter(), hit.get_Id()); fhit.set_Id(hit.get_Id()); + fhit.set_IndexTDC(hit.get_IndexTDC()); fhit.set_DocaErr(hit.get_DocaErr()); fhits.add(fhit); } diff --git a/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/Cluster.java 
b/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/Cluster.java index eb972c03ac..735fc111aa 100644 --- a/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/Cluster.java +++ b/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/Cluster.java @@ -18,7 +18,9 @@ public class Cluster extends ArrayList { private int _Sector; // sector[1...6] private int _Superlayer; // superlayer [1,...6] private int _Id; // cluster Id - + private int _MinWire; + private int _MaxWire; + /** * * @param sector the sector (1...6) @@ -107,6 +109,38 @@ public int get_RegionSlayer() { return (this._Superlayer + 1) % 2 + 1; } + /** + * + * @return the min wire of the cluster (1...112) + */ + public int get_MinWire() { + return _MinWire; + } + + /** + * + * @param _MinWire min wire of the cluster (1...112) + */ + public void set_MinWire(int _MinWire) { + this._MinWire = _MinWire; + } + + /** + * + * @return the max wire of the cluster (1...112) + */ + public int get_MaxWire() { + return _MaxWire; + } + + /** + * + * @param _MaxWire max wire of the cluster (1...112) + */ + public void set_MaxWire(int _MaxWire) { + this._MaxWire = _MaxWire; + } + /** * * @return cluster info. 
about location and number of hits contained in it diff --git a/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/ClusterCleanerUtilities.java b/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/ClusterCleanerUtilities.java index e27e639ae0..b2b37c20f1 100644 --- a/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/ClusterCleanerUtilities.java +++ b/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/ClusterCleanerUtilities.java @@ -137,7 +137,7 @@ public List ClusterSplitter(FittedCluster clus, int nextClsStartI // loop over accumulator array to find peaks (allows for more than one peak for multiple tracks) // The accumulator cell count must be at least half the total number of hits // Make binrMax, bintMax arrays to allow for more than one peak - int threshold = Constants.DC_MIN_NLAYERS; + int threshold = Constants.DC_MIN_NLAYERS-1; int nbPeaksR_Phi = 0; // 1st find the peaks in the R_Phi accumulator array @@ -145,7 +145,7 @@ public List ClusterSplitter(FittedCluster clus, int nextClsStartI for (int ibint1 = 0; ibint1 < N_t; ibint1++) { //find the peak - if (R_Phi_Accumul[ibinr1][ibint1] >= Constants.DC_MIN_NLAYERS) { + if (R_Phi_Accumul[ibinr1][ibint1] >= Constants.DC_MIN_NLAYERS-1) { if (R_Phi_Accumul[ibinr1][ibint1] > threshold) { threshold = R_Phi_Accumul[ibinr1][ibint1]; @@ -171,7 +171,7 @@ public List ClusterSplitter(FittedCluster clus, int nextClsStartI //remove all existing hits and add only the ones passing the criteria below //newClus.removeAll(clus); for (int i = 0; i < clus.size(); i++) { - double rho = clus.get(i).get_X(); + double rho = clus.get(i).get_lX(); double phi = clus.get(i).get_lY(); for (int j_t = 0; j_t < N_t; j_t++) { @@ -205,7 +205,8 @@ public List ClusterSplitter(FittedCluster clus, int nextClsStartI } } //require 4 layers to make a cluster - if (count_nlayers_in_cluster(contigArrayOfHits) < Constants.DC_MIN_NLAYERS) { + if ((!isExceptionalCluster(contigArrayOfHits) && count_nlayers_in_cluster(contigArrayOfHits) < 
Constants.DC_MIN_NLAYERS) + || (isExceptionalCluster(contigArrayOfHits) && count_nlayers_in_cluster(contigArrayOfHits) < Constants.DC_MIN_NLAYERS - 1)) { passCluster = false; } @@ -221,7 +222,7 @@ public List ClusterSplitter(FittedCluster clus, int nextClsStartI } } - // make new clusters + // make new clusters with application of OverlappingClusterResolver List selectedClusList = new ArrayList<>(); int newcid = nextClsStartIndex; @@ -239,19 +240,36 @@ public List ClusterSplitter(FittedCluster clus, int nextClsStartI } } } + + // Apply OverlappingClusterResolver again + List selectedClusList2 = new ArrayList<>(); + for (FittedCluster cluster : selectedClusList) { + cluster.set_Id(newcid++); + cf.SetFitArray(cluster, "LC"); + cf.Fit(cluster, true); + + FittedCluster bestCls = OverlappingClusterResolver(cluster, selectedClusList); + + if (bestCls != null) { + + if (!(selectedClusList2.contains(bestCls))) { + selectedClusList2.add(bestCls); + } + } + } int splitclusId = 1; - if (!selectedClusList.isEmpty()) { - for (FittedCluster cl : selectedClusList) { + if (!selectedClusList2.isEmpty()) { + for (FittedCluster cl : selectedClusList2) { cl.set_Id(clus.get_Id() * 1000 + splitclusId); splitclusId++; } } - if (selectedClusList.isEmpty()) { - selectedClusList.add(clus); // if the splitting fails, then return the original cluster + if (selectedClusList2.isEmpty()) { + selectedClusList2.add(clus); // if the splitting fails, then return the original cluster } - return selectedClusList; + return selectedClusList2; } public List> byLayerListSorter(List DCHits, int sector, int superlyr) { @@ -703,9 +721,9 @@ public FittedCluster SecondariesRemover(DataEvent event, FittedCluster clus, Clu * @return the selected cluster */ public FittedCluster OverlappingClusterResolver(FittedCluster thisclus, List clusters) { - + // Get list for overlapped clusters List overlapingClusters = new ArrayList<>(); - + for (FittedCluster cls : clusters) { List hitOvrl = new ArrayList<>(); @@ -721,17 
+739,18 @@ public FittedCluster OverlappingClusterResolver(FittedCluster thisclus, List 0.2) { - passCls = false; - } + + //if (Math.abs(ovr.get_clusterLineFitSlope() - cls.get_clusterLineFitSlope()) > 0.2) { + // passCls = false; + //} } - if (hitOvrl.size() < 3) { + if((!isExceptionalFittedCluster(cls) && !isExceptionalFittedCluster(thisclus) && hitOvrl.size() < 3) + || ((isExceptionalFittedCluster(cls) || isExceptionalFittedCluster(thisclus)) && hitOvrl.size() < 2)) { passCls = false; } @@ -740,8 +759,20 @@ public FittedCluster OverlappingClusterResolver(FittedCluster thisclus, List 1){ + List rmClusters = new ArrayList<>(); + for(FittedCluster overlapingCls : overlapingClusters){ + if(overlapingCls.get_Superlayer() <=4 && Math.abs(overlapingCls.get_clusterLineFitSlope()) > 0.578) //tan(30 deg) + rmClusters.add(overlapingCls); + } + if(overlapingClusters.size() > rmClusters.size()) overlapingClusters.removeAll(rmClusters); + } + + Collections.sort(overlapingClusters); // Order overlapping clusters; 1st priority: cluster size; 2nd priority if same cluster size : fitting quality + // return the largest cluster. 
return overlapingClusters.get(0); @@ -978,5 +1009,76 @@ public FittedCluster ClusterCleaner(FittedCluster clus, ClusterFitter cf, DCGean // LOGGER.log(Level.INFO, h.printInfo()); return BestCluster; } + + /** + * Check if one or more layers are skipped in the cluster + * @param hitsInClus the hits in a cluster (can be either Hit or FittedHit) + * @param nlayr the number of layers + * @return true if one or more layers are skipped in the cluster + */ + private boolean isExceptionalClusterHelper(List hitsInClus, int nlayr) { + // Initialize array to count hits in each layer + int[] nlayers = new int[nlayr]; + + // Count hits for each layer in a single pass through the hits + for (Hit hit : hitsInClus) { + int layer = hit.get_Layer() - 1; // layer numbering starts from 1 + if (layer >= 0 && layer < nlayr) { + nlayers[layer]++; + } + } + + // Check if the first or last two layerers are missed + if ((nlayers[0] == 0 && nlayers[1] == 0) || (nlayers[4] == 0 && nlayers[5] == 0)) { + return true; + } + + // Check if there is one or more skipped layers in the middle + for (int l = 0; l < 4; l++) { + if (nlayers[l] > 0 && nlayers[l + 1] == 0) { + return true; + } + } + + return false; + } + + /** + * Wrapper for checking if a cluster of Hit objects is exceptional. + */ + public boolean isExceptionalCluster(List hitsInClus) { + return isExceptionalClusterHelper(hitsInClus, 6); // 6 layers for Hit objects + } + + /** + * Wrapper for checking if a cluster of FittedHit objects is exceptional. 
+ */ + public boolean isExceptionalFittedCluster(List hitsInClus) { + return isExceptionalClusterHelper(hitsInClus, 6); // 6 layers for FittedHit objects + } + + public Cluster ClusterSticher(Cluster thisclus, Cluster nextclus, int cid){ + ClusterFitter cf = new ClusterFitter(); + + // Two clusters must be in the same sector and the same superlayer + if((thisclus.get_Sector() != nextclus.get_Sector()) || (thisclus.get_Superlayer() != nextclus.get_Superlayer())) return null; + + // Dont take stiching if number of layers in two clusters less than 3 + if((count_nlayers_in_cluster(thisclus) + count_nlayers_in_cluster(nextclus)) < 3) return null; + + // Only allow one-wire skipped + if((nextclus.get_MinWire() - thisclus.get_MaxWire() != 2)) return null; + + Cluster stichedcluster = new Cluster(thisclus.get_Sector(), thisclus.get_Superlayer(), cid); + stichedcluster.addAll(thisclus); + stichedcluster.addAll(nextclus); + + if ((!isExceptionalCluster(stichedcluster) && count_nlayers_in_cluster(stichedcluster) < Constants.DC_MIN_NLAYERS) + || (isExceptionalCluster(stichedcluster) && count_nlayers_in_cluster(stichedcluster) < Constants.DC_MIN_NLAYERS - 1)) + return null; + + return stichedcluster; + + } } diff --git a/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/ClusterFinder.java b/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/ClusterFinder.java index d6df1944f7..d9b8eb2ab8 100644 --- a/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/ClusterFinder.java +++ b/reconstruction/dc/src/main/java/org/jlab/rec/dc/cluster/ClusterFinder.java @@ -6,6 +6,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.logging.Level; import java.util.logging.Logger; import org.jlab.detector.geant4.v2.DCGeant4Factory; import org.jlab.io.base.DataEvent; @@ -109,7 +110,7 @@ public List findClumps(List allhits, ClusterCleanerUtilities ct) { Collections.sort(allhits); List clumps = new ArrayList<>(); - + // looping over each 
superlayer in each sector // each superlayer is treated independently int cid = 1; // cluster id, will increment with each new good cluster @@ -143,15 +144,15 @@ public List findClumps(List allhits, ClusterCleanerUtilities ct) { wi++; } - - // Need at least MIN_NLAYERS - if (ct.count_nlayers_in_cluster(hits) >= Constants.DC_MIN_NLAYERS) { - - // cluster constructor DCCluster(hit.sector,hit.superlayer, cid) + + // Need at least DC_MIN_NLAYERS layers if no skipped layer or at least DC_MIN_NLAYERS - 1 layers if with skipped layer + if ((!ct.isExceptionalCluster(hits) && ct.count_nlayers_in_cluster(hits) >= Constants.DC_MIN_NLAYERS) + || (ct.isExceptionalCluster(hits) && ct.count_nlayers_in_cluster(hits) >= Constants.DC_MIN_NLAYERS - 1)) { + // cluster constructor DCCluster(hit.sector,hit.superlayer, cid) Cluster this_cluster = new Cluster((int) (ssl / nsect) + 1, (int) (ssl % nsect) + 1, cid++); //LOGGER.log(Level.FINER, " created cluster "+this_cluster.printInfo()); this_cluster.addAll(hits); - + clumps.add(this_cluster); } @@ -161,7 +162,7 @@ public List findClumps(List allhits, ClusterCleanerUtilities ct) { wi++; } - } + } return clumps; } @@ -170,18 +171,18 @@ public List findClumps(List allhits, ClusterCleanerUtilities ct) { * @param ct * @param cf * @param DcDetector + * @param numTDCBankRows * @return clusters of hits. Hit-based tracking linear fits to the wires are * done to determine the clusters. 
The result is a fitted cluster */ - public List FindHitBasedClusters(List allhits, ClusterCleanerUtilities ct, ClusterFitter cf, DCGeant4Factory DcDetector) { + public List FindHitBasedClusters(List allhits, ClusterCleanerUtilities ct, ClusterFitter cf, DCGeant4Factory DcDetector, int numTDCBankRows) { //fill array of hit this.fillHitArray(allhits, 0); - //prune noise - //ct.HitListPruner(allhits, HitArray); + //find clumps of hits init List clusters = this.findClumps(allhits, ct); - + allhits.clear(); for (Cluster clus : clusters) { @@ -192,32 +193,23 @@ public List FindHitBasedClusters(List allhits, ClusterCleane this.fillHitArray(allhits, 0); clusters.clear(); clusters = this.findClumps(allhits, ct); - + // create cluster list to be fitted List selectedClusList = new ArrayList<>(); - for (Cluster clus : clusters) { - if(clus.size() fittedClusList = new ArrayList<>(); List refittedClusList = new ArrayList<>(); @@ -226,15 +218,13 @@ public List FindHitBasedClusters(List allhits, ClusterCleane cf.SetFitArray(clus, "LC"); cf.Fit(clus, true); - if(clus.get_fitProb() Constants.HITBASEDTRKGMINFITHI2PROB ){ - // || - // (clus.size() < Constants.HITBASEDTRKGNONSPLITTABLECLSSIZE && clus.get_fitProb()!=0) ){ + if (clus.get_fitProb() > Constants.HITBASEDTRKGMINFITHI2PROB ){ fittedClusList.add(clus); //if the chi2 prob is good enough, then just add the cluster, or if the cluster is not split-able because it has too few hits } else { @@ -243,9 +233,10 @@ public List FindHitBasedClusters(List allhits, ClusterCleane } } - ArrayList rmHits = new ArrayList(); + int idSharedHits = numTDCBankRows + 10000; for (FittedCluster clus : fittedClusList) { - if (clus != null && clus.size() > 3 && clus.get_fitProb()>Constants.HITBASEDTRKGMINFITHI2PROB) { + if (clus != null && ((!ct.isExceptionalFittedCluster(clus) && clus.size() >= Constants.DC_MIN_NLAYERS) || + (ct.isExceptionalFittedCluster(clus) && clus.size() >= Constants.DC_MIN_NLAYERS-1)) && 
clus.get_fitProb()>Constants.HITBASEDTRKGMINFITHI2PROB) { // update the hits for (FittedHit fhit : clus) { @@ -260,13 +251,30 @@ public List FindHitBasedClusters(List allhits, ClusterCleane clus = ct.ClusterCleaner(clus, cf, DcDetector); // update the hits + ArrayList rmHits = new ArrayList(); + ArrayList addHits = new ArrayList(); for (FittedHit fhit : clus) { - fhit.set_AssociatedClusterID(clus.get_Id()); + if(fhit.get_AssociatedClusterID() <= 0) + fhit.set_AssociatedClusterID(clus.get_Id()); + else{ + try{ + FittedHit newHit = fhit.clone(); + newHit.set_IndexTDC(fhit.get_IndexTDC()); + newHit.set_Id(idSharedHits++); + newHit.set_AssociatedClusterID(clus.get_Id()); + rmHits.add(fhit); + addHits.add(newHit); + + } catch (CloneNotSupportedException ex) { + Logger.getLogger(FittedHit.class.getName()).log(Level.SEVERE, null, ex); + } + } } + clus.removeAll(rmHits); + clus.addAll(addHits); cf.SetFitArray(clus, "TSC"); cf.Fit(clus, false); cf.SetSegmentLineParameters(clus.get(0).get_Z(), clus); - if (clus != null ) { refittedClusList.add(clus); } @@ -274,18 +282,17 @@ public List FindHitBasedClusters(List allhits, ClusterCleane } } - - //LOGGER.log(Level.FINER, " Clusters Step 4"); - //for(FittedCluster c : refittedClusList) - // for(FittedHit h : c) - // LOGGER.log(Level.FINER, h.printInfo()); + return refittedClusList; } - public List RecomposeClusters(Map> grpHits, + public List RecomposeClusters(Map> grpHits, DCGeant4Factory dcDetector, ClusterFitter cf) { cf.reset(); + + ClusterCleanerUtilities ct = new ClusterCleanerUtilities(); + List clusters = new ArrayList<>(); // using iterators @@ -294,7 +301,8 @@ public List RecomposeClusters(Map> while(itr.hasNext()) { Map.Entry> entry = itr.next(); - if(entry.getValue().size()>3) { + if((!ct.isExceptionalFittedCluster(entry.getValue()) && entry.getValue().size() >= Constants.DC_MIN_NLAYERS) + || (ct.isExceptionalFittedCluster(entry.getValue()) && entry.getValue().size() >= Constants.DC_MIN_NLAYERS-1)) { Cluster cluster = 
new Cluster(entry.getValue().get(0).get_Sector(), entry.getValue().get(0).get_Superlayer(), entry.getValue().get(0).get_AssociatedClusterID()); FittedCluster fcluster = new FittedCluster(cluster); @@ -328,7 +336,7 @@ public List RecomposeClusters(Map> return clusters; } - private List RecomposeTrackClusters(DataEvent event, List fhits, IndexedTable tab, DCGeant4Factory DcDetector, TimeToDistanceEstimator tde) { + private List RecomposeTrackClusters(DataEvent event, List fhits, ClusterCleanerUtilities ct, IndexedTable tab, DCGeant4Factory DcDetector, TimeToDistanceEstimator tde) { Map> grpHits = new HashMap<>(); List clusters = new ArrayList<>(); @@ -354,8 +362,9 @@ private List RecomposeTrackClusters(DataEvent event, List> entry = itr.next(); - - if(entry.getValue().size()>3) { + + if((!ct.isExceptionalFittedCluster(entry.getValue()) && entry.getValue().size() >= Constants.DC_MIN_NLAYERS) + || (ct.isExceptionalFittedCluster(entry.getValue()) && entry.getValue().size() >= Constants.DC_MIN_NLAYERS-1)){ Cluster cluster = new Cluster(entry.getValue().get(0).get_Sector(), entry.getValue().get(0).get_Superlayer(), entry.getValue().get(0).get_AssociatedClusterID()); FittedCluster fcluster = new FittedCluster(cluster); @@ -389,7 +398,7 @@ public List FindTimeBasedClusters(DataEvent event, List clusters = new ArrayList<>(); - List rclusters = RecomposeTrackClusters(event, fhits, tab, DcDetector, tde); + List rclusters = RecomposeTrackClusters(event, fhits, ct, tab, DcDetector, tde); //LOGGER.log(Level.FINER, " Clusters TimeBased Step 1"); // for(FittedCluster c : rclusters) // for(FittedHit h : c) @@ -616,5 +625,5 @@ public EvioDataBank getLayerEfficiencies(List fclusters, List o.size()) { + if (this.size() < o.size()) { return 1; - } else { - return 0; + } + else if(this.size() == o.size()){ + if(this.get_fitProb() < o.get_fitProb()) return 1; + else if(this.get_fitProb() == o.get_fitProb()) return 0; + else return -1; + } + else { + return -1; } } diff --git 
a/reconstruction/dc/src/main/java/org/jlab/rec/dc/hit/FittedHit.java b/reconstruction/dc/src/main/java/org/jlab/rec/dc/hit/FittedHit.java index 741a0ed905..36e802de15 100644 --- a/reconstruction/dc/src/main/java/org/jlab/rec/dc/hit/FittedHit.java +++ b/reconstruction/dc/src/main/java/org/jlab/rec/dc/hit/FittedHit.java @@ -1112,6 +1112,7 @@ public FittedHit clone() throws CloneNotSupportedException { hitClone.set_CellSize(this.get_CellSize()); hitClone.set_AssociatedClusterID(this.get_AssociatedClusterID()); hitClone.set_AssociatedHBTrackID(this.get_AssociatedHBTrackID()); + hitClone.set_IndexTDC(this.get_IndexTDC()); hitClone.betaFlag = this.betaFlag; return hitClone; diff --git a/reconstruction/dc/src/main/java/org/jlab/rec/dc/hit/Hit.java b/reconstruction/dc/src/main/java/org/jlab/rec/dc/hit/Hit.java index 7b2c9dc74f..e308353c76 100644 --- a/reconstruction/dc/src/main/java/org/jlab/rec/dc/hit/Hit.java +++ b/reconstruction/dc/src/main/java/org/jlab/rec/dc/hit/Hit.java @@ -21,6 +21,7 @@ public class Hit implements Comparable { private int _Id; private double _cellSize; private double _DocaErr; + private int _indexTDC = -1; // class implements Comparable interface to allow for sorting a collection of hits by wire number values public int NNTrkId; public int NNClusId; @@ -49,6 +50,22 @@ public Hit(int sector, int superlayer, int layer, int wire, int TDC, int jitter, } + + /** + * + * @return index in DC::TDC bank + */ + public int get_IndexTDC(){ + return _indexTDC; + } + + /** + * + * @param index in DC::TDC bank + */ + public void set_IndexTDC(int index){ + this._indexTDC = index; + } /** * diff --git a/reconstruction/dc/src/main/java/org/jlab/rec/dc/nn/PatternRec.java b/reconstruction/dc/src/main/java/org/jlab/rec/dc/nn/PatternRec.java index 56c8dd2700..2edc349e04 100644 --- a/reconstruction/dc/src/main/java/org/jlab/rec/dc/nn/PatternRec.java +++ b/reconstruction/dc/src/main/java/org/jlab/rec/dc/nn/PatternRec.java @@ -9,6 +9,7 @@ import java.util.logging.Level; 
import java.util.logging.Logger; import org.jlab.detector.geant4.v2.DCGeant4Factory; +import org.jlab.rec.dc.Constants; import org.jlab.rec.dc.cluster.Cluster; import org.jlab.rec.dc.cluster.ClusterCleanerUtilities; import org.jlab.rec.dc.cluster.ClusterFinder; @@ -139,15 +140,28 @@ public List RecomposeSegments(List fhits, while(itr.hasNext()) { Map.Entry> entry = itr.next(); - if(entry.getValue().size()>=20) {// 4 layers per superlayer, 5 out of six superlayer tracking - // find clusters - //fill array of hit - clf.fillHitArray(entry.getValue(), 0); //find clumps of hits init - List clusters = clf.findClumps(entry.getValue(), ct); + if(entry.getValue().size()>=15) {// 3 or 4 layers per superlayer, 5 out of six superlayer tracking + // Construct clusters + Map clusterMap = new HashMap<>(); + for(Hit hit : entry.getValue()){ + int index = hit.NNClusId; + if(clusterMap.get(index)==null) { // if the list not yet created make it + clusterMap.put(index, new Cluster(hit.get_Sector(), hit.get_Superlayer(), index)); + clusterMap.get(index).add(hit); // append hit + } else { + clusterMap.get(index).add(hit); // append hit + } + } + List clusters = new ArrayList(); + for(Cluster clus : clusterMap.values()){ + Collections.sort(clus); + clusters.add(clus); + } for (Cluster clus : clusters) { FittedCluster fclus = new FittedCluster(clus); clus.set_Id(clus.get(0).NNClusId); - if (clus != null && clus.size() >= 4 ) { //4 layers per superlayer + if (clus != null && ((!ct.isExceptionalCluster(clus) && clus.size() >= Constants.DC_MIN_NLAYERS) || + (ct.isExceptionalCluster(clus) && clus.size() >= Constants.DC_MIN_NLAYERS-1)) ) { // 3 or 4 layers per superlayer fclus.set_Id(clus.get(0).NNClusId); // update the hits for (FittedHit fhit : fclus) { diff --git a/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBClustering.java b/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBClustering.java index c527befa37..978ae61e2f 100644 --- 
a/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBClustering.java +++ b/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBClustering.java @@ -79,19 +79,16 @@ public boolean processDataEvent(DataEvent event) { List clusters = clusFinder.FindHitBasedClusters(hits, ct, cf, - Constants.getInstance().dcDetector); + Constants.getInstance().dcDetector, hitRead.get_NumTDCBankRows()); if (clusters.isEmpty()) { return true; } else { List fhits = rbc.createRawHitList(hits); /* 13 */ rbc.updateListsWithClusterInfo(fhits, clusters); - rbc.fillAllHBBanks(event, - fhits, - clusters, - null, - null, - null); + event.appendBanks(rbc.fillHitsBank(event, fhits), + rbc.fillHBClustersBank(event, clusters) + ); } return true; diff --git a/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBEngine.java b/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBEngine.java index 868608395b..eae796665e 100644 --- a/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBEngine.java +++ b/reconstruction/dc/src/main/java/org/jlab/service/dc/DCHBEngine.java @@ -104,7 +104,7 @@ public boolean processDataEvent(DataEvent event) { List clusters = clusFinder.FindHitBasedClusters(hits, ct, cf, - Constants.getInstance().dcDetector); + Constants.getInstance().dcDetector, hitRead.get_NumTDCBankRows()); if (clusters.isEmpty()) { return true; }