diff --git a/digi/src/main/java/org/hps/digi/DigitizationWithPulserDataMergingReadoutDriver.java b/digi/src/main/java/org/hps/digi/DigitizationWithPulserDataMergingReadoutDriver.java index 461b228464..2761babf71 100644 --- a/digi/src/main/java/org/hps/digi/DigitizationWithPulserDataMergingReadoutDriver.java +++ b/digi/src/main/java/org/hps/digi/DigitizationWithPulserDataMergingReadoutDriver.java @@ -65,7 +65,8 @@ public abstract class DigitizationWithPulserDataMergingReadoutDriver500 MeV + /** * Specifies the name of the subdetector geometry object. */ @@ -168,7 +169,9 @@ public abstract class DigitizationWithPulserDataMergingReadoutDriver debugCellIDWithHits=new ArrayList(); // ============================================================== // ==== Driver Parameters ======================================= @@ -394,16 +397,23 @@ public void process(EventHeader event) { // Get current raw hits in pulser data. Collection rawHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime(), ReadoutDataManager.getCurrentTime() + 2.0, PulserDataCollectionName, RawTrackerHit.class); - + if(debug)System.out.println("DigiReadout:: "+truthHitCollectionName +" local time = "+ReadoutDataManager.getCurrentTime()+" number of hits = "+hits.size()); + // Once an overlaid event is input, reset adcBufferMap to ensure that other overlaid events do not affect the current event. if(hits.size()!=0 || rawHits.size()!=0) { - // Get the set of all possible channel IDs. + // Get the set of all possible channel IDs. Set cells = getChannelIDs(); - + if(debug)System.out.println(this.getClass().getName()+":: resetting adc buffers at time = "+ReadoutDataManager.getCurrentTime()); // Reset adcBufferMap. 
for(Long cellID : cells) adcBufferMap.get(cellID).setAll((int) Math.round(getPedestalConditions(cellID))); - } + debugCellIDWithHits.clear(); + //if we are in no-spacing mode, just clear everything + if(doNoSpacing){ + resetBuffers(); + channelIntegrationSumMap.clear(); + } + } /* To merge MC data with pulser data, three different cases are handled separately. * Case 1: If pulser data does not have a channel in MC data, directly buffer samples @@ -432,9 +442,11 @@ public void process(EventHeader event) { // The hash map is used to check if MC data has a channel that is also in pulser data. Map hitCellIDMap = new HashMap(hits.size()); for(SimCalorimeterHit hit : hits) { - // Store the truth data. + // Store the truth data. Long hitCellID = hit.getCellID(); // For Ecal, cell ID is geometry ID; For hodo, cell ID is channel ID after hodoscope preprocessing - + if(debug) + System.out.println(this.getClass().getName()+":: process:: sim hit energy = "+hit.getRawEnergy()+" on cell = "+hitCellID); + ObjectRingBuffer hitBuffer = truthBufferMap.get(hitCellID); hitBuffer.addToCell(0, hit); @@ -499,7 +511,18 @@ public void process(EventHeader event) { // Get the truth hit energy deposition. 
double energyAmplitude = hit.getRawEnergy(); - + if(energyAmplitude>debugEnergyThresh && debug){ + System.out.println(this.getClass().getName()+":: process:: Putting sim hits in adcBuffer cellID = "+hitCellID); + System.out.println(this.getClass().getName()+":: process:: adding hits to adcBuffer cellID = "+hitCellID); + + System.out.println(this.getClass().getName()+":: process:: ReadoutDataManager Time = "+ReadoutDataManager.getCurrentTime()); + System.out.println(this.getClass().getName()+":: process:: hit time = "+hit.getTime()); + System.out.println(this.getClass().getName()+":: process:: readouttime() = "+readoutTime()); + + System.out.println(this.getClass().getName()+":: process:: truth energy = "+energyAmplitude); + debugCellIDWithHits.add(hitCellID); + } + if(hitCellIDMap.get(hitCellID) == 1) { // If noise should be added, calculate a random value for // the noise and add it to the truth energy deposition. @@ -531,14 +554,20 @@ public void process(EventHeader event) { double sigma = getNoiseConditions(hitCellID); currentValue += RandomGaussian.getGaussian(0, sigma); } - + // An ADC value is not allowed to exceed 4095. If a // larger value is observed, 4096 (overflow) is given // instead. (This corresponds to >2 Volts.) int digitizedValue = Math.min((int) Math.round(pedestal + currentValue), (int) Math.pow(2, nBit)); - + if(energyAmplitude>debugEnergyThresh&&debug) + System.out.println(this.getClass().getName()+":: process: writing digitized value for sample = "+i + +" post-noise current value = "+currentValue + +"; digitized value = "+digitizedValue); + // Write this value to the ADC buffer. adcBuffer.setValue(i, digitizedValue); + // + } } @@ -602,12 +631,24 @@ public void process(EventHeader event) { // contain any newly integrated hits and perform integration. 
List newHits = null; List newTruthRelations = null; - while(ReadoutDataManager.getCurrentTime() - readoutTime() + ReadoutDataManager.getBeamBunchSize() >= READOUT_PERIOD) { - if(newHits == null) { newHits = new ArrayList(); } - if(newTruthRelations == null) { newTruthRelations = new ArrayList(); } - readHits(newHits, newTruthRelations); - readoutCounter++; - } + + if(doNoSpacing){ + if(newHits == null) { newHits = new ArrayList(); } + if(newTruthRelations == null) { newTruthRelations = new ArrayList(); } + readoutCounter=0; + for(int i = 0; i < pulserDataWindow; i++){ + // System.out.println(this.getClass().getName()+":: looping over pulse data window readoutCounter = "+readoutCounter); + readHits(newHits, newTruthRelations); + readoutCounter++; + } + }else{ + while(ReadoutDataManager.getCurrentTime() - readoutTime() + ReadoutDataManager.getBeamBunchSize() >= READOUT_PERIOD) { + if(newHits == null) { newHits = new ArrayList(); } + if(newTruthRelations == null) { newTruthRelations = new ArrayList(); } + readHits(newHits, newTruthRelations); + readoutCounter++; + } + } } // TODO: Document this. @@ -624,11 +665,17 @@ private void readHits(List newHits, List newTruth // Store the pedestal subtracted value so that it may // be checked against the integration threshold. int pedestalSubtractedValue = adcBuffer.getValue() - pedestal; - + if(pedestalSubtractedValue > integrationThreshold && debug){ + System.out.println(this.getClass().getName()+":: readHits:: Looping over adcBufferMap cellID = "+cellID); + System.out.println(this.getClass().getName()+":: readHits:: ped subtracted ADC counts = "+pedestalSubtractedValue); + } + // Get the total ADC value that has been integrated // on this channel. Integer sum = channelIntegrationSumMap.get(cellID); - + if(pedestalSubtractedValue >integrationThreshold && debug) + System.out.println(this.getClass().getName()+":: readHits:: sum = "+sum); + // If any readout hits exist on this channel, add the // current ADC values to them. 
@@ -641,7 +688,7 @@ private void readHits(List newHits, List newTruth // events (4 ns). This will indicate when the // integration started and, in turn, should end. channelIntegrationTimeMap.put(cellID, readoutCounter); - + if(debug)System.out.println(this.getClass().getName()+":: readHits:: Found a hit above threshold = "+cellID); // Integrate the ADC values for a number of // samples defined by NSB and threshold // crossing sample. @@ -649,7 +696,7 @@ private void readHits(List newHits, List newTruth for(int i = 0; i <= numSamplesBefore; i++) { sumBefore += adcBuffer.getValue(-(numSamplesBefore - i)); } - + if(debug)System.out.println(this.getClass().getName()+":: readHits:: sum before this sample = "+sumBefore); // This will represent the total integral sum at // the current point in time. Store it in the sum // buffer so that it may be incremented later as @@ -680,13 +727,16 @@ private void readHits(List newHits, List newTruth // If the integration sum is defined, then pulse // integration is ongoing. if(sum != null) { - // Three cases are treated separataly + if(debug)System.out.println(this.getClass().getName()+":: readHits:: integration is ongoing..."+cellID+" count = "+readoutCounter); + // Three cases are treated separataly // Case 1: CHANNEL_INTEGRATION_DEADTIME > numSamplesAfter // Case 2: CHANNEL_INTEGRATION_DEADTIME == numSamplesAfter // Case 3: CHANNEL_INTEGRATION_DEADTIME < numSamplesAfter if(CHANNEL_INTEGRATION_DEADTIME > numSamplesAfter) { // Case 1 //Continue integration until NSA, the threshold-crossing sample has been added before. 
- if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) { + if(debug)System.out.println(this.getClass().getName()+":: readHits::case 1: channel deadtime > numSamplesAfter "+cellID+" count = "+readoutCounter); + if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) { + if(debug)System.out.println(this.getClass().getName()+":: readHits::case 1: integration + numSamplesAfter - 1>= readoutCounter "+cellID+" count = "+readoutCounter); channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); // Add the new ADC sample. @@ -703,7 +753,8 @@ private void readHits(List newHits, List newTruth // to data manager. else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutCounter - 1) {//At NSA + 1, hit is added into data manager // Add a new calorimeter hit. - RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, + if(debug)System.out.println(this.getClass().getName()+":: readHits:: case 1: reached NSA + 1; adding new hit "+cellID+" count = "+readoutCounter); + RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, 64 * channelIntegrationTimeMap.get(cellID)); newHits.add(newHit); // Cycle-clock for events is 2 ns, while cycle-clock for samples is 4 ns @@ -727,8 +778,10 @@ else if (channelIntegrationTimeMap.get(cellID) + CHANNEL_INTEGRATION_DEADTIME - } // Case 1 ends else if(CHANNEL_INTEGRATION_DEADTIME == numSamplesAfter){ // Case 2 // Continue integration until NSA, the threshold-crossing sample has been added before. 
+ if(debug)System.out.println(this.getClass().getName()+":: readHits::case 2: channel deadtime == numSamplesAfter "+cellID+" count = "+readoutCounter); if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) { channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); + if(debug)System.out.println(this.getClass().getName()+":: readHits::Case2: integration + numSamplesAfter - 1>= readoutCounter "+cellID+" count = "+readoutCounter); // Add the new ADC sample. channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0)); @@ -743,6 +796,7 @@ else if(CHANNEL_INTEGRATION_DEADTIME == numSamplesAfter){ // Case 2 // to data manager. else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutCounter - 1) {//At NSA + 1, hit is added into data manager // Add a new calorimeter hit. + if(debug)System.out.println(this.getClass().getName()+":: readHits:: case 2: reached NSA + 1; adding new hit "+cellID+" count = "+readoutCounter); RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, 64 * channelIntegrationTimeMap.get(cellID)); newHits.add(newHit); @@ -761,8 +815,10 @@ else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutC } } // Case 2 ends else { // Case 3 + if(debug)System.out.println(this.getClass().getName()+":: readHits::case 3: channel deadtime < numSamplesAfter "+cellID+" count = "+readoutCounter); if (channelIntegrationTimeMap.get(cellID) + CHANNEL_INTEGRATION_DEADTIME - 1 >= readoutCounter) { - // Continue integration until CHANNEL_INTEGRATION_DEADTIME + if(debug)System.out.println(this.getClass().getName()+":: readHits::Case3: integration + DEADTIME - 1>= readoutCounter "+cellID+" count = "+readoutCounter); + // Continue integration until CHANNEL_INTEGRATION_DEADTIME channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); // Add the new ADC sample. 
@@ -779,9 +835,12 @@ else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutC flagStartNewIntegration.put(cellID, true); } else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) { + if(debug)System.out.println(this.getClass().getName()+":: readHits::Case3: integration + numSamplesAfter - 1>= readoutCounter "+cellID+" count = "+readoutCounter); if(flagStartNewIntegration.get(cellID) == true) { // Flag for previous sample is true + if(debug)System.out.println(this.getClass().getName()+":: readHits::Case3: flagStartNewIntegration = true "+cellID+" count = "+readoutCounter); if(pedestalSubtractedValue <= integrationThreshold) { // If sample is less than threshold, then do not start new integration channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); + if(debug)System.out.println(this.getClass().getName()+":: readHits::Case3: too small...don't start new integration "+cellID+" count = "+readoutCounter); // Add the new ADC sample. channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0)); @@ -794,6 +853,7 @@ else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutC } else { // if sample is larger than threshold, a hit is added into data manager and start new integration // Add a new calorimeter hit. + if(debug)System.out.println(this.getClass().getName()+":: readHits:: case 3: new hit starting, storing old hit; adding new hit "+cellID+" count = "+readoutCounter); RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, 64 * channelIntegrationTimeMap.get(cellID)); newHits.add(newHit); @@ -850,6 +910,7 @@ else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutC } } else { // Flag for previous sample is false + if(debug)System.out.println(this.getClass().getName()+":: readHits::Case3: flagStartNewIntegration = false "+cellID+" count = "+readoutCounter); channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); // Add the new ADC sample. 
@@ -865,8 +926,9 @@ else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutC } } else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutCounter - 1) {//If reach NSA + 1, hit is added into data manager, and flag is set as false - // Add a new calorimeter hit. - RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, + if(debug)System.out.println(this.getClass().getName()+":: readHits:: case 3: reached NSA + 1; adding new hit "+cellID+" count = "+readoutCounter); + // Add a new calorimeter hit. + RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, 64 * channelIntegrationTimeMap.get(cellID)); newHits.add(newHit); integrationTime = channelIntegrationTimeMap.get(cellID) * 4 + 2; @@ -905,9 +967,21 @@ else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutC // Write the trigger path output data to the readout data // manager. Truth data is optional. - - + + //if running no-spacing, set the time to current time+readout + //I'm just replacing integration time here to make it easier + //note...I have no idea how using integration time works + //in the "spacing" readout. It's in local units, but the lookup + //in GTPClusters is in global??? 
I'm missing something + + if(doNoSpacing) + integrationTime=readoutTime()+readoutCounter * READOUT_PERIOD; + + if(debug && newHits.size()>0) + System.out.println("DigiReadout:: "+ outputHitCollectionName+" time = "+integrationTime+" adding trigger hits = "+newHits.size()); ReadoutDataManager.addData(outputHitCollectionName, integrationTime, newHits, RawCalorimeterHit.class); + if(doNoSpacing) + newHits.clear(); if(writeTriggerTruth) { ReadoutDataManager.addData(triggerTruthRelationsCollectionName, integrationTime, newTruthRelations, LCRelation.class); } @@ -1046,7 +1120,8 @@ protected Collection> getOnTriggerData(double triggerTime) } else { collectionsList = new ArrayList>(2); } - + if(debug) + System.out.println(this.getClass().getName()+":: got a trigger at time = "+triggerTime); // Readout drivers need to produce readout timestamps to // specify when they occurred in terms of simulation time. // The readout timestamp for the subdetector data should be @@ -1080,6 +1155,9 @@ protected Collection> getOnTriggerData(double triggerTime) List readoutHits = null; if(mode == 1) { readoutHits = getMode1Hits(triggerTime); } else { readoutHits = getMode3Hits(triggerTime); } + if(debug) + System.out.println(this.getClass().getName()+":: number of readoutHits = "+readoutHits.size()); + TriggeredLCIOData readoutData = new TriggeredLCIOData(mode13HitCollectionParams); readoutData.getData().addAll(readoutHits); collectionsList.add(readoutData); @@ -1159,7 +1237,10 @@ protected double getReadoutWindowBefore() { @Override protected double getTimeDisplacement() { - return localTimeOffset; + if(doNoSpacing) + return 0; + else + return localTimeOffset; } @Override @@ -1272,7 +1353,8 @@ private List getMode1Hits(double triggerTime) { // Check that there is a threshold-crossing at some // point in the ADC buffer. 
if(adcValues[i] > getPedestalConditions(cellID) + integrationThreshold) { - isAboveThreshold = true; + if(debug)System.out.println(this.getClass().getName()+":: found an adc value above threshold for cellID = "+cellID); + isAboveThreshold = true; break; } } @@ -1389,14 +1471,23 @@ private short[] getTriggerADCValues(long cellID, double triggerTime) { // Calculate the offset between the current position and the // trigger time. int readoutLatency = getReadoutLatency(triggerTime); - // Get the ADC pipeline. IntegerRingBuffer pipeline = adcBufferMap.get(cellID); - + if(debug && debugCellIDWithHits.contains(cellID)){ + System.out.println(this.getClass().getName()+":: getting triggered adc values with latency = "+readoutLatency+" for cellID = "+cellID); + /* + for(int k=0; k(); } + if(debug) + System.out.println(this.getClass().getName()+":: adding pulser-data strip hit for channel = "+channel+" at time = "+pulserHit.time); pulserHitQueues[channel].add(pulserHit); } @@ -306,7 +319,10 @@ public void process(EventHeader event) { if(hitQueues[channel] == null) { hitQueues[channel] = new PriorityQueue(); } - hitQueues[channel].add(stripHit); + if(debug) + System.out.println(this.getClass().getName()+":: adding simulated strip hit for channel = "+channel+" at time = "+stripHit.time); + + hitQueues[channel].add(stripHit); } // Hits older than a certain time frame should no longer @@ -628,9 +644,15 @@ protected Collection> getOnTriggerData(double triggerTime) List truthHits = new ArrayList(); List trueHitRelations = new ArrayList(); // Calculate time of first sample - double firstSample = Math.floor(((triggerTime + 256) - readoutLatency - readoutOffset) / HPSSVTConstants.SAMPLING_INTERVAL) + double firstSample = Math.floor(((triggerTime + triggerOffset) - readoutLatency - readoutOffset) / HPSSVTConstants.SAMPLING_INTERVAL) * HPSSVTConstants.SAMPLING_INTERVAL + readoutOffset; - + if(debug){ + System.out.println(this.getClass().getName()+":: trigger time = "+triggerTime+ + "; 
trigger offset = "+triggerOffset+"; readout latency = "+readoutLatency+ + "; readout offset = "+readoutOffset); + + System.out.println(this.getClass().getName()+":: svt first sample time for trigger = "+firstSample); + } List processedHits = new ArrayList(); for(SiSensor sensor : sensors) { @@ -693,11 +715,19 @@ protected Collection> getOnTriggerData(double triggerTime) // across all size samples. StringBuffer signalBuffer = new StringBuffer("\t\t\t\tSample Pulse :: ["); for(int sampleN = 0; sampleN < 6; sampleN++) { + //add the time offset to this. + // double sampleTime = firstSample + sampleN * HPSSVTConstants.SAMPLING_INTERVAL-timeOffset; double sampleTime = firstSample + sampleN * HPSSVTConstants.SAMPLING_INTERVAL; shape.setParameters(channel, (HpsSiSensor) sensor); double signalAtTime = hit.amplitude * shape.getAmplitudePeakNorm(sampleTime - hit.time); - totalContrib += signalAtTime; + + totalContrib += signalAtTime; signal[sampleN] += signalAtTime; + if(debug){ + System.out.println(this.getClass().getName()+":: making pulse: sample time = " + +sampleTime+"; hit time = "+hit.time); + System.out.println(this.getClass().getName()+":: signal from pulse @ time() = "+signalAtTime+"; total ADC = "+signal[sampleN]); + } meanNoise += ((HpsSiSensor) sensor).getNoise(channel, sampleN); signalBuffer.append(signalAtTime + " (" + sampleTime + ")"); @@ -736,6 +766,8 @@ protected Collection> getOnTriggerData(double triggerTime) // be passed through to readout. if(readoutCuts(hit)) { // Add the hit to the readout hits collection. + if(debug) + System.out.println(this.getClass().getName()+":: adding svt hit to triggered event"); hits.add(hit); // Associate the truth hits with the raw hit and // add them to the truth hits collection. 
diff --git a/digi/src/main/java/org/hps/digi/nospacing/CalDigiWithPulserNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/CalDigiWithPulserNoSpacingReadoutDriver.java deleted file mode 100644 index 9647269d1a..0000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/CalDigiWithPulserNoSpacingReadoutDriver.java +++ /dev/null @@ -1,1899 +0,0 @@ -package org.hps.digi.nospacing; - -import static org.hps.recon.ecal.EcalUtils.fallTime; -import static org.hps.recon.ecal.EcalUtils.maxVolt; -import static org.hps.recon.ecal.EcalUtils.nBit; -import static org.hps.recon.ecal.EcalUtils.riseTime; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.hps.readout.ReadoutDriver; -import org.hps.readout.ReadoutDataManager; -import org.hps.readout.ReadoutTimestamp; -import org.hps.readout.util.DoubleRingBuffer; -import org.hps.readout.util.IntegerRingBuffer; -import org.hps.readout.util.ObjectRingBuffer; -import org.hps.readout.util.collection.LCIOCollection; -import org.hps.readout.util.collection.LCIOCollectionFactory; -import org.hps.readout.util.collection.TriggeredLCIOData; -import org.hps.recon.ecal.EcalUtils; -import org.hps.util.RandomGaussian; -import org.lcsim.event.CalorimeterHit; -import org.lcsim.event.EventHeader; -import org.lcsim.event.LCRelation; -import org.lcsim.event.MCParticle; -import org.lcsim.event.RawCalorimeterHit; -import org.lcsim.event.RawTrackerHit; -import org.lcsim.event.SimCalorimeterHit; -import org.lcsim.event.base.BaseCalorimeterHit; -import org.lcsim.event.base.BaseLCRelation; -import org.lcsim.event.base.BaseRawCalorimeterHit; -import org.lcsim.event.base.BaseRawTrackerHit; -import org.lcsim.event.base.BaseSimCalorimeterHit; -import org.lcsim.geometry.Detector; -import org.lcsim.geometry.compact.Subdetector; -import org.lcsim.lcio.LCIOConstants; - -/** - * Class 
DigitizationWithPulserDataMergingReadoutDriver performs digitization - * of truth hits from SLIC by converting them into emulated pulses and merges pulser data, - * and then performing pulse integration. The results are output in - * the form of {@link org.lcsim.event.RawCalorimeterHit - * RawCalorimeterHit} objects. - *

- * The truth hit information is retained by also producing an output - * collection of {@link org.lcsim.event.LCRelation LCRelation} - * objects linking the raw hits to the original {@link - * org.lcsim.event.SimCalorimeterHit SimCalorimeterHit} objects from - * which they were generated. - *

- * DigitizationReadoutDriver is itself abstract. It is - * designed with the intent to function for both the hodoscope and - * the calorimeter. As such, it requires its implementing classes to - * handle certain subdetector-specific tasks. - * - * @author Tongtong Cao - */ -public abstract class CalDigiWithPulserNoSpacingReadoutDriver extends ReadoutDriver { - - // ============================================================== - // ==== LCIO Collections ======================================== - // ============================================================== - - /** - * Specifies the name of the subdetector geometry object. - */ - private String geometryName = null; - /** - * The name of the input {@link org.lcsim.event.SimCalorimeterHit - * SimCalorimeterHit} truth hit collection from SLIC. - */ - private String truthHitCollectionName = null; - /** - * The name of the input {@link org.lcsim.event.RawTrackerHit - * RawTrackerHit} collection from pulser data. - */ - private String PulserDataCollectionName = null; - /** - * The name of the digitized output {@link - * org.lcsim.event.RawCalorimeterHit RawCalorimeterHit} - * collection. - */ - private String outputHitCollectionName = null; - /** - * The name of the {@link org.lcsim.event.LCRelation LCRelation} - * collection that links output raw hits to the SLIC truth hits - * from which they were generated. - */ - private String truthRelationsCollectionName = null; - /** - * The name of the {@link org.lcsim.event.LCRelation LCRelation} - * collection that links output raw hits to the SLIC truth hits - * from which they were generated. This collection is output for - * trigger path hits, and is never persisted. - */ - private String triggerTruthRelationsCollectionName = null; - /** - * The name of the collection which contains readout hits. The - * class type of this collection will vary based on which mode - * the simulation is set to emulate. 
- */ - private String readoutCollectionName = null; - - // ============================================================== - // ==== Driver Options ========================================== - // ============================================================== - - /** - * Indicates whether or not noise should be simulated when - * converting truth energy depositions to the voltage amplitudes. - */ - private boolean addNoise = true; - /** - * Defines the number of photoelectrons per MeV of truth energy - * for the purpose of noise calculation. - */ - private double pePerMeV = Double.NaN; - /** - * Defines a fixed gain to be used for all subdetector channels. - * A negative value will result in gains being pulled from the - * conditions database for the run instead. Units are in MeV/ADC. - */ - private double fixedGain = -1; - /** - * Defines the pulse shape to use when simulating detector truth - * energy deposition response. - */ - private PulseShape pulseShape = PulseShape.ThreePole; - /** - * Defines the pulse time parameter. This influences the shape of - * a pulse generated from truth energy depositions and will vary - * depending on the form of pulse selected. Units are in ns. - */ - private double tp = Double.NaN; - /** - * Defines the ADC threshold needed to initiate pulse integration - * for raw hit creation. - */ - protected int integrationThreshold = 18; - /** - * Defines the number of integration samples that should be - * included in the pulse integral from before the sample that - * exceeds the integration threshold. - */ - protected int numSamplesBefore = 5; - /** - * Defines the number of integration samples that should be - * included in the pulse integral from after the sample that - * exceeds the integration threshold. - * Threshold-crossing sample is part of NSA. - */ - protected int numSamplesAfter = 25; - /** - * The format in which readout hits should be output. 
- */ - private int mode = 1; - /** - * Specifies whether trigger path hit truth information should be - * included in the driver output. - */ - private boolean writeTriggerTruth = false; - /** - * Specifies whether readout path truth information should be - * included in the driver output. - */ - private boolean writeTruth = false; - - // ============================================================== - // ==== Driver Parameters ======================================= - // ============================================================== - - /** - * Defines the length in nanoseconds of a hardware sample. - */ - private static final double READOUT_PERIOD = 4.0; - /** - * Serves as an internal clock variable for the driver. This is - * used to track the number of clock-cycles (1 per {@link - * org.hps.readout.ecal.updated.DigitizationReadoutDriver#READOUT_PERIOD - * READOUT_PERIOD}). - */ - private int readoutCounter = 0; - /** - * A buffer for storing pulse amplitudes representing the signals - * from the preamplifiers. These are stored in units of Volts - * with no pedestal. One buffer exists for each subdetector - * channel. - */ - private Map voltageBufferMap = new HashMap(); - /** - * Buffers the truth information for each sample period so that - * truth relations can be retained upon readout. - */ - private Map> truthBufferMap = new HashMap>(); - /** - * A buffer for storing ADC values representing the converted - * voltage values from the voltage buffers. These are stored in - * units of ADC and include a pedestal. One buffer exists for - * each subdetector channel. - */ - private Map adcBufferMap = new HashMap(); - - /** - * Stores the subdetector geometry object. - */ - private D geometry = null; - /** - * Stores the total ADC sums for each subdetector channel that is - * currently undergoing integration. 
- */ - private Map channelIntegrationSumMap = new HashMap(); - /** - * Stores the total ADC sums for each subdetector channel that is - * currently undergoing integration. - */ - private Map> channelIntegrationTruthMap = new HashMap>(); - /** - * Stores the time at which integration began on a given channel. - * This is used to track when the integration period has ended. - */ - private Map channelIntegrationTimeMap = new HashMap(); - // TODO: Give this documentation. - private Map> channelIntegrationADCMap = new HashMap>(); - /** - * Defines the time offset of objects produced by this driver - * from the actual true time that they should appear. - */ - private double localTimeOffset = 0; - /** - * Stores the minimum length of that must pass before a new hit - * may be integrated on a given channel. - * Unit: clock-cycle - */ - private static final int CHANNEL_INTEGRATION_DEADTIME = 8; - //private static final int CHANNEL_INTEGRATION_DEADTIME = 0; - /** - * Defines the total time range around the trigger time in which - * hits are output into the readout LCIO file. The trigger time - * position within this range is determined by {@link - * org.hps.readout.ecal.updated.DigitizationReadoutDriver#readoutOffset - * readoutOffset}. - */ - protected int readoutWindow = 48; - /** - * Sets how far from the beginning of the readout window trigger - * time should occur. A value of x, for instance would result in - * a window that starts at triggerTime - x and - * extends for a total time readoutWindow. 
- */ - // private int readoutOffset = 0; - - private int readoutOffset = -12; - - /** - * Sets time window of ADC samples in pulser data - */ - protected int pulserDataWindow = 48; - - /** - * To make time alignment between Ecal and hodoscope detectors, samples of - * pulser data may need to be shifted according to readout window offset - * difference between Ecal and hodoscope - */ - private int pulserSamplesShift = 0; - - - private double debugEnergyThresh=0.25; //only print debug for hits>500 MeV - - private boolean debug_=false; - - /** - * Defines the LCSim collection data for the trigger hits that - * are produced by this driver when it is emulating Mode-1 or - * Mode-3. - */ - private LCIOCollection mode13HitCollectionParams; - /** - * Defines the LCSim collection data for the trigger hits that - * are produced by this driver when it is emulating Mode-7. - */ - private LCIOCollection mode7HitCollectionParams; - /** - * Defines the LCSim collection data that links SLIC truth hits - * to the their corresponding simulated output hit. - */ - private LCIOCollection truthRelationsCollectionParams; - - /** - * Flag to point out that new integration could be started at a sample - * between CHANNEL_INTEGRATION_DEADTIME and numSamplesAfter - * for the case is less than numSamplesAfter - */ - private Map flagStartNewIntegration = new HashMap<>(); - - /** - * Since new integration could happen between CHANNEL_INTEGRATION_DEADTIME and numSamplesAfter, - * integration time needs to be assigned as parameter of ReadoutDataManager.addData(). - * Global displacement is 0 for dependency. - */ - private double integrationTime = Double.NaN; - - - // ============================================================== - // ==== To Be Re-Worked ========================================= - // ============================================================== - // TODO: We should be able to define these based on the integration parameters. 
    // Fixed ring-buffer lengths for the analog/ADC pipelines.
    private static final int BUFFER_LENGTH = 100;
    private static final int PIPELINE_LENGTH = 2000;

    /**
     * Validates the driver configuration, declares the input
     * collections this driver depends on, and registers the output
     * LCIO collections with the {@link ReadoutDataManager}.
     */
    @Override
    public void startOfData() {
        // Validate that all the collection names are defined.
        if(truthHitCollectionName == null || PulserDataCollectionName == null || outputHitCollectionName == null || truthRelationsCollectionName == null
                || triggerTruthRelationsCollectionName == null || readoutCollectionName == null) {
            throw new RuntimeException("One or more collection names is not defined!");
        }

        // Calculate the correct time offset. This is a function of
        // the integration samples and the output delay.
        // Threshold-crossing sample is part of NSA.
        // localTimeOffset = 4 * numSamplesAfter;
        localTimeOffset = 0;

        // Validate that a real mode was selected.
        if(mode != 1 && mode != 3 && mode != 7) {
            throw new IllegalArgumentException("Error: Mode " + mode + " is not a supported output mode.");
        }

        // Add the driver dependencies.
        addDependency(truthHitCollectionName);
        addDependency(PulserDataCollectionName);

        // Define the LCSim collection parameters for this driver's
        // output. Note: Since these are not persisted, the flags and
        // readout name are probably not necessary.
        LCIOCollectionFactory.setCollectionName(outputHitCollectionName);
        LCIOCollectionFactory.setProductionDriver(this);
        LCIOCollectionFactory.setFlags((0 + (1 << LCIOConstants.CHBIT_LONG) + (1 << LCIOConstants.RCHBIT_ID1)));
        LCIOCollectionFactory.setReadoutName(truthHitCollectionName);
        LCIOCollection<RawCalorimeterHit> hitCollectionParams = LCIOCollectionFactory.produceLCIOCollection(RawCalorimeterHit.class);
        ReadoutDataManager.registerCollection(hitCollectionParams, false);

        LCIOCollectionFactory.setCollectionName(triggerTruthRelationsCollectionName);
        LCIOCollectionFactory.setProductionDriver(this);
        LCIOCollection<LCRelation> triggerTruthCollectionParams = LCIOCollectionFactory.produceLCIOCollection(LCRelation.class);
        ReadoutDataManager.registerCollection(triggerTruthCollectionParams, false);

        // Define the LCSim collection data for the on-trigger output.
        LCIOCollectionFactory.setCollectionName(readoutCollectionName);
        LCIOCollectionFactory.setProductionDriver(this);
        mode13HitCollectionParams = LCIOCollectionFactory.produceLCIOCollection(RawTrackerHit.class);

        LCIOCollectionFactory.setCollectionName(readoutCollectionName);
        LCIOCollectionFactory.setProductionDriver(this);
        LCIOCollectionFactory.setFlags(1 << LCIOConstants.RCHBIT_TIME);
        mode7HitCollectionParams = LCIOCollectionFactory.produceLCIOCollection(RawCalorimeterHit.class);

        LCIOCollectionFactory.setCollectionName(truthRelationsCollectionName);
        LCIOCollectionFactory.setProductionDriver(this);
        truthRelationsCollectionParams = LCIOCollectionFactory.produceLCIOCollection(LCRelation.class);

        // Run the superclass method.
        super.startOfData();
    }

    /**
     * Caches the subdetector geometry object for the new detector,
     * propagates its readout name into the output collection
     * parameters, and re-initializes the per-channel buffers.
     * @param detector - The new detector geometry.
     */
    @SuppressWarnings("unchecked")
    @Override
    public void detectorChanged(Detector detector) {
        // Throw an error if the geometry name is not set.
        if(geometryName == null) {
            throw new RuntimeException("Subdetector name is not defined!");
        }

        // Get the readout name from the subdetector geometry data.
        geometry = (D) detector.getSubdetector(geometryName);

        // Update the output LCIO collections data.
        LCIOCollectionFactory.setReadoutName(geometry.getReadout().getName());
        mode13HitCollectionParams = LCIOCollectionFactory.cloneCollection(mode13HitCollectionParams);
        LCIOCollectionFactory.setReadoutName(geometry.getReadout().getName());
        mode7HitCollectionParams = LCIOCollectionFactory.cloneCollection(mode7HitCollectionParams);

        // Reinstantiate the buffers.
        resetBuffers();
    }

    /**
     * Digitizes one simulation event: merges Monte Carlo (SLIC)
     * truth hits with pulser (beam-background) ADC samples into the
     * per-channel ADC pipelines, then runs one integration pass per
     * sample of the pulser window via {@link #readHits}.
     * @param event - The current LCSim event.
     */
    @Override
    public void process(EventHeader event) {

        /*
         * Get current SLIC hits and current raw hits in pulser data.
         */

        // Get current SLIC hits.
        Collection<SimCalorimeterHit> hits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime(), ReadoutDataManager.getCurrentTime() + 2.0,
                truthHitCollectionName, SimCalorimeterHit.class);

        // Get current raw hits in pulser data.
        Collection<RawTrackerHit> rawHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime(), ReadoutDataManager.getCurrentTime() + 2.0,
                PulserDataCollectionName, RawTrackerHit.class);
        if(debug_)System.out.println("DigiReadout:: "+truthHitCollectionName +" local time = "+ReadoutDataManager.getCurrentTime()+" number of hits = "+hits.size());
        // Once an overlaid event is input, reset adcBufferMap to ensure that other overlaid events do not affect the current event.
        if(hits.size()!=0 || rawHits.size()!=0) {
            // Get the set of all possible channel IDs.
            Set<Long> cells = getChannelIDs();

            // Reset adcBufferMap: every channel goes back to its pedestal.
            for(Long cellID : cells)
                adcBufferMap.get(cellID).setAll((int) Math.round(getPedestalConditions(cellID)));
        }

        /* To merge MC data with pulser data, three different cases are handled separately.
         * Case 1: If pulser data does not have a channel in MC data, directly buffer samples
         *
         * Case 2: If MC data does not have a channel in pulser data,
         * 1) add noise into MC hits
         * 2) convert MC hits into a window of ADC samples
         * 3) add pedestal
         * 4) buffer samples
         *
         * Case 3: If MC data has a channel that is also in pulser data,
         * 1) convert MC hits into a window of ADC samples
         * 2) merge with samples of pulser data
         * 3) buffer merged samples
         *
         * MC hits are digitized into ADC samples with the same time window of pulser data.
         * Before the time window, the window is extended with NSB ADC samples, and values of the ADC samples are set as pedestal.
         * After the time window, the window is extended with NSA ADC samples, and values of the ADC samples are set as pedestal.
         * The extension is allowed since enough empty events are inserted into neighbored overlaid events.
         */

        // Add the truth hits to the truth hit buffer. The buffer is
        // only incremented when the ADC buffer is incremented, which
        // is handled below.
        // Save cell IDs of hits as keys in the MC hit Cell ID hash map, and set values as 1.
        // The hash map is used to check if MC data has a channel that is also in pulser data.
        Map<Long, Integer> hitCellIDMap = new HashMap<Long, Integer>(hits.size());
        for(SimCalorimeterHit hit : hits) {
            if(debug_)
                System.out.println(this.getClass().getName()+":: process:: sim hit energy = "+hit.getRawEnergy());
            // Store the truth data.
            Long hitCellID = hit.getCellID(); // For Ecal, cell ID is geometry ID; For hodo, cell ID is channel ID after hodoscope preprocessing

            ObjectRingBuffer<SimCalorimeterHit> hitBuffer = truthBufferMap.get(hitCellID);
            hitBuffer.addToCell(0, hit);

            // Save cell IDs of hits as keys in the hit Cell ID hash map, and set values as 1.
            if(hitCellIDMap.get(hitCellID) == null)
                hitCellIDMap.put(hitCellID,1);
        }

        // handle pulser data: case 1.
        // If cellID of a raw hit is not included by keys in the MC hit Cell ID hash map for MC hits, directly buffer ADC samples.
        // If included, set value as 2 for the corresponding key in the MC hit Cell ID hash map.
        // Save raw hits in the raw hit hash map, where keys are raw hit cell IDs and values are raw hits.
        // The hash map is used for case 3
        Map<Long, RawTrackerHit> rawHitsMap = new HashMap<Long, RawTrackerHit>(rawHits.size());
        for(RawTrackerHit rawHit : rawHits) {
            Long rawHitID = getID(rawHit); // For Ecal, ID is geometry ID; For hodo, ID is channel ID, which is converted from geometry ID.
            if(hitCellIDMap.get(rawHitID) == null) {
                // Get the ADC buffer for the channel.
                IntegerRingBuffer adcBuffer = adcBufferMap.get(rawHitID);

                // Get ADC samples for the channel.
                short[] adcSamples = rawHit.getADCValues();

                // Length of ADC sample array should be equal to setup for time window of ADC samples
                if(adcSamples.length != pulserDataWindow)
                    throw new RuntimeException("Error: time window of pulser data is not correctly set.");

                // Buffer ADC samples in pulser data
                for(int i = 0; i < pulserDataWindow; i++)
                    adcBuffer.setValue(i - pulserSamplesShift, (int)adcSamples[i]);
            }
            else {
                hitCellIDMap.put(rawHitID, 2);
                rawHitsMap.put(rawHitID, rawHit);
            }
        }

        // handle MC hits: case 2 and case 3
        // In the MC hit Cell ID hash map, if value for cell ID of a MC hit is 1, handle the hit as case 2.
        // If value for cell ID of a MC hit is 2, handle the hit as case 3.
        for(SimCalorimeterHit hit : hits) {
            Long hitCellID = hit.getCellID();
            // Check to see if the hit time seems valid. This is done
            // by calculating the time of the next readout cycle in
            // ns and subtracting the time of the current hit (with
            // adjustment for simulation time passed) from it. If the
            // hit would fall in a previous readout cycle, something
            // is probably wrong.
            // if(READOUT_PERIOD + readoutTime() - (ReadoutDataManager.getCurrentTime() + hit.getTime()) >= READOUT_PERIOD) {
            if(READOUT_PERIOD - hit.getTime() >= READOUT_PERIOD) {
                throw new RuntimeException("Error: Trying to add a hit to the analog pipeline, but the time seems incorrect.");
            }

            // Get the ADC buffer for the channel.
            IntegerRingBuffer adcBuffer = adcBufferMap.get(hitCellID);

            // Get the pedestal for the channel.
            int pedestal = (int) Math.round(getPedestalConditions(hitCellID));

            // Get the buffer for the current truth hit's channel.
            DoubleRingBuffer voltageBuffer = voltageBufferMap.get(hitCellID);

            // Get the truth hit energy deposition.
            double energyAmplitude = hit.getRawEnergy();
            if(energyAmplitude>debugEnergyThresh && debug_){
                System.out.println(this.getClass().getName()+":: process:: Putting sim hits in adcBuffer cellID = "+hitCellID);
                System.out.println(this.getClass().getName()+":: process:: adding hits to adcBuffer cellID = "+hitCellID);

                System.out.println(this.getClass().getName()+":: process:: ReadoutDataManager Time = "+ReadoutDataManager.getCurrentTime());
                System.out.println(this.getClass().getName()+":: process:: hit time = "+hit.getTime());
                System.out.println(this.getClass().getName()+":: process:: readouttime() = "+readoutTime());


                System.out.println(this.getClass().getName()+":: process:: truth energy = "+energyAmplitude);
            }
            // Case 2: channel only present in MC data.
            if(hitCellIDMap.get(hitCellID) == 1) {
                // If noise should be added, calculate a random value for
                // the noise and add it to the truth energy deposition.
                if(addNoise) {
                    energyAmplitude += getAmplitudeFluctuation(hit);
                    if(energyAmplitude>debugEnergyThresh&&debug_)
                        System.out.println(this.getClass().getName()+":: process:: added noise to energy; new energy = "+energyAmplitude);
                }

                // Simulate the pulse for each position in the preamp
                // pulse buffer for the subdetector channel on which the
                // hit occurred.
                if(energyAmplitude>debugEnergyThresh&&debug_)
                    System.out.println(this.getClass().getName()+":: process:: making pulse");
                for(int i = 0; i < pulserDataWindow; i++) {
                    // Calculate the voltage deposition for the current
                    // buffer time.
                    //double voltageDeposition = energyAmplitude * pulseAmplitude((i + 1) * READOUT_PERIOD + readoutTime()
                    //        - (ReadoutDataManager.getCurrentTime() + hit.getTime()) - getTimeShiftConditions(hitCellID), hitCellID);

                    double voltageDeposition = energyAmplitude * pulseAmplitude((i + 1) * READOUT_PERIOD
                            - hit.getTime()
                            - getTimeShiftConditions(hitCellID)
                            , hitCellID);

                    if(energyAmplitude>debugEnergyThresh&&debug_){
                        System.out.println(this.getClass().getName()+":: process:: pulse sample i = "+i
                                +" local time = "+((i + 1) * READOUT_PERIOD - hit.getTime() - getTimeShiftConditions(hitCellID))
                                +" pulse amplitude = "+pulseAmplitude((i + 1) * READOUT_PERIOD
                                        - hit.getTime()
                                        - getTimeShiftConditions(hitCellID)
                                        , hitCellID));
                        System.out.println(this.getClass().getName()+":: process:: writing digitized value for sample = "+i
                                +" voltage dep value = "+voltageDeposition);
                    }
                    // Increase the current buffer time's voltage value
                    // by the calculated amount.
                    voltageBuffer.addToCell(i, voltageDeposition);

                    // Scale the current value of the preamplifier buffer
                    // to a 12-bit ADC value where the maximum represents
                    // a value of maxVolt.
                    double currentValue = voltageBuffer.getValue(i) * ((Math.pow(2, nBit) - 1) / maxVolt);
                    if(energyAmplitude>debugEnergyThresh&&debug_){
                        System.out.println(this.getClass().getName()+":: process:: writing digitized value for sample = "+i
                                +" pre-noise digitized value = "+currentValue);
                    }
                    // If noise should be added, calculate a random value for
                    // the noise and add it to the ADC value.
                    if(addNoise) {
                        double sigma = getNoiseConditions(hitCellID);
                        currentValue += RandomGaussian.getGaussian(0, sigma);
                    }
                    if(energyAmplitude>debugEnergyThresh&&debug_)
                        System.out.println(this.getClass().getName()+":: process:: writing digitized value for sample = "+i
                                +" post-noise current value = "+currentValue);
                    // An ADC value is not allowed to exceed 4095. If a
                    // larger value is observed, 4096 (overflow) is given
                    // instead. (This corresponds to >2 Volts.)
                    int digitizedValue = Math.min((int) Math.round(pedestal + currentValue), (int) Math.pow(2, nBit));
                    if(energyAmplitude>debugEnergyThresh&&debug_)
                        System.out.println(this.getClass().getName()+":: process:: writing digitized value for sample = "+i
                                +" digitized value = "+digitizedValue);

                    // Write this value to the ADC buffer.
                    adcBuffer.setValue(i, digitizedValue);
                }
            }

            // Case 3: channel present in both MC and pulser data.
            else {
                // Get ADC samples for the channel.
                short[] ADCSamples = rawHitsMap.get(hitCellID).getADCValues();

                // Get digitized samples for MC hits
                int[] digitizedValue = new int[pulserDataWindow];

                // Simulate the pulse for each position in the preamp
                // pulse buffer for the subdetector channel on which the
                // hit occurred.

                for(int i = 0; i < pulserDataWindow; i++) {
                    // Calculate the voltage deposition for the current
                    // buffer time.
                    double voltageDeposition = energyAmplitude * pulseAmplitude((i + 1) * READOUT_PERIOD
                            - hit.getTime() - getTimeShiftConditions(hitCellID), hitCellID);

                    // double voltageDeposition = energyAmplitude * pulseAmplitude((i + 1) * READOUT_PERIOD + readoutTime()
                    //        - (ReadoutDataManager.getCurrentTime() + hit.getTime()) - getTimeShiftConditions(hitCellID), hitCellID);

                    // Increase the current buffer time's voltage value
                    // by the calculated amount.
                    voltageBuffer.addToCell(i, voltageDeposition);

                    // Scale the current value of the preamplifier buffer
                    // to a 12-bit ADC value where the maximum represents
                    // a value of maxVolt.
                    double currentValue = voltageBuffer.getValue(i) * ((Math.pow(2, nBit) - 1) / maxVolt);

                    // An ADC value is not allowed to exceed 4095. If a
                    // larger value is observed, 4096 (overflow) is given
                    // instead. (This corresponds to >2 Volts.)
                    digitizedValue[i] = Math.min((int) Math.round(currentValue), (int) Math.pow(2, nBit));
                }

                // Write this value to the ADC buffer.
                // If pulserSamplesShift is larger than 0, merged sample window is [-pulserSamplesShift, pulserDataWindow]
                if(pulserSamplesShift >= 0) {
                    for(int i = -pulserSamplesShift; i < 0; i++) adcBuffer.setValue(i , (int)ADCSamples[i + pulserSamplesShift]);
                    for(int i = 0; i < pulserDataWindow - pulserSamplesShift; i++) adcBuffer.setValue(i, digitizedValue[i] + ADCSamples[i + pulserSamplesShift]);
                    for(int i = pulserDataWindow - pulserSamplesShift; i < pulserDataWindow; i++) adcBuffer.setValue(i, digitizedValue[i]);
                }
                // If pulserSamplesShift is less than 0, merged sample window is [0, -pulserSamplesShift + pulserDataWindow]
                else {
                    for(int i = 0; i < -pulserSamplesShift; i++) adcBuffer.setValue(i, digitizedValue[i]);
                    for(int i = -pulserSamplesShift; i < pulserDataWindow; i++) adcBuffer.setValue(i, digitizedValue[i] + ADCSamples[i + pulserSamplesShift]);
                    for(int i = pulserDataWindow; i < pulserDataWindow - pulserSamplesShift; i++) adcBuffer.setValue(i, (int)ADCSamples[i + pulserSamplesShift]);
                }
            }
        }

        /*
         * Next step is to integrate hits from the pulses. Hit
         * integration is only performed once per readout period. The
         * readout period, defined by the hardware, is by default 4
         * nanoseconds.
         */

        // Check whether the appropriate amount of time has passed to
        // perform another integration step. If so, create a list to
        // contain any newly integrated hits and perform integration.
        List<RawCalorimeterHit> newHits = null;
        List<LCRelation> newTruthRelations = null;
        if(newHits == null) { newHits = new ArrayList<RawCalorimeterHit>(); }
        if(newTruthRelations == null) { newTruthRelations = new ArrayList<LCRelation>(); }
        readoutCounter=0;
        for(int i = 0; i < pulserDataWindow; i++){
            // System.out.println(this.getClass().getName()+":: looping over pulse data window readoutCounter = "+readoutCounter);
            readHits(newHits, newTruthRelations);
            readoutCounter++;
        }
    }

    /**
     * Performs one 4 ns integration step over every channel in the
     * ADC buffer map: starts an integration on a threshold crossing,
     * continues or terminates ongoing integrations according to the
     * relative size of NSA and the channel deadtime, steps every
     * pipeline forward, and writes any completed hits (plus optional
     * truth relations) to the {@link ReadoutDataManager}.
     * @param newHits - Receives newly integrated raw hits.
     * @param newTruthRelations - Receives truth relations for the
     * newly integrated hits (used when writeTriggerTruth is set).
     */
    private void readHits(List<RawCalorimeterHit> newHits, List<LCRelation> newTruthRelations) {
        // Perform hit integration as needed for each subdetector
        // channel in the buffer map.
        for(Long cellID : adcBufferMap.keySet()) {
            // System.out.println("************** new channel ***************");
            // Get the ADC buffer for the channel.
            IntegerRingBuffer adcBuffer = adcBufferMap.get(cellID);

            // Get the pedestal for the channel.
            int pedestal = (int) Math.round(getPedestalConditions(cellID));

            // Store the pedestal subtracted value so that it may
            // be checked against the integration threshold.
            int pedestalSubtractedValue = adcBuffer.getValue() - pedestal;
            if(pedestalSubtractedValue > integrationThreshold && debug_){
                System.out.println(this.getClass().getName()+":: readHits:: Looping over adcBufferMap cellID = "+cellID);
                System.out.println(this.getClass().getName()+":: readHits:: ped subtracted ADC counts = "+pedestalSubtractedValue);
            }
            // Get the total ADC value that has been integrated
            // on this channel.
            Integer sum = channelIntegrationSumMap.get(cellID);
            if(pedestalSubtractedValue >integrationThreshold && debug_)
                System.out.println(this.getClass().getName()+":: readHits:: sum = "+sum);
            // If any readout hits exist on this channel, add the
            // current ADC values to them.

            // If the ADC sum is undefined, then there is not an
            // ongoing integration. If the pedestal subtracted
            // value is also over the integration threshold, then
            // integration should be initiated.
            if(sum == null && pedestalSubtractedValue > integrationThreshold) {
                // Store the current local time in units of
                // events (4 ns). This will indicate when the
                // integration started and, in turn, should end.
                if(debug_)System.out.println(this.getClass().getName()+":: readHits:: Found a hit above threshold = "+cellID);

                channelIntegrationTimeMap.put(cellID, readoutCounter);

                // Integrate the ADC values for a number of
                // samples defined by NSB and threshold
                // crossing sample.
                int sumBefore = 0;
                for(int i = 0; i <= numSamplesBefore; i++) {
                    sumBefore += adcBuffer.getValue(-(numSamplesBefore - i));
                }
                if(debug_)System.out.println(this.getClass().getName()+":: readHits:: sum before this sample = "+sumBefore);
                // This will represent the total integral sum at
                // the current point in time. Store it in the sum
                // buffer so that it may be incremented later as
                // additional samples are read.
                channelIntegrationSumMap.put(cellID, sumBefore);

                // Collect and store truth information for trigger
                // path hits.
                channelIntegrationADCMap.put(cellID, new ArrayList<Integer>());

                // Get the truth information in the
                // integration samples for this channel.
                Set<SimCalorimeterHit> truthHits = new HashSet<SimCalorimeterHit>();
                for(int i = 0; i < numSamplesBefore + 4; i++) {
                    channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(-(numSamplesBefore - i)));
                    truthHits.addAll(truthBufferMap.get(cellID).getValue(-(numSamplesBefore - i)));
                }

                // Store all the truth hits that occurred in
                // the truth buffer in the integration period
                // for this channel as well. These will be
                // passed through the chain to allow for the
                // accessing of truth information during the
                // trigger simulation.
                channelIntegrationTruthMap.put(cellID, truthHits);
            }

            // If the integration sum is defined, then pulse
            // integration is ongoing.
            if(sum != null) {
                if(debug_)System.out.println(this.getClass().getName()+":: readHits:: integration is ongoing...");
                // Three cases are treated separataly
                // Case 1: CHANNEL_INTEGRATION_DEADTIME > numSamplesAfter
                // Case 2: CHANNEL_INTEGRATION_DEADTIME == numSamplesAfter
                // Case 3: CHANNEL_INTEGRATION_DEADTIME < numSamplesAfter
                if(CHANNEL_INTEGRATION_DEADTIME > numSamplesAfter) { // Case 1
                    if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 1: DEADTIME>NSA");
                    //Continue integration until NSA, the threshold-crossing sample has been added before.
                    if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) {
                        channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0));

                        // Add the new ADC sample.
                        channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0));

                        // Add the new truth information, if trigger
                        // path truth output is enabled.
                        if (writeTriggerTruth) {
                            channelIntegrationTruthMap.get(cellID).addAll(truthBufferMap.get(cellID).getValue(0));
                        }
                    }

                    // If integration is complete, a hit may be added
                    // to data manager.
                    else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutCounter - 1) {//At NSA + 1, hit is added into data manager
                        // Add a new calorimeter hit.
                        RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum,
                                64 * channelIntegrationTimeMap.get(cellID));
                        newHits.add(newHit);
                        // Cycle-clock for events is 2 ns, while cycle-clock for samples is 4 ns
                        integrationTime = channelIntegrationTimeMap.get(cellID) * 4 + 2;
                        // Add the truth relations for this hit, if
                        // trigger path truth is enabled.
                        if (writeTriggerTruth) {
                            Set<SimCalorimeterHit> truthHits = channelIntegrationTruthMap.get(cellID);
                            for (SimCalorimeterHit truthHit : truthHits) {
                                newTruthRelations.add(new BaseLCRelation(newHit, truthHit));
                            }
                        }
                    }

                    // Do not clear the channel for integration until deadtime has passed.
                    // The threshold-crossing sample counts as the first sample in the deadtime.
                    else if (channelIntegrationTimeMap.get(cellID) + CHANNEL_INTEGRATION_DEADTIME - 1 <= readoutCounter
                            - 1) { // No new integration until over deadtime
                        channelIntegrationSumMap.remove(cellID);
                    }
                } // Case 1 ends
                else if(CHANNEL_INTEGRATION_DEADTIME == numSamplesAfter){ // Case 2
                    if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case2: DEADTIME==NSA");
                    // Continue integration until NSA, the threshold-crossing sample has been added before.
                    if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) {
                        channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0));

                        // Add the new ADC sample.
                        channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0));

                        // Add the new truth information, if trigger
                        // path truth output is enabled.
                        if (writeTriggerTruth) {
                            channelIntegrationTruthMap.get(cellID).addAll(truthBufferMap.get(cellID).getValue(0));
                        }
                    }
                    // If integration is complete, a hit may be added
                    // to data manager.
                    else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutCounter - 1) {//At NSA + 1, hit is added into data manager
                        // Add a new calorimeter hit.
                        RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum,
                                64 * channelIntegrationTimeMap.get(cellID));
                        newHits.add(newHit);
                        // Cycle-clock for events is 2 ns, while cycle-clock for samples is 4 ns
                        integrationTime = channelIntegrationTimeMap.get(cellID) * 4 + 2;

                        // Add the truth relations for this hit, if
                        // trigger path truth is enabled.
                        if (writeTriggerTruth) {
                            Set<SimCalorimeterHit> truthHits = channelIntegrationTruthMap.get(cellID);
                            for (SimCalorimeterHit truthHit : truthHits) {
                                newTruthRelations.add(new BaseLCRelation(newHit, truthHit));
                            }
                        }
                        channelIntegrationSumMap.remove(cellID);
                    }
                } // Case 2 ends
                else { // Case 3
                    // NOTE(review): debug string below reconstructed; the
                    // '<' characters were lost in this copy of the source.
                    if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: DEADTIME<NSA");
                    if (channelIntegrationTimeMap.get(cellID) + CHANNEL_INTEGRATION_DEADTIME - 1 >= readoutCounter) {
                        if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: time + deadtime - 1 >= readoutCounter"+(channelIntegrationTimeMap.get(cellID) + CHANNEL_INTEGRATION_DEADTIME - 1)+">="+readoutCounter+"....just keep integrating "+cellID);
                        // Continue integration until CHANNEL_INTEGRATION_DEADTIME
                        channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0));

                        // Add the new ADC sample.
                        channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0));

                        // Add the new truth information, if trigger
                        // path truth output is enabled.
                        if (writeTriggerTruth) {
                            channelIntegrationTruthMap.get(cellID).addAll(truthBufferMap.get(cellID).getValue(0));
                        }

                        // If sample at the end of deadtime is less than threshold, new integration could be started from next sample
                        if(channelIntegrationTimeMap.get(cellID) + CHANNEL_INTEGRATION_DEADTIME == readoutCounter && pedestalSubtractedValue <= integrationThreshold){
                            if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: time + deadtime - 1 >= readoutCounter ... at deadtime limit and below threshold, setting new integration flag to true");
                            flagStartNewIntegration.put(cellID, true);
                        }
                    }
                    else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) {
                        if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: time + NSA - 1 >= readoutCounter"+(channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1)+">="+readoutCounter+"....decide what to do "+cellID);
                        if(flagStartNewIntegration.get(cellID) == true) { // Flag for previous sample is true
                            if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: new integration flag is true; starting new integration "+cellID);
                            if(pedestalSubtractedValue <= integrationThreshold) { // If sample is less than threshold, then do not start new integration
                                channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0));

                                // Add the new ADC sample.
                                channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0));

                                // Add the new truth information, if trigger
                                // path truth output is enabled.
                                if (writeTriggerTruth) {
                                    channelIntegrationTruthMap.get(cellID).addAll(truthBufferMap.get(cellID).getValue(0));
                                }
                            }
                            else { // if sample is larger than threshold, a hit is added into data manager and start new integration
                                // Add a new calorimeter hit.
                                if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: making new hit after new integration flag is true because sample is over threshold and new integration is starting!!! "+cellID);
                                RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum,
                                        64 * channelIntegrationTimeMap.get(cellID));
                                newHits.add(newHit);
                                integrationTime = channelIntegrationTimeMap.get(cellID) * 4 + 2;

                                // Add the truth relations for this hit, if
                                // trigger path truth is enabled.
                                if (writeTriggerTruth) {
                                    Set<SimCalorimeterHit> truthHits = channelIntegrationTruthMap.get(cellID);
                                    for (SimCalorimeterHit truthHit : truthHits) {
                                        newTruthRelations.add(new BaseLCRelation(newHit, truthHit));
                                    }
                                }

                                //Start new integration
                                channelIntegrationTimeMap.put(cellID, readoutCounter);
                                flagStartNewIntegration.put(cellID, false);

                                // Integrate the ADC values for a number of
                                // samples defined by NSB from before threshold
                                // crossing. Note that this stops one sample
                                // before the current sample. This current sample
                                // is handled in the subsequent code block.
                                int sumBefore = 0;
                                for(int i = 0; i <= numSamplesBefore; i++) {
                                    sumBefore += adcBuffer.getValue(-(numSamplesBefore - i));
                                }

                                // This will represent the total integral sum at
                                // the current point in time. Store it in the sum
                                // buffer so that it may be incremented later as
                                // additional samples are read.
                                channelIntegrationSumMap.put(cellID, sumBefore);

                                // Collect and store truth information for trigger
                                // path hits.
                                channelIntegrationADCMap.put(cellID, new ArrayList<Integer>());

                                // Get the truth information in the
                                // integration samples for this channel.
                                Set<SimCalorimeterHit> truthHits = new HashSet<SimCalorimeterHit>();
                                for(int i = 0; i < numSamplesBefore + 4; i++) {
                                    channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(-(numSamplesBefore - i)));
                                    truthHits.addAll(truthBufferMap.get(cellID).getValue(-(numSamplesBefore - i)));
                                }

                                // Store all the truth hits that occurred in
                                // the truth buffer in the integration period
                                // for this channel as well. These will be
                                // passed through the chain to allow for the
                                // accessing of truth information during the
                                // trigger simulation.
                                channelIntegrationTruthMap.put(cellID, truthHits);
                            }
                        }
                        else { // Flag for previous sample is false
                            if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: new integration flag is false; just add new sample "+cellID);
                            channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0));

                            // Add the new ADC sample.
                            channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0));
                            if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: new integration flag is false: integration sum = "+(sum + adcBuffer.getValue(0)));
                            // Add the new truth information, if trigger
                            // path truth output is enabled.
                            if (writeTriggerTruth) {
                                channelIntegrationTruthMap.get(cellID).addAll(truthBufferMap.get(cellID).getValue(0));
                            }
                            if(pedestalSubtractedValue <= integrationThreshold){
                                if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: new integration flag is false: went below threshold, setting flag to true");
                                flagStartNewIntegration.put(cellID, true);
                            }
                        }
                    }
                    else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutCounter - 1) {//If reach NSA + 1, hit is added into data manager, and flag is set as false
                        if(debug_)System.out.println(this.getClass().getName()+":: readHits:: case 3: reached NSA + 1; adding new hit "+cellID);
                        // Add a new calorimeter hit.
                        RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum,
                                64 * channelIntegrationTimeMap.get(cellID));
                        newHits.add(newHit);
                        integrationTime = channelIntegrationTimeMap.get(cellID) * 4 + 2;

                        // Add the truth relations for this hit, if
                        // trigger path truth is enabled.
                        if (writeTriggerTruth) {
                            Set<SimCalorimeterHit> truthHits = channelIntegrationTruthMap.get(cellID);
                            for (SimCalorimeterHit truthHit : truthHits) {
                                newTruthRelations.add(new BaseLCRelation(newHit, truthHit));
                            }
                        }
                        channelIntegrationSumMap.remove(cellID);
                        flagStartNewIntegration.put(cellID, false);
                    }
                } // Case 3 ends
            }

            // Step to the next entry in the adc buffer.
            adcBuffer.stepForward();

            // Step to the next entry in the voltage buffer.
            if(voltageBufferMap.get(cellID) != null) { // A channel could be from pulser data, while MC data has no such channel.
                DoubleRingBuffer voltageBuffer = voltageBufferMap.get(cellID);
                voltageBuffer.clearValue();
                voltageBuffer.stepForward();
            }

            // Step the truth buffer for this channel forward.
            // The new cell should be cleared of any old values.
            if(truthBufferMap.get(cellID) != null) { // A channel could be from pulser data, while MC data has no such channel.
                truthBufferMap.get(cellID).stepForward();
                truthBufferMap.get(cellID).clearValue();
            }
        }

        // Write the trigger path output data to the readout data
        // manager. Truth data is optional.

        if(debug_)System.out.println("DigiReadout:: "+ outputHitCollectionName+" local time = "+(ReadoutDataManager.getCurrentTime()+readoutTime())+" adding trigger hits = "+newHits.size());
        ReadoutDataManager.addData(outputHitCollectionName, ReadoutDataManager.getCurrentTime()+readoutTime(), newHits, RawCalorimeterHit.class);
        newHits.clear(); //remove newHits since we've already put it in data manager
        if(writeTriggerTruth) {
            ReadoutDataManager.addData(triggerTruthRelationsCollectionName, integrationTime, newTruthRelations, LCRelation.class);
            newTruthRelations.clear();
        }

    }

    /**
     * Finds all root particles associated with the interactions that
     * created the argument particle.
     * @param particle - The particle.
- * @return Returns a {@link java.util.List List} containing each - * particle object in the argument particle's particle tree which - * has no parent particle. - */ - private static final List getRootParticleList(MCParticle particle) { - // If the particle has no parents, it should be added to the - // list and the list returned. - if(particle.getParents().isEmpty()) { - List list = new ArrayList(1); - list.add(particle); - return list; - } - - // If there is only one parent, just return the results from - // that parent. - else if(particle.getParents().size() == 1) { - return getRootParticleList(particle.getParents().get(0)); - } - - // Otherwise, run the method on each parent particle and - // return the results from that instead. - else { - // Store the parent particle roots. - List list = new ArrayList(); - - // Get the root particles for each parent and add them to - // the list. - for(MCParticle parent : particle.getParents()) { - List parentParticles = getRootParticleList(parent); - list.addAll(parentParticles); - } - - // Return the compiled particle list. - return list; - } - } - - /** - * Flattens the particle tree to a set containing both the root - * particle and any particles that are descended from it. - * @param root - The root of the particle tree. - * @return Returns a set containing the argument particle and all - * of its descendants. - */ - private static final Set getParticleTreeAsSet(MCParticle root) { - // Create a set to store the particle tree. - Set particleSet = new HashSet(); - - // Add the root particle to the tree, and then recursively - // add any daughter particles to the tree. - particleSet.add(root); - addDaughtersToSet(root, particleSet); - - // Return the particle set. - return particleSet; - } - - /** - * Adds all the daughter particles of the argument to the set. - * Daughters of each daughter particle are then recursively added - * to the set as well. - * @param particle - The particle to add. 
- * @param set - The set to which to add the particle. - */ - private static final void addDaughtersToSet(MCParticle particle, Set set) { - // Add each daughter particle to the set, and recursively add - // its daughters as well. - for(MCParticle daughter : particle.getDaughters()) { - set.add(daughter); - addDaughtersToSet(daughter, set); - } - } - - /** - * Gets a {@link java.util.Set Set} containing all valid channel - * IDs for the relevant subdetector geometry. - * @return Returns a Set containing all possible - * channel IDs. - */ - protected abstract Set getChannelIDs(); - - /** - * Gets a channel ID through {@link org.lcsim.event.RawTrackerHit RawTrackerHit} - * @return Returns a ID. Return a geometry ID for Ecal, while return a channel ID for hodoscope - */ - protected abstract Long getID(RawTrackerHit hit); - - /** - * Gets the gain for the indicated subdetector channel. - * @param channelID - The channel ID. - * @return Returns the value of the gain in units of ADC/MeV as a - * double. - */ - protected abstract double getGainConditions(long channelID); - - /** - * Gets the noise sigma for the indicated subdetector channel. - * @param channelID - The channel ID. - * @return Returns the value of the noise sigma as a - * double. - */ - protected abstract double getNoiseConditions(long channelID); - - /** - * Gets the int flag used to denote the appropriate - * subdetector in relation to a readout timestamp. - * @return Returns the timestamp flag as an int. - */ - protected abstract int getTimestampFlag(); - - /** - * Generate photoelectron/amplification noise for a pulse's amplitude. - * @param hit - The hit for which to generate a fluctuation. - * @return Returns a fluctuation in units GeV. 
- */ - protected double getAmplitudeFluctuation(CalorimeterHit hit) { - double sigma = Math.sqrt(hit.getRawEnergy() * EcalUtils.MeV / pePerMeV); - return RandomGaussian.getGaussian(0, sigma); - } - - @Override - protected Collection> getOnTriggerData(double triggerTime) { - // Create a list to store the extra collections. - List> collectionsList = null; - if(writeTruth) { - collectionsList = new ArrayList>(5); - } else { - collectionsList = new ArrayList>(2); - } - - // Readout drivers need to produce readout timestamps to - // specify when they occurred in terms of simulation time. - // The readout timestamp for the subdetector data should be - // defined as the start simulation time of the ADC buffer. - ReadoutTimestamp timestamp = new ReadoutTimestamp(getTimestampFlag(), triggerTime - (readoutOffset * 4) + 4); - - // Make the readout timestamp collection parameters object. - LCIOCollectionFactory.setCollectionName(ReadoutTimestamp.collectionName); - LCIOCollection timestampCollection = LCIOCollectionFactory.produceLCIOCollection(ReadoutTimestamp.class); - TriggeredLCIOData timestampData = new TriggeredLCIOData(timestampCollection); - timestampData.getData().add(timestamp); - collectionsList.add(timestampData); - - // Instantiate some lists to store truth data, if truth is to - // be output. - List triggerTruthHits = null; - List triggerTruthRelations = null; - if(writeTruth) { - triggerTruthHits = new ArrayList(); - triggerTruthRelations = new ArrayList(); - } - - // Get the appropriate collection of readout hits and output - // them to the readout data manager. 
- if(debug_)System.out.println(this.getClass().getName()+":: getting mode = "+mode+" hits on trigger time = "+triggerTime); - if(mode == 7) { - List readoutHits = getMode7Hits(triggerTime); - TriggeredLCIOData readoutData = new TriggeredLCIOData(mode7HitCollectionParams); - readoutData.getData().addAll(readoutHits); - collectionsList.add(readoutData); - } else { - List readoutHits = null; - if(mode == 1) { readoutHits = getMode1Hits(triggerTime); } - else { readoutHits = getMode3Hits(triggerTime); } - TriggeredLCIOData readoutData = new TriggeredLCIOData(mode13HitCollectionParams); - readoutData.getData().addAll(readoutHits); - collectionsList.add(readoutData); - - // FIXME: Truth information is currently only supported for Mode-1 operation. - if(writeTruth && mode == 1) { - for(RawTrackerHit hit : readoutHits) { - Collection truthHits = getTriggerTruthValues(hit.getCellID(), triggerTime); - triggerTruthHits.addAll(truthHits); - for(CalorimeterHit truthHit : truthHits) { - triggerTruthRelations.add(new BaseLCRelation(hit, truthHit)); - } - } - } - } - - // Add the truth collections if they exist. - if(writeTruth) { - // Add the truth hits to the output collection. - LCIOCollection truthHitCollection = ReadoutDataManager.getCollectionParameters(truthHitCollectionName, SimCalorimeterHit.class); - TriggeredLCIOData truthData = new TriggeredLCIOData(truthHitCollection); - truthData.getData().addAll(triggerTruthHits); - collectionsList.add(truthData); - - // MC particles need to be extracted from the truth hits - // and included in the readout data to ensure that the - // full truth chain is available. 
- Set truthParticles = new java.util.HashSet(); - for(SimCalorimeterHit simHit : triggerTruthHits) { - for(int i = 0; i < simHit.getMCParticleCount(); i++) { - List rootParticles = getRootParticleList(simHit.getMCParticle(i)); - for(MCParticle rootParticle : rootParticles) { - truthParticles.addAll(getParticleTreeAsSet(rootParticle)); - } - } - } - - // Create the truth MC particle collection. - LCIOCollection truthParticleCollection = ReadoutDataManager.getCollectionParameters("MCParticle", MCParticle.class); - TriggeredLCIOData truthParticleData = new TriggeredLCIOData(truthParticleCollection); - truthParticleData.getData().addAll(truthParticles); - collectionsList.add(truthParticleData); - - // Add the truth relations to the output data. - TriggeredLCIOData truthRelations = new TriggeredLCIOData(truthRelationsCollectionParams); - truthRelations.getData().addAll(triggerTruthRelations); - collectionsList.add(truthRelations); - } - - // Return the extra trigger collections. - return collectionsList; - } - - /** - * Gets the pedestal for the indicated subdetector channel. - * @param channelID - The channel ID. - * @return Returns the value of the pedestal in units of ADC as a - * double. - */ - protected abstract double getPedestalConditions(long channelID); - - - @Override - protected boolean isPersistent() { - throw new UnsupportedOperationException(); - } - - @Override - protected double getReadoutWindowAfter() { - throw new UnsupportedOperationException(); - } - - @Override - protected double getReadoutWindowBefore() { - throw new UnsupportedOperationException(); - } - - @Override - protected double getTimeDisplacement() { - return localTimeOffset; - } - - @Override - protected double getTimeNeededForLocalOutput() { - return (readoutWindow - readoutOffset) * 4.0; - } - - /** - * Gets the time shift for the indicated subdetector channel. - * @param channelID - The channel ID. - * @return Returns the value of the time shift in units of ns as - * a double. 
- */ - protected abstract double getTimeShiftConditions(long channelID); - - /** - * Gets the subdetector geometry object. - * @return Returns the subdetector geometry object. This will be - * an object of parameterized type D, which will is - * a subclass of {@link org.lcsim.geometry.compact.Subdetector - * Subdetector}. - */ - protected D getSubdetector() { - return geometry; - } - - /** - * Clones an object of type {@link org.lcsim.event.CalorimeterHit - * CalorimeterHit} and returns a copy that is shifted in time by - * the specified amount. - * @param hit - The hit to clone. - * @param newTime - The new time for the hit. - * @return Returns a time-shifted hit as an object of type {@link - * org.lcsim.event.CalorimeterHit CalorimeterHit}, unless the - * input hit was a {@link org.lcsim.event.SimCalorimeterHit - * SimCalorimeterHit} object, in which case the truth information - * will be retained. - */ - private static final CalorimeterHit cloneHitToTime(CalorimeterHit hit, double newTime) { - if(hit instanceof SimCalorimeterHit) { - // Cast the hit to a simulated calorimeter hit. - SimCalorimeterHit simHit = (SimCalorimeterHit) hit; - - // Create the necessary data objects to clone the - // hit. - int[] pdgs = new int[simHit.getMCParticleCount()]; - float[] times = new float[simHit.getMCParticleCount()]; - float[] energies = new float[simHit.getMCParticleCount()]; - Object[] particles = new Object[simHit.getMCParticleCount()]; - for(int i = 0; i < simHit.getMCParticleCount(); i++) { - particles[i] = simHit.getMCParticle(i); - pdgs[i] = simHit.getMCParticle(i).getPDGID(); - - // Note -- Despite returning the value for these - // methods as a double, they are actually stored - // internally as floats, so this case is always safe. - // Note -- Hit times are calculated based on the time - // of each of the contributing truth particles. This - // means that we have to give a fake truth time to - // actually get the correct hit time. 
- times[i] = (float) newTime; - energies[i] = (float) simHit.getContributedEnergy(i); - } - - // Create the new hit and shift its time position. - BaseSimCalorimeterHit cloneHit = new BaseSimCalorimeterHit(simHit.getCellID(), simHit.getRawEnergy(), newTime, - particles, energies, times, pdgs, simHit.getMetaData()); - - // Return the hit. - return cloneHit; - } else { - return new BaseCalorimeterHit(hit.getRawEnergy(), hit.getCorrectedEnergy(), hit.getEnergyError(), newTime, - hit.getCellID(), hit.getPositionVec(), hit.getType(), hit.getMetaData()); - } - } - - /** - * Gets the value of the pulse-shape Guassian function for the - * given parameters. - * @param t - * @param sig - * @return Returns the value of the function as a - * double. - */ - private static final double funcGaus(double t, double sig) { - return Math.exp(-t * t / (2 * sig * sig)); - } - - /** - * Generates the hits which should be output for a given trigger - * time in Mode-1 format. - * @param triggerTime - The trigger time. - * @return Returns the readout hits for the given trigger time as - * Mode-1 hits. - */ - private List getMode1Hits(double triggerTime) { - // Create a list to store the Mode-1 hits. - List hits = new ArrayList(); - if(debug_)System.out.println(this.getClass().getName()+":: getting mode1Hits for trigger time = "+triggerTime+" and readout window = "+readoutWindow); - // Iterate over each channel. - for(Long cellID : adcBufferMap.keySet()) { - // Get the ADC values at the time of the trigger. - short[] adcValues = getTriggerADCValues(cellID, triggerTime); - if(debug_){ - Collection simHits=getTriggerTruthValues(cellID, triggerTime); - if(simHits.size()==0) - System.out.println(this.getClass().getName()+":: no sim cal hits in this channel"); - else{ - for( SimCalorimeterHit hit: simHits) - System.out.println(this.getClass().getName()+":: sim cal hit in this channel with energy = "+hit.getRawEnergy()); - } - } - - // Iterate across the ADC values. 
If the ADC value is - // sufficiently high to produce a hit, then it should be - // written out. - boolean isAboveThreshold = false; - for(int i = 0; i < adcValues.length; i++) { - // Check that there is a threshold-crossing at some - // point in the ADC buffer. - if(adcValues[i] > getPedestalConditions(cellID) + integrationThreshold) { - if(debug_)System.out.println(this.getClass().getName()+":: found an adc value above threshold for cellID = "+cellID); - isAboveThreshold = true; - break; - } - } - - // If so, create a new hit and add it to the list. - if(isAboveThreshold) { - hits.add(new BaseRawTrackerHit(cellID, 0, adcValues)); - } - } - - // Return the hits. - return hits; - } - - /** - * Generates the hits which should be output for a given trigger - * time in Mode-3 format. - * @param triggerTime - The trigger time. - * @return Returns the readout hits for the given trigger time as - * Mode-3 hits. - */ - private List getMode3Hits(double triggerTime) { - // Create a list to store the Mode-3 hits. - List hits = new ArrayList(); - - // Iterate across the ADC values and extract Mode-3 hits. 
- for(Long cellID : adcBufferMap.keySet()) { - int pointerOffset = 0; - int numSamplesToRead = 0; - int thresholdCrossing = 0; - short[] adcValues = null; - short[] window = getTriggerADCValues(cellID, triggerTime); - - for(int i = 0; i < ReadoutDataManager.getReadoutWindow(); i++) { - if(numSamplesToRead != 0) { - adcValues[adcValues.length - numSamplesToRead] = window[i - pointerOffset]; - numSamplesToRead--; - if (numSamplesToRead == 0) { - hits.add(new BaseRawTrackerHit(cellID, thresholdCrossing, adcValues)); - } - } else if ((i == 0 || window[i - 1] <= getPedestalConditions(cellID) + integrationThreshold) && window[i] - > getPedestalConditions(cellID) + integrationThreshold) { - thresholdCrossing = i; - pointerOffset = Math.min(numSamplesBefore, i); - numSamplesToRead = pointerOffset + Math.min(numSamplesAfter, ReadoutDataManager.getReadoutWindow() - i - pointerOffset - 1); - adcValues = new short[numSamplesToRead]; - } - } - } - - // Return the hits. - return hits; - } - - /** - * Generates the hits which should be output for a given trigger - * time in Mode-7 format. - * @param triggerTime - The trigger time. - * @return Returns the readout hits for the given trigger time as - * Mode-7 hits. - */ - private List getMode7Hits(double triggerTime) { - // Create a list to store the Mode-7 hits. - List hits = new ArrayList(); - - // Iterate across the ADC values and extract Mode-7 hits. - for(Long cellID : adcBufferMap.keySet()) { - int adcSum = 0; - int pointerOffset = 0; - int numSamplesToRead = 0; - int thresholdCrossing = 0; - short[] window = getTriggerADCValues(cellID, triggerTime); - - // Generate Mode-7 hits. 
- if(window != null) { - for(int i = 0; i < ReadoutDataManager.getReadoutWindow(); i++) { - if (numSamplesToRead != 0) { - adcSum += window[i - pointerOffset]; - numSamplesToRead--; - if(numSamplesToRead == 0) { - hits.add(new BaseRawCalorimeterHit(cellID, adcSum, 64 * thresholdCrossing)); - } - } else if((i == 0 || window[i - 1] <= getPedestalConditions(cellID) + integrationThreshold) - && window[i] > getPedestalConditions(cellID) + integrationThreshold) { - thresholdCrossing = i; - pointerOffset = Math.min(numSamplesBefore, i); - numSamplesToRead = pointerOffset + Math.min(numSamplesAfter, ReadoutDataManager.getReadoutWindow() - i - pointerOffset - 1); - adcSum = 0; - } - } - } - } - - // Return the hits. - return hits; - } - - private int getReadoutLatency(double triggerTime) { - return ((int) ((ReadoutDataManager.getCurrentTime() - triggerTime) / 4.0)) + readoutOffset; - } - - /** - * Gets the ADC values for the trigger readout window for the - * requested cell ID and returns them as a short - * primitive array. - * @param cellID - The ID for the channel of the requested ADC - * value array. - * @param triggerTime - The time of the trigger to be written. - * @return Returns the ADC values in a time range equal to the - * readout window positioned around the trigger time as array of - * short primitives. - */ - private short[] getTriggerADCValues(long cellID, double triggerTime) { - // Calculate the offset between the current position and the - // trigger time. - int readoutLatency = getReadoutLatency(triggerTime); - - // Get the ADC pipeline. - IntegerRingBuffer pipeline = adcBufferMap.get(cellID); - - // Extract the ADC values for the requested channel. 
- short[] adcValues = new short[readoutWindow]; - if(debug_)System.out.println(this.getClass().getName()+":: getTriggerADCValues:: latency = "+readoutLatency); - for(int i = 0; i < readoutWindow; i++) { - adcValues[i] = (short) pipeline.getValue(-(readoutLatency - i - 1)).intValue(); - if(debug_) - System.out.println(this.getClass().getName()+":: getTriggerADCValues:: "+" pipeline index = "+ (-(readoutLatency - i - 1)) - +" adcValue["+i+"] = "+adcValues[i]); - } - - // Return the result. - return adcValues; - } - - /** - * Gets a list of all truth hits that occurred in the ADC output - * window around a given trigger time from the truth buffer. - * @param cellID - The channel ID. - * @param triggerTime - The trigger time. - * @return Returns all truth hits that occurred within the ADC - * readout window around the trigger time for the specified - * channel. - */ - private Collection getTriggerTruthValues(long cellID, double triggerTime) { - // Calculate the offset between the current position and the - // trigger time. - int readoutLatency = getReadoutLatency(triggerTime); - - // Get the truth pipeline. - ObjectRingBuffer pipeline = truthBufferMap.get(cellID); - - // Extract the truth for the requested channel. Note that one - // extra sample is included over the range of ADC samples as - // sometimes, the truth hit occurs a little earlier than may - // be expected due to a delay from pulse propagation. - double baseHitTime = 0; - List channelHits = new ArrayList(); - for(int i = 0; i < readoutWindow + 4; i++) { - // Hit times should be specified with respect to the - // start of the readout window. - for(SimCalorimeterHit hit : pipeline.getValue(-(readoutLatency - i))) { - channelHits.add((SimCalorimeterHit) cloneHitToTime(hit, baseHitTime)); - } - - // Increment the base hit time. - baseHitTime += 4.0; - } - - // Return the result. - return channelHits; - } - - /** - * Returns pulse amplitude at the given time (relative to hit time). Gain is - * applied. 
- * - * @param time Units of ns. Relative to hit time (negative=before the start - * of the pulse). - * @param cellID Crystal ID as returned by hit.getCellID(). - * @return Amplitude, units of volts/GeV. - */ - private double pulseAmplitude(double time, long cellID) { - //normalization constant from cal gain (MeV/integral bit) to amplitude gain (amplitude bit/GeV) - // Determine the gain. Gain may either be fixed across all - // channels, or be obtained from the conditions database - // depending on the behavior defined in the steering file. - // The gain should also be normalized. - double gain; - if(fixedGain > 0) { - gain = READOUT_PERIOD / (fixedGain * EcalUtils.MeV * ((Math.pow(2, nBit) - 1) / maxVolt)); - } else { - gain = READOUT_PERIOD / (getGainConditions(cellID) * EcalUtils.MeV * ((Math.pow(2, nBit) - 1) / maxVolt)); - } - - // Calculate the correct pulse amplitude and return it. - return gain * pulseAmplitude(time, pulseShape, tp); - } - - /** - * Calculates the amplitude of a pulse at the given time, where - * the time is relative to the hit time, and for a given pulse - * shape. - * @param time - The time in the pulse. This is in units of ns - * and is relative to the hit time. A negative value represents - * the pulse shape before the hit occurs. - * @param shape - The type of pulse for which the calculation is - * to be performed. - * @param shapingTime - A fitting parameter that influences the - * shape of the pulse. - * @return Returns the pulse amplitude in units of inverse ns. - * The amplitude is normalized so that the pulse integral is one. - */ - private static final double pulseAmplitude(double time, PulseShape shape, double shapingTime) { - // There can not be a pulse response from a hit that has not - // occurred yet, so any time before zero must produce a pulse - // amplitude of zero as well. - if(time <= 0.0) { - return 0.0; - } - - // Perform the calculation appropriate to the specified pulse - // shape. 
- switch (shape) { - case CRRC: - // Peak Location: tp - // Peak Value: 1/(tp * e) - return ((time / (shapingTime * shapingTime)) * Math.exp(-time / shapingTime)); - case DoubleGaussian: - // According to measurements, the output signal can - // be fitted by two Gaussians: one for the rise of - // the signal and one for the fall. - // Peak Location: 3 * riseTime - // Peak Value: 1/norm - double norm = ((riseTime + fallTime) / 2) * Math.sqrt(2 * Math.PI); //to ensure the total integral is equal to 1: = 33.8 - return funcGaus(time - 3 * riseTime, (time < 3 * riseTime) ? riseTime : fallTime) / norm; - case ThreePole: - // Peak Location: 2 * tp - // Peak Value: 2/(tp * e^2) - return ((time * time / (2 * shapingTime * shapingTime * shapingTime)) * Math.exp(-time / shapingTime)); - default: - return 0.0; - } - } - - /** - * Gets the local time for this driver. - * @return Returns the local time for this driver. - */ - private double readoutTime() { - return readoutCounter * READOUT_PERIOD; - //return ReadoutDataManager.getCurrentTime(); - } - - /** - * Resets the driver buffers to their default values. - * @return Returns true if the buffers were reset - * successfully, and false if they were not. - */ - private void resetBuffers() { - // Reset each of the buffer maps. - adcBufferMap.clear(); - truthBufferMap.clear(); - voltageBufferMap.clear(); - - // Get the set of all possible channel IDs. - Set cells = getChannelIDs(); - - // Insert a new buffer for each channel ID. 
- for(Long cellID : cells) { - voltageBufferMap.put(cellID, new DoubleRingBuffer(BUFFER_LENGTH)); - truthBufferMap.put(cellID, new ObjectRingBuffer(PIPELINE_LENGTH)); - adcBufferMap.put(cellID, new IntegerRingBuffer(PIPELINE_LENGTH, (int) Math.round(getPedestalConditions(cellID)))); - - truthBufferMap.get(cellID).stepForward(); - - flagStartNewIntegration.put(cellID, false); - } - } - - /** - * Sets whether randomized noise should be added to SLIC truth - * energy depositions when simulating subdetector hits. This is - * true by default. - * @param state - true means that noise will be - * added and false that it will not. - */ - public void setAddNoise(boolean state) { - addNoise = state; - } - - /** - * Defines the name of the subdetector geometry object. - * @param ecalName - The subdetector name. - */ - public void setGeometryName(String value) { - geometryName = value; - } - - /** - * Sets a single uniform value for the gain on all channels. This - * will override the conditions database value. If set negative, - * the conditions database values will be used instead. Gains are - * defined in units of MeV/ADC. This defaults to -1. - * @param value - The uniform gain to be employed across all - * channels in units of MeV/ADC. A negative value indicates to - * use the conditions database values. - */ - public void setFixedGain(double value) { - fixedGain = value; - } - - /** - * Sets the threshold that a pulse sample must exceed before - * pulse integration may commence. Units are in ADC and the - * default value is 12 ADC. - * @param value - The pulse integration threshold, in units of - * ADC. - */ - public void setIntegrationThreshold(int value) { - integrationThreshold = value; - } - - /** - * Sets the name of the input truth hit collection name. - * @param collection - The collection name. 
- */ - public void setInputHitCollectionName(String collection) { - truthHitCollectionName = collection; - } - - /** - * Sets the name of the input pulser data collection name. - * @param collection - The collection name. - */ - public void setInputPulserDataCollectionName(String collection) { - PulserDataCollectionName = collection; - } - - /** - * Sets the operational mode of the simulation. This affects the - * form of the readout hit output. Mode may be set to the values - * 1, 3, or 7. - * @param value - The operational mode. - */ - public void setMode(int value) { - mode = value; - } - - /** - * Defines the number of samples from after a threshold-crossing - * pulse sample that should be included in the pulse integral. - * Units are in clock-cycles (4 ns samples) and the default value - * is 20 samples. - * @param value - The number of samples. - */ - public void setNumberSamplesAfter(int value) { - numSamplesAfter = value; - } - - /** - * Defines the number of samples from before a threshold-crossing - * pulse sample that should be included in the pulse integral. - * Units are in clock-cycles (4 ns samples) and the default value - * is 5 samples. - * @param value - The number of samples. - */ - public void setNumberSamplesBefore(int value) { - numSamplesBefore = value; - } - - /** - * Sets the name of the hits produced by this driver for use in - * the trigger simulation.

- * Note that this is not the name of the collection output when a - * trigger occurs. For this value, see the method {@link - * org.hps.readout.ecal.updated.DigitizationReadoutDriver#setReadoutHitCollectionName(String) - * setReadoutHitCollectionName(String)} instead. - * @param collection - The collection name. - */ - public void setOutputHitCollectionName(String collection) { - outputHitCollectionName = collection; - } - - @Override - public void setPersistent(boolean state) { - throw new UnsupportedOperationException(); - } - - /** - * Sets the number of photoelectrons per MeV of deposited energy. - * This value is used in the simulation of subdetector hit noise - * due to photoelectron statistics. - * @param value - The number of photoelectrons per MeV. - */ - public void setPhotoelectronsPerMeV(double value) { - pePerMeV = value; - } - - /** - * Sets the pulse-shape model used to simulate pre-amplifier - * pulses. The default value is ThreePole. - * @param pulseShape - The name of the pulse shape model that is - * to be employed. Valid options are ThreePole, - * DoubleGaussian, or CRRC. - */ - public void setPulseShape(String pulseShape) { - this.pulseShape = PulseShape.valueOf(pulseShape); - } - - /** - * Sets the shaper time parameter for pulse simulation. The value - * depends on the pulse shape selected. For the default pulse - * shape ThreePole, it is equal to the RC, or half - * the peaking time (9.6 ns). - * @param value The pulse time parameter in units of nanoseconds. - */ - public void setPulseTimeParameter(double value) { - tp = value; - } - - /** - * Sets the name of the triggered hit output collection. This - * collection will hold all hits produced when a trigger occurs. - *

- * Note that this collection is different from the hits produced - * for internal usage by the readout simulation. For this value, - * see the method {@link - * org.hps.readout.ecal.updated.DigitizationReadoutDriver#setOutputHitCollectionName(String) - * setOutputHitCollectionName(String)} instead. - * @param collection - The collection name. - */ - public void setReadoutHitCollectionName(String collection) { - readoutCollectionName = collection; - } - - /** - * Sets the number of samples by which readout hit pulse-crossing - * samples should be offset. Units are in clock-cycles (intervals - * of 4 ns). - * @param value - The offset of the pulse-crossing sample in - * units of clock-cycles (4 ns intervals). - */ - public void setReadoutOffset(int value) { - readoutOffset = value; - } - - /** - * Sets time window of ADC samples in pulser data. - * Units are in clock-cycles (intervals of 4 ns). - * @param value - The time window of ADC samples in pulser data in - * units of clock-cycles (4 ns intervals). - */ - public void setPulserDataWindow(int value) { - pulserDataWindow = value; - } - - /** - * Sets sample shift between Ecal and hodoscope detectors. - * The shift is equal to (Hodo_readout_offset - Ecal_readout_offset) / 4. - * @param value - The shift of ADC samples in pulser data in - * units of clock-cycles (4 ns intervals). - */ - public void setPulserSamplesShift(int value) { - pulserSamplesShift = value; - } - - /** - * Sets the size of the readout window, in units of 4 ns samples. - * @param value - The readout window. 
- */ - public void setReadoutWindow(int value) { - readoutWindow = value; - } - - @Override - public void setReadoutWindowAfter(double value) { - throw new UnsupportedOperationException(); - } - - @Override - public void setReadoutWindowBefore(double value) { - throw new UnsupportedOperationException(); - } - - /** - * Sets the name of the collection which contains the relations - * between truth hits from SLIC and the calorimeter hit output. - * This is specifically for the trigger path hits. - * @param collection - The collection name. - */ - public void setTriggerPathTruthRelationsCollectionName(String collection) { - triggerTruthRelationsCollectionName = collection; - } - - /** - * Sets the name of the collection which contains the relations - * between truth hits from SLIC and the calorimeter hit output. - * This is specifically for the readout path hits. - * @param collection - The collection name. - */ - public void setTruthRelationsCollectionName(String collection) { - truthRelationsCollectionName = collection; - } - - /** - * Sets whether subdetector truth data for trigger path hits is - * to be produced or not. - * @param state - true indicates that the truth data - * should be created, and false that it should not. - */ - public void setWriteTriggerPathTruth(boolean state) { - writeTriggerTruth = state; - } - - /** - * Sets whether subdetector truth data for readout path hits is - * to be written to the output LCIO file or not. - * @param state - true indicates that the truth data - * should be written, and false that it should not. - */ - public void setWriteTruth(boolean state) { - writeTruth = state; - } - - /** - * Enumerable PulseShape defines the allowed types - * of pulses that may be used to emulate the subdetector response - * to incident energy. 
- * - * @author Sho Uemura - */ - public enum PulseShape { - CRRC, DoubleGaussian, ThreePole - } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/EcalDigiWithPulseNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/EcalDigiWithPulseNoSpacingReadoutDriver.java deleted file mode 100644 index b1c505f17c..0000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/EcalDigiWithPulseNoSpacingReadoutDriver.java +++ /dev/null @@ -1,153 +0,0 @@ -package org.hps.digi.nospacing; - -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.util.Set; - -import org.hps.readout.ReadoutDriver; -import org.hps.conditions.database.DatabaseConditionsManager; -import org.hps.conditions.ecal.EcalChannelConstants; -import org.hps.conditions.ecal.EcalConditions; -import org.hps.readout.ReadoutTimestamp; -import org.hps.recon.ecal.EcalUtils; -import org.lcsim.event.RawTrackerHit; -import org.lcsim.geometry.Detector; -import org.lcsim.geometry.subdetector.HPSEcal3; -import org.hps.record.daqconfig2019.ConfigurationManager2019; -import org.hps.record.daqconfig2019.DAQConfig2019; -import org.hps.record.daqconfig2019.FADCConfigEcal2019; -import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection; - -/** - * Class EcalDigiWithPulseNoSpacingReadoutDriver is an implementation of the - * {@link org.hps.digi.nospacing.CalDigiWithPulserNoSpacingReadoutDriver} for a subdetector of type {@link - * org.lcsim.geometry.subdetector.HPSEcal3 HPSEcal3}. It handles all - * of the calorimeter-specific functions needed by the superclass. - * - * @author Tongtong Cao - */ -public class EcalDigiWithPulseNoSpacingReadoutDriver extends CalDigiWithPulserNoSpacingReadoutDriver { - // The DAQ configuration manager for FADC parameters. - private FADCConfigEcal2019 config = new FADCConfigEcal2019(); - private boolean configStat = false; // Indicates if DAQ configuration is loaded - - // The number of nanoseconds in a clock-cycle (sample). 
- private static final int nsPerSample = 4; - - - /** Stores the conditions for this subdetector. */ - private EcalConditions ecalConditions = null; - - /** Stores the channel collection for this subdetector. */ - private EcalChannelCollection geoMap = new EcalChannelCollection(); - - public EcalDigiWithPulseNoSpacingReadoutDriver() { - // Set the default values for each subdetector-dependent - // parameter. - setGeometryName("Ecal"); - - setInputHitCollectionName("EcalHits"); - setOutputHitCollectionName("EcalRawHits"); - setTruthRelationsCollectionName("EcalTruthRelations"); - setTriggerPathTruthRelationsCollectionName("TriggerPathTruthRelations"); - setReadoutHitCollectionName("EcalReadoutHits"); - - setPhotoelectronsPerMeV(EcalUtils.photoelectronsPerMeV); - setPulseTimeParameter(9.6); - } - - /** - * Sets whether or not the DAQ configuration is applied into the driver - * the EvIO data stream or whether to read the configuration from data files. - * - * @param state - true indicates that the DAQ configuration is - * applied into the readout system, and false that it - * is not applied into the readout system. - */ - public void setDaqConfigurationAppliedintoReadout(boolean state) { - // If the DAQ configuration should be read, attach a listener - // to track when it updates. - if (state) { - ConfigurationManager2019.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - // Get the DAQ configuration. - DAQConfig2019 daq = ConfigurationManager2019.getInstance(); - - // Load the DAQ settings from the configuration manager. - numSamplesAfter = daq.getEcalFADCConfig().getNSA() / nsPerSample; - numSamplesBefore = daq.getEcalFADCConfig().getNSB() / nsPerSample; - readoutWindow = daq.getEcalFADCConfig().getWindowWidth() / nsPerSample; - pulserDataWindow = readoutWindow; - - // Get the FADC configuration. 
- config = daq.getEcalFADCConfig(); - configStat = true; - } - }); - } - } - - - @Override - public void detectorChanged(Detector detector) { - // Get a copy of the calorimeter conditions for the detector. - ecalConditions = DatabaseConditionsManager.getInstance().getEcalConditions(); - - // Store the calorimeter conditions table for converting between - // geometric IDs and channel objects. - geoMap = DatabaseConditionsManager.getInstance().getCachedConditions(EcalChannelCollection.class, "ecal_channels").getCachedData(); - - // Run the superclass method. - super.detectorChanged(detector); - } - - @Override - protected Set getChannelIDs() { - return getSubdetector().getNeighborMap().keySet(); - } - - @Override - protected Long getID(RawTrackerHit hit) { - return hit.getCellID(); - } - - @Override - protected double getGainConditions(long cellID) { - return findChannel(cellID).getGain().getGain(); - } - - @Override - protected double getNoiseConditions(long channelID) { - return findChannel(channelID).getCalibration().getNoise(); - } - - protected double getPedestalConditions(long cellID) { - return findChannel(cellID).getCalibration().getPedestal(); - - } - - @Override - protected double getTimeShiftConditions(long cellID) { - return findChannel(cellID).getTimeShift().getTimeShift(); - } - - @Override - protected int getTimestampFlag() { - return ReadoutTimestamp.SYSTEM_ECAL; - } - - /** - * Gets the channel parameters for a given channel ID. - * @param cellID - The long ID value that represents - * the channel. This is typically acquired from the method {@link - * org.lcsim.event.CalorimeterHit#getCellID() getCellID()} in a - * {@link org.lcsim.event.CalorimeterHit CalorimeterHit} object. - * @return Returns the channel parameters for the channel as an - * {@link org.hps.conditions.ecal.EcalChannelConstants - * EcalChannelConstants} object. 
- */ - private EcalChannelConstants findChannel(long cellID) { - return ecalConditions.getChannelConstants(ecalConditions.getChannelCollection().findGeometric(cellID)); - } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/EcalRawConverterNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/EcalRawConverterNoSpacingReadoutDriver.java deleted file mode 100755 index 670e827a7a..0000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/EcalRawConverterNoSpacingReadoutDriver.java +++ /dev/null @@ -1,153 +0,0 @@ -package org.hps.digi.nospacing; - -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; - -import org.hps.readout.ReadoutDriver; -import org.hps.conditions.database.DatabaseConditionsManager; -import org.hps.conditions.ecal.EcalChannelConstants; -import org.hps.conditions.ecal.EcalConditions; -//import org.hps.readout.RawConverterNoSpacingReadoutDriver; -import org.hps.readout.rawconverter.AbstractMode3RawConverter; -import org.hps.readout.rawconverter.EcalReadoutMode3RawConverter; -import org.hps.record.daqconfig2019.ConfigurationManager2019; -import org.hps.record.daqconfig2019.DAQConfig2019; -import org.hps.record.daqconfig.ConfigurationManager; -import org.hps.record.daqconfig.DAQConfig; -import org.lcsim.geometry.Detector; -import org.lcsim.geometry.subdetector.HPSEcal3; - -/** - * EcalRawConverterNoSpacingReadoutDriver is an implementation of - * {@link org.hps.readout.RawConverterReadoutDriver - * RawConverterReadoutDriver} for the calorimeter subdetector. - * - * @see org.hps.readout.RawConverterReadoutDriver - */ -public class EcalRawConverterNoSpacingReadoutDriver extends RawConverterNoSpacingReadoutDriver { - /** - * The converter object responsible for processing raw hits into - * proper {@link org.lcsim.event.CalorimeterHit CalorimeterHit} - * objects. - */ - private EcalReadoutMode3RawConverter converter = new EcalReadoutMode3RawConverter(); - - /** - * Cached copy of the calorimeter conditions. 
All calorimeter - * conditions should be called from here, rather than by directly - * accessing the database manager. - */ - private EcalConditions ecalConditions = null; - - /** - * Instantiates the driver with the correct default parameters. - */ - public EcalRawConverterNoSpacingReadoutDriver() { - super("EcalRawHits", "EcalCorrectedHits"); - setSkipBadChannels(true); - } - - /** - * Sets whether or not the DAQ configuration is applied into the driver - * the EvIO data stream or whether to read the configuration from data files. - * - * @param state - true indicates that the DAQ configuration is - * applied into the readout system, and false that it - * is not applied into the readout system. - */ - public void setDaqConfiguration2016AppliedintoReadout(boolean state) { - // Track changes in the DAQ configuration. - if (state) { - ConfigurationManager.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - // Get the DAQ configuration. - DAQConfig daq = ConfigurationManager.getInstance(); - - // Load the DAQ settings from the configuration manager. - getConverter().setNumberSamplesAfter(daq.getFADCConfig().getNSA()); - getConverter().setNumberSamplesBefore(daq.getFADCConfig().getNSB()); - - // Get the FADC configuration. - getConverter().setFADCConfig2016(daq.getFADCConfig()); - } - }); - } - } - - /** - * Sets whether or not the DAQ configuration is applied into the driver - * the EvIO data stream or whether to read the configuration from data files. - * - * @param state - true indicates that the DAQ configuration is - * applied into the readout system, and false that it - * is not applied into the readout system. - */ - public void setDaqConfigurationAppliedintoReadout(boolean state) { - // Track changes in the DAQ configuration. - if (state) { - ConfigurationManager2019.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - // Get the DAQ configuration. 
- DAQConfig2019 daq = ConfigurationManager2019.getInstance(); - - // Load the DAQ settings from the configuration manager. - getConverter().setNumberSamplesAfter(daq.getEcalFADCConfig().getNSA()); - getConverter().setNumberSamplesBefore(daq.getEcalFADCConfig().getNSB()); - - // Get the FADC configuration. - getConverter().setFADCConfigEcal2019(daq.getEcalFADCConfig()); - } - }); - } - } - - /** - * Indicates whether or not data from channels flagged as "bad" - * in the conditions system should be ignored. true - * indicates that they should be ignored, and false - * that they should not. - * @param apply - true indicates that "bad" channels - * will be ignored and false that they will not. - */ - @Override - public void setSkipBadChannels(boolean state) { - super.skipBadChannels = state; - } - - @Override - protected AbstractMode3RawConverter getConverter() { - return converter; - } - - @Override - protected String getSubdetectorReadoutName(Detector detector) { - HPSEcal3 calorimeterGeometry = (HPSEcal3) detector.getSubdetector("Ecal"); - return calorimeterGeometry.getReadout().getName(); - } - - @Override - protected boolean isBadChannel(long channelID) { - return findChannel(channelID).isBadChannel(); - } - - @Override - protected void updateDetectorDependentParameters(Detector detector) { - ecalConditions = DatabaseConditionsManager.getInstance().getEcalConditions(); - } - - /** - * Gets the channel parameters for a given channel ID. - * @param cellID - The long ID value that represents - * the channel. This is typically acquired from the method {@link - * org.lcsim.event.CalorimeterHit#getCellID() getCellID()} in a - * {@link org.lcsim.event.CalorimeterHit CalorimeterHit} object. - * @return Returns the channel parameters for the channel as an - * {@link org.hps.conditions.ecal.EcalChannelConstants - * EcalChannelConstants} object. 
- */ - private EcalChannelConstants findChannel(long cellID) { - return ecalConditions.getChannelConstants(ecalConditions.getChannelCollection().findGeometric(cellID)); - } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/EmptyEventsDriver.java b/digi/src/main/java/org/hps/digi/nospacing/EmptyEventsDriver.java deleted file mode 100644 index 5f99e4ee65..0000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/EmptyEventsDriver.java +++ /dev/null @@ -1,147 +0,0 @@ -package org.hps.digi.nospacing; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Map; -import java.util.List; - -import org.lcsim.event.EventHeader; -import org.lcsim.event.base.BaseLCSimEvent; -import org.lcsim.event.EventHeader.LCMetaData; -import org.lcsim.geometry.Detector; -import org.lcsim.conditions.ConditionsManager; -import org.lcsim.util.Driver; -import org.lcsim.event.EventHeader; - -import org.lcsim.event.SimCalorimeterHit; -import org.lcsim.event.SimTrackerHit; -import org.lcsim.event.MCParticle; - -/* - * This driver will create an empty lcsim event - * and call super.process() so that all of the registered - * drivers run over this empty event. - * - * - */ - - -public class EmptyEventsDriver extends Driver{ - - private int nEmptyToInsert=250; //number of events to insert between real MC events - private int emptyCount=0; //counter - //make collections for all needed by readout sim - EventHeader emptyEvent; - boolean gotFirstRealEvent=false; - //names of collections - Map baseCollectionMap=new HashMap>(); - - @Override - public void detectorChanged(Detector det) { - - // in here, make empty collections. - // since these are members and don't change - // should be able just keep adding some one - // to empty "event"....hopefully this speeds - // things up a lot. 
- - System.out.println("EmptyEventsDriver:: Setting up base map"); - - baseCollectionMap.put("EcalHits",SimCalorimeterHit.class); - baseCollectionMap.put("HodoscopeHits",SimTrackerHit.class); - baseCollectionMap.put("MCParticle",MCParticle.class); - baseCollectionMap.put("TrackerHits",SimTrackerHit.class); - baseCollectionMap.put("TrackerHitsECal",SimTrackerHit.class); - - - } - - - @Override - public void process(EventHeader event) { - // System.out.println("EmptyEventsDriver:: processing event!"); - - if(!gotFirstRealEvent){ - System.out.println("EmptyEventsDriver:: Making the empty bunch"); - //make an empty lcsim event based on this, real event - emptyEvent=makeEmptyMCEvent(event); - gotFirstRealEvent=true; - } - - // check if we should add empty or continue - - if(emptyCount mcCollections = new ArrayList(mcEvent.getMetaData()); - for (LCMetaData mcCollectionMeta : mcCollections) { - String mcCollectionName = mcCollectionMeta.getName(); - // check to see if this collection is in the base map - // if so, copy collection, clear it, and put it in new event. 
- if (baseCollectionMap.containsKey(mcCollectionName)){ - List collection =new ArrayList<> ((List) mcEvent.get(mcCollectionName)); - collection.clear(); //remove element - System.out.println("EmptyEventsDriver:: inserting collection "+mcCollectionName); - - this.putCollection(mcCollectionMeta, collection, lcsimEvent); - } - } - System.out.println("EmptyEventsDriver::returning empty event"); - return lcsimEvent; - } - - - - protected void putCollection(LCMetaData collection, List entries, EventHeader event) { - String[] readout = collection.getStringParameters().get("READOUT_NAME"); - if (readout != null) { - event.put(collection.getName(), entries, collection.getType(), collection.getFlags(), readout[0]); - } else { - event.put(collection.getName(), entries, collection.getType(), collection.getFlags()); - } - if (this.getHistogramLevel() > HLEVEL_NORMAL) - System.out.println("Putting collection " + collection.getName() + " into event."); - } - - private void clearEvent(EventHeader event){ - List evtCollections = new ArrayList(event.getMetaData()); - for (LCMetaData evtCollectionMeta : evtCollections) { - String colName=evtCollectionMeta.getName(); - List col=(List)event.get(colName); - if(col.size()>0){ - System.out.println("clearing collection "+colName+" of size = "+col.size()); - ((List)event.get(colName)).clear(); - System.out.println(".....new size = "+col.size()); - } - } - } - -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/EmptyEventsReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/EmptyEventsReadoutDriver.java deleted file mode 100644 index 6267057dd7..0000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/EmptyEventsReadoutDriver.java +++ /dev/null @@ -1,223 +0,0 @@ -package org.hps.digi.nospacing; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.List; - -import org.lcsim.event.EventHeader; -import org.lcsim.event.base.BaseLCSimEvent; -import 
org.lcsim.event.EventHeader.LCMetaData; -import org.lcsim.geometry.Detector; -import org.lcsim.conditions.ConditionsManager; -import org.lcsim.util.Driver; -import org.lcsim.event.EventHeader; - -import org.lcsim.event.SimCalorimeterHit; -import org.lcsim.event.SimTrackerHit; -import org.lcsim.event.MCParticle; - -import org.hps.readout.ReadoutDataManager; -import org.hps.readout.ReadoutDriver; -import org.hps.readout.ReadoutTimestamp; - - -/* - * This driver will create an empty lcsim event - * and call super.process() so that all of the registered - * drivers run over this empty event. - * - * - */ - - -public class EmptyEventsReadoutDriver extends ReadoutDriver{ - - private int nEmptyToInsert=250; //number of events to insert between real MC events - private int emptyCount=0; //counter - //make collections for all needed by readout sim - EventHeader emptyEvent; - boolean gotFirstRealEvent=false; - //names of collections - Map baseCollectionMap=new HashMap>(); - - List baseCollectionNames=Arrays.asList("EcalHits","HodoscopeHits","MCParticle","TrackerHits","TrackerHitsECal"); - List mcCollections = null; - @Override - public void detectorChanged(Detector det) { - - // in here, make empty collections. - // since these are members and don't change - // should be able just keep adding some one - // to empty "event"....hopefully this speeds - // things up a lot. 
- - System.out.println("EmptyEventsReadoutDriver:: Setting up base map"); - - baseCollectionMap.put("EcalHits",SimCalorimeterHit.class); - baseCollectionMap.put("HodoscopeHits",SimTrackerHit.class); - baseCollectionMap.put("MCParticle",MCParticle.class); - baseCollectionMap.put("TrackerHits",SimTrackerHit.class); - baseCollectionMap.put("TrackerHitsECal",SimTrackerHit.class); - - - } - - - @Override - public void process(EventHeader event) { - System.out.println("EmptyEventsReadoutDriver:: processing event!"); - System.out.println(event.toString()); - printCollections(event); - System.out.println("empty count = "+emptyCount); - if(!gotFirstRealEvent){ - System.out.println("EmptyEventsReadoutDriver:: Making the empty bunch"); - //make an empty lcsim event based on this, real event - // emptyEvent=makeEmptyEventFromMC(event); - //just get the metadata from first event - getMCMetaData(event); - gotFirstRealEvent=true; - } - - // check if we should add empty or continue - - if(emptyCount mcCollections = new ArrayList(mcEvent.getMetaData()); - for (LCMetaData mcCollectionMeta : mcCollections) { - String mcCollectionName = mcCollectionMeta.getName(); - // check to see if this collection is in the base map - // if so, copy collection, clear it, and put it in new event. 
- if (baseCollectionMap.containsKey(mcCollectionName)){ - List collection =new ArrayList<> ((List) mcEvent.get(mcCollectionName)); - collection.clear(); //remove element - System.out.println("EmptyEventsReadoutDriver:: inserting collection "+mcCollectionName); - - this.putCollection(mcCollectionMeta, collection, lcsimEvent); - } - } - System.out.println("EmptyEventsReadoutDriver::returning empty event"); - return lcsimEvent; - } - - private EventHeader makeEmptyEvent(){ - int eventID=666666; - long time=(long)ReadoutDataManager.getCurrentTime(); - System.out.println("making an empty bunch with time = "+time); - //this was taken from evio/src/main/java/org/hps/evio/BaseEventBuilder.java - // Create a new LCSimEvent. - EventHeader lcsimEvent = - new BaseLCSimEvent( - ConditionsManager.defaultInstance().getRun(), - eventID, - ConditionsManager.defaultInstance().getDetector(), - time); - - // for (Map.Entry> thisEntry : baseCollectionMap.entrySet()) { - for (String name : baseCollectionNames) { - // String name = entry.getKey(); - // use the already obtained Metadata from the first MC event - // in order to get the flags right - System.out.println("EmptyEventsReadoutDriver:: inserting collection "+name); - for(LCMetaData mcCollectionMeta : mcCollections) { - // System.out.println("looping over collections from mcMetaData: "+mcCollectionMeta.getName()); - if (mcCollectionMeta.getName().equals(name)){ - List collection = new ArrayList<> (); - // System.out.println("EmptyEventsReadoutDriver:: inserting collection "+name); - this.putCollection(mcCollectionMeta, collection, lcsimEvent); - } - } - - } - - System.out.println("####################### this should be an empty event ###################"); - printCollections(lcsimEvent); - System.out.println("#############################################################################"); - return lcsimEvent; - - } - - protected void putCollection(LCMetaData meta, List entries, EventHeader event) { - String[] readout = 
meta.getStringParameters().get("READOUT_NAME"); - if (readout != null) { - event.put(meta.getName(), entries, meta.getType(), meta.getFlags(), readout[0]); - } else { - event.put(meta.getName(), entries, meta.getType(), meta.getFlags()); - } - if (this.getHistogramLevel() > HLEVEL_NORMAL) - System.out.println("Putting collection" + meta.getName() + " into event."); - } - - private void getMCMetaData(EventHeader mcEvent){ - mcCollections = new ArrayList(mcEvent.getMetaData()); - } - - private void clearEvent(EventHeader event){ - List evtCollections = new ArrayList(event.getMetaData()); - for (LCMetaData evtCollectionMeta : evtCollections) { - String colName=evtCollectionMeta.getName(); - List col=(List)event.get(colName); - if(col.size()>0){ - System.out.println("clearing collection "+colName+" of size = "+col.size()); - ((List)event.get(colName)).clear(); - System.out.println(".....new size = "+col.size()); - } - } - } - - private void printCollections(EventHeader event){ - List Collections = new ArrayList(event.getMetaData()); - for (LCMetaData CollectionMeta : Collections) { - String CollectionName = CollectionMeta.getName(); - // check to see if this collection is in the base map - // if so, copy collection, clear it, and put it in new event. - List collection =new ArrayList<> ((List) event.get(CollectionName)); - System.out.println("EmptyEventsReadoutDriver::printCollections:: "+CollectionName+" has "+collection.size()+" entries"); - } - } - - @Override - protected double getTimeDisplacement() { - return 0; - } - - @Override - protected double getTimeNeededForLocalOutput() { - // TODO: Probably should have some defined value - buffer seems to be filled enough from the ecal delay alone, though. 
- return 0; - } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/GTPClusterNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/GTPClusterNoSpacingReadoutDriver.java deleted file mode 100755 index 539f28a7c3..0000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/GTPClusterNoSpacingReadoutDriver.java +++ /dev/null @@ -1,390 +0,0 @@ -package org.hps.digi.nospacing; - -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -import org.hps.conditions.database.DatabaseConditionsManager; -import org.hps.readout.ReadoutDataManager; -import org.hps.readout.ReadoutDriver; -import org.hps.readout.util.collection.LCIOCollection; -import org.hps.readout.util.collection.LCIOCollectionFactory; -import org.hps.readout.util.collection.TriggeredLCIOData; -import org.hps.recon.ecal.cluster.ClusterType; -import org.hps.record.daqconfig2019.ConfigurationManager2019; -import org.hps.record.daqconfig2019.DAQConfig2019; -import org.hps.record.daqconfig2019.VTPConfig2019; -import org.hps.record.daqconfig.ConfigurationManager; -import org.hps.record.daqconfig.DAQConfig; -import org.hps.record.daqconfig.GTPConfig; -import org.lcsim.event.CalorimeterHit; -import org.lcsim.event.Cluster; -import org.lcsim.event.EventHeader; -import org.lcsim.event.base.BaseCluster; -import org.lcsim.geometry.Detector; -import org.lcsim.geometry.subdetector.HPSEcal3; -import org.lcsim.geometry.subdetector.HPSEcal3.NeighborMap; -import org.lcsim.lcio.LCIOConstants; - -/** - * Class GTPClusterNoSpacingReadoutDriver produces GTP cluster - * objects for use in the readout trigger simulation. It takes in - * {@link org.lcsim.event.CalorimeterHit CalorimeterHit} objects as - * input and generates clusters from these using the GTP algorithm. - * This algorithm works by selected all hits in the current - * clock-cycle (4 ns period) and comparing them to adjacent hits. 
If - * a given hit is an energy maximum compared to all adjacent hits in - * both the current clock-cycle, and a number of clock-cycles before - * and after the current cycle (defined through the variable {@link - * org.hps.readout.ecal.updated.GTPClusterNoSpacingReadoutDriver#temporalWindow - * temporalWindow} and set through the method {@link - * org.hps.readout.ecal.updated.GTPClusterNoSpacingReadoutDriver#setClusterWindow(int) - * setClusterWindow(int)}), then it is a seed hit so long as it also - * exceeds a certain minimum energy (defined through the variable - * {@link - * org.hps.readout.ecal.updated.GTPClusterNoSpacingReadoutDriver#seedEnergyThreshold - * seedEnergyThreshold} and set through the method {@link - * org.hps.readout.ecal.updated.GTPClusterNoSpacingReadoutDriver#setSeedEnergyThreshold(double) - * setSeedEnergyThreshold(double)}).

- * Clusters are then output as objects of type {@link - * org.lcsim.event.Cluster Cluster} to the specified output - * collection. If the {@link - * org.hps.readout.ecal.updated.GTPClusterNoSpacingReadoutDriver#setWriteClusterCollection(boolean) - * setWriteClusterCollection(boolean)} is set to true, the clusters - * will also be persisted into the output LCIO file. - */ -public class GTPClusterNoSpacingReadoutDriver extends ReadoutDriver { - // ============================================================== - // ==== LCIO Collections ======================================== - // ============================================================== - - /** - * The name of the collection that contains the calorimeter hits - * from which clusters should be generated. - */ - private String inputCollectionName = "EcalCorrectedHits"; - /** - * The name of the collection into which generated clusters should - * be output. - */ - private String outputCollectionName = "EcalClustersGTP"; - - // ============================================================== - // ==== Driver Options ========================================== - // ============================================================== - - /** - * The time window used for cluster verification. A seed hit must - * be the highest energy hit within plus or minus this range in - * order to be considered a valid cluster. - */ - private int temporalWindow = 48; - /** - * The minimum energy needed for a hit to be considered as a seed - * hit candidate. - */ - private double seedEnergyThreshold = 0.050; - /** - * The local time for the driver. This starts at 2 ns due to a - * quirk in the timing of the {@link - * org.hps.readout.ecal.updated.EcalReadoutDriver - * EcalReadoutDriver}. - */ - private double localTime = 0.0; - /** - * The length of time by which objects produced by this driver - * are shifted due to the need to buffer data from later events. - * This is calculated automatically. 
- */ - private double localTimeDisplacement = 0; - - // ============================================================== - // ==== Driver Parameters ======================================= - // ============================================================== - - /** - * An object which can provide, given an argument cell ID, a map - * of cell IDs that are physically adjacent to the argument ID. - * This is used to determine adjacency for energy comparisons in - * the clustering algorithm. - */ - private NeighborMap neighborMap; - - private HPSEcal3 calorimeterGeometry = null; - - private boolean checkInputStatus=false; //don't check status if running on non-spaced events. - - /** - * Sets whether or not the DAQ configuration is applied into the driver - * the EvIO data stream or whether to read the configuration from data files. - * - * @param state - true indicates that the DAQ configuration is - * applied into the readout system, and false that it - * is not applied into the readout system. - */ - public void setDaqConfiguration2016AppliedintoReadout(boolean state) { - // If the DAQ configuration should be read, attach a listener - // to track when it updates. - if (state) { - ConfigurationManager.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - // Get the DAQ configuration. - DAQConfig daq = ConfigurationManager.getInstance(); - GTPConfig config = daq.getGTPConfig(); - - // Load the DAQ settings from the configuration manager. - seedEnergyThreshold = config.getSeedEnergyCutConfig().getLowerBound(); - } - }); - } - } - - /** - * Sets whether or not the DAQ configuration is applied into the driver - * the EvIO data stream or whether to read the configuration from data files. - * - * @param state - true indicates that the DAQ configuration is - * applied into the readout system, and false that it - * is not applied into the readout system. 
- */ - public void setDaqConfigurationAppliedintoReadout(boolean state) { - // If the DAQ configuration should be read, attach a listener - // to track when it updates. - if (state) { - ConfigurationManager2019.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - // Get the DAQ configuration. - DAQConfig2019 daq = ConfigurationManager2019.getInstance(); - VTPConfig2019 config = daq.getVTPConfig(); - - // Load the DAQ settings from the configuration manager. - seedEnergyThreshold = config.getEcalClusterSeedThr(); - temporalWindow = config.getEcalClusterHitDT(); - } - }); - } - } - - @Override - public void detectorChanged(Detector etector) { - // Get the calorimeter data object. - //HPSEcal3 ecal = (HPSEcal3) DatabaseConditionsManager.getInstance().getDetectorObject().getSubdetector("Ecal"); - calorimeterGeometry = (HPSEcal3) DatabaseConditionsManager.getInstance().getDetectorObject().getSubdetector("Ecal"); - if(calorimeterGeometry == null) { - throw new IllegalStateException("Error: Calorimeter geometry data object not defined."); - } - - // Get the calorimeter hit neighbor map. - neighborMap = calorimeterGeometry.getNeighborMap(); - if(neighborMap == null) { - throw new IllegalStateException("Error: Calorimeter hit neighbor map is not defined."); - } - } - - @Override - public void process(EventHeader event) { - // Check the data management driver to determine whether the - // input collection is available or not. - if(checkInputStatus&&!ReadoutDataManager.checkCollectionStatus(inputCollectionName, ReadoutDataManager.getCurrentTime() +192.0)) { - // System.out.println("Skipping GTP Readout with because collection doesn't exist at "+(ReadoutDataManager.getCurrentTime() + 192.0)); - return; - } - - // Get the hits that occur during the present clock-cycle, as - // well as the hits that occur in the verification window - // both before and after the current clock-cycle. - // TODO: Simplify this? 
- Collection allHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime(), ReadoutDataManager.getCurrentTime() + 192.0, inputCollectionName, CalorimeterHit.class); - // Collection foreHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime() - temporalWindow, ReadoutDataManager.getCurrentTime(), inputCollectionName, CalorimeterHit.class); - //Collection postHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime() + 4.0, ReadoutDataManager.getCurrentTime() + temporalWindow + 4.0, inputCollectionName, CalorimeterHit.class); - - - // List allHits = new ArrayList(seedCandidates.size() + foreHits.size() + postHits.size()); - - //allHits.addAll(foreHits); - //allHits.addAll(seedCandidates); - //allHits.addAll(postHits); - // System.out.println(this.getClass().getName()+":: "+inputCollectionName+":: local time = "+ReadoutDataManager.getCurrentTime()+" number of seeds = "+allHits.size()); - - // Store newly created clusters. - List gtpClusters = new ArrayList(); - - // Iterate over all seed hit candidates. - seedLoop: - for(CalorimeterHit seedCandidate : allHits) { - // A seed candidate must meet a minimum energy cut to be - // considered for clustering. - if(seedCandidate.getRawEnergy() < seedEnergyThreshold) { - continue seedLoop; - } - - // Collect other hits that are adjacent to the seed hit - // and may be a part of the cluster. - List clusterHits = new ArrayList(); - - // Iterate over all other hits in the clustering window - // and check that the seed conditions are met for the - // seed candidate. Note that all hits are properly within - // the clustering time window by definition, so the time - // condition is not checked explicitly. - hitLoop: - for(CalorimeterHit hit : allHits) { - // If the hit is not adjacent to the seed hit, it can - // be ignored. 
- if(!neighborMap.get(seedCandidate.getCellID()).contains(hit.getCellID())) { - continue hitLoop; - } - - // A seed hit must have the highest energy in its - // spatiotemporal window. If it is not, this is not a - // valid seed hit. - if(seedCandidate.getRawEnergy() < hit.getRawEnergy()) { - continue seedLoop; - } - - // Add the hit to the list of cluster hits. - clusterHits.add(hit); - } - - // If no adjacent hit was found that invalidates the seed - // condition, then the seed candidate is valid and a - // cluster should be formed. - gtpClusters.add(createBasicCluster(seedCandidate, clusterHits)); - } - - // Pass the clusters to the data management driver. - // System.out.println(this.getClass().getName()+":: number of GTP Clusters "+gtpClusters.size()); - ReadoutDataManager.addData(outputCollectionName, gtpClusters, Cluster.class); - } - - @Override - public void startOfData() { - // Define the output LCSim collection parameters. - LCIOCollectionFactory.setCollectionName(outputCollectionName); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollectionFactory.setFlags(1 << LCIOConstants.CLBIT_HITS); - LCIOCollection clusterCollectionParams = LCIOCollectionFactory.produceLCIOCollection(Cluster.class); - - // Instantiate the GTP cluster collection with the readout - // data manager. - localTimeDisplacement = temporalWindow + 4.0; - addDependency(inputCollectionName); - ReadoutDataManager.registerCollection(clusterCollectionParams, false); - } - - @Override - protected Collection> getOnTriggerData(double triggerTime) { - // If clusters are not to be output, return null. - if(!isPersistent()) { return null; } - - // Create a list to store the on-trigger collections. There - // are two collections outputs for this driver - the clusters - // and the cluster hits. Unlike other drivers, the clusterer - // must handle its own output because the manager does not - // know that it must also specifically output the hits from - // each cluster as well. 
- List> collectionsList = new ArrayList>(2); - - // Define the LCIO collection settings for the clusters. - LCIOCollectionFactory.setCollectionName(outputCollectionName); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollectionFactory.setFlags(1 << LCIOConstants.CLBIT_HITS); - LCIOCollection clusterCollectionParams = LCIOCollectionFactory.produceLCIOCollection(Cluster.class); - - // Define the LCIO collection settings for the cluster hits. - int hitFlags = 0; - hitFlags += 1 << LCIOConstants.RCHBIT_TIME; - hitFlags += 1 << LCIOConstants.RCHBIT_LONG; - LCIOCollectionFactory.setCollectionName("EcalClustersGTPSimHits"); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollectionFactory.setFlags(hitFlags); - LCIOCollectionFactory.setReadoutName(calorimeterGeometry.getReadout().getName()); - LCIOCollection clusterHitsCollectionParams = LCIOCollectionFactory.produceLCIOCollection(CalorimeterHit.class); - - // Get the output time range for clusters. This is either the - // user defined output range, or the default readout window - // that is defined by the readout data manager. - double startTime; - if(Double.isNaN(getReadoutWindowBefore())) { startTime = triggerTime - ReadoutDataManager.getTriggerOffset(); } - else { startTime = triggerTime - getReadoutWindowBefore(); } - - double endTime; - if(Double.isNaN(getReadoutWindowAfter())) { endTime = startTime + ReadoutDataManager.getReadoutWindow(); } - else { endTime = triggerTime + getReadoutWindowAfter(); } - - // Get the cluster data and populate a list of cluster hits. - Collection clusters = ReadoutDataManager.getData(startTime, endTime, outputCollectionName, Cluster.class); - List clusterHits = new ArrayList(); - for(Cluster cluster : clusters) { - clusterHits.addAll(cluster.getCalorimeterHits()); - } - - // Create the LCIO on-trigger data lists. 
- TriggeredLCIOData clusterHitData = new TriggeredLCIOData(clusterHitsCollectionParams); - clusterHitData.getData().addAll(clusterHits); - collectionsList.add(clusterHitData); - - TriggeredLCIOData clusterData = new TriggeredLCIOData(clusterCollectionParams); - clusterData.getData().addAll(clusters); - collectionsList.add(clusterData); - - // Return the on-trigger data. - return collectionsList; - } - - @Override - protected double getTimeDisplacement() { - return localTimeDisplacement; - } - - @Override - protected double getTimeNeededForLocalOutput() { - return 0; - } - - /** - * Creates a new cluster object from a seed hit and list of hits. - * @param seedHit - The seed hit of the new cluster. - * @param hits - The hits for the new cluster. - * @return Returns a {@link org.lcsim.event.Cluster Cluster} - * object with the specified properties. - */ - private static final Cluster createBasicCluster(CalorimeterHit seedHit, List hits) { - BaseCluster cluster = new BaseCluster(); - cluster.setType(ClusterType.GTP.getType()); - cluster.addHit(seedHit); - cluster.setPosition(seedHit.getDetectorElement().getGeometry().getPosition().v()); - cluster.setNeedsPropertyCalculation(false); - cluster.addHits(hits); - return cluster; - } - - /** - * Sets the size of the hit verification temporal window. Note - * that this defines the size of the window in one direction, so - * the full time window will be (2 * clusterWindow)+ - * 1 clock-cycles in length. (i.e., it will be a length of - * clusterWindow before the seed hit, a length of - * clusterWindow after the seed hit, plus the cycle - * that includes the seed hit.) Time length is in clock-cycles. - * @param value - The number of clock-cycles around the hit in - * one direction. - */ - public void setClusterWindow(int value) { - temporalWindow = value * 4; - } - - /** - * Sets the minimum seed energy needed for a hit to be considered - * for forming a cluster. 
This is the seed energy lower bound - * trigger cut and is in units of GeV. - * @param value - The minimum cluster seed energy in GeV. - */ - public void setSeedEnergyThreshold(double value) { - seedEnergyThreshold = value; - } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/HodoscopeDigiWithPulseNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/HodoscopeDigiWithPulseNoSpacingReadoutDriver.java deleted file mode 100644 index 57103f85e5..0000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/HodoscopeDigiWithPulseNoSpacingReadoutDriver.java +++ /dev/null @@ -1,224 +0,0 @@ -package org.hps.digi.nospacing; - -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.hps.readout.ReadoutDriver; -import org.hps.conditions.database.DatabaseConditionsManager; -import org.hps.conditions.hodoscope.HodoscopeChannel; -import org.hps.conditions.hodoscope.HodoscopeCalibration; -import org.hps.conditions.hodoscope.HodoscopeCalibration.HodoscopeCalibrationCollection; -import org.hps.conditions.hodoscope.HodoscopeChannel.HodoscopeChannelCollection; -import org.hps.conditions.hodoscope.HodoscopeGain; -import org.hps.conditions.hodoscope.HodoscopeGain.HodoscopeGainCollection; -import org.hps.conditions.hodoscope.HodoscopeTimeShift; -import org.hps.conditions.hodoscope.HodoscopeTimeShift.HodoscopeTimeShiftCollection; -import org.hps.readout.ReadoutTimestamp; -import org.lcsim.event.RawTrackerHit; -import org.lcsim.geometry.Detector; -import org.lcsim.geometry.subdetector.Hodoscope_v1; - -import org.hps.conditions.hodoscope.HodoscopeConditions; - -import org.hps.record.daqconfig2019.ConfigurationManager2019; -import org.hps.record.daqconfig2019.DAQConfig2019; -import org.hps.record.daqconfig2019.FADCConfigHodo2019; - -/** - * Class HodoscopeDigitizationWithPulserDataMergingReadoutDriver is an - * implementation of the 
{@link - * org.hps.digi.CalDigiWithPulserNoSpacingReadoutDriver} for a subdetector of type {@link - * org.lcsim.geometry.subdetector.Hodoscope_v1 Hodoscope_v1}. It - * handles all of the hodoscope-specific functions needed by the - * superclass. - * - * @author Tongtong Cao - */ -public class HodoscopeDigiWithPulseNoSpacingReadoutDriver extends CalDigiWithPulserNoSpacingReadoutDriver { - // The DAQ configuration manager for FADC parameters. - private FADCConfigHodo2019 config = new FADCConfigHodo2019(); - private boolean configStat = false; // Indicates if DAQ configuration is loaded - - // The number of nanoseconds in a clock-cycle (sample). - private static final int nsPerSample = 4; - - /** Stores the set of all channel IDs for the hodoscope. */ - private Set channelIDSet = new HashSet(); - /** Maps hodoscope channels to the gain for that channel. */ - private Map channelToGainsMap = new HashMap(); - /** Maps hodoscope channels to the time shifts for that channel. */ - private Map channelToTimeShiftsMap = new HashMap(); - /** Maps hodoscope channels to the noise sigma and pedestals for that channel. */ - private Map channelToCalibrationsMap = new HashMap(); - /** Factor for gain conversion from self-define-unit/ADC to MeV/ADC. */ - private double factorGainConversion = 0.000833333; - /** Gain scaling factor for raw energy (self-defined unit) of FADC hits. - * In DAQ configuration, gains are scaled by the gain scaling factor for two-hole tiles. - * Such gains from DAQ configuration should be divided by the factor. - */ - - private HodoscopeConditions hodoConditions = null; - - public HodoscopeDigiWithPulseNoSpacingReadoutDriver() { - // Set the default values for each subdetector-dependent - // parameter. 
- setGeometryName("Hodoscope"); - - setInputHitCollectionName("HodoscopeHits"); - setOutputHitCollectionName("HodoscopeRawHits"); - setTruthRelationsCollectionName("HodoscopeTruthRelations"); - setTriggerPathTruthRelationsCollectionName("HodoscopeTriggerPathTruthRelations"); - setReadoutHitCollectionName("HodoscopeReadoutHits"); - - setNumberSamplesAfter(10); - setNumberSamplesBefore(6); - setPulseTimeParameter(4.0); - setPhotoelectronsPerMeV(10.0); - - setIntegrationThreshold(12); - } - - /** - * Sets whether or not the DAQ configuration is applied into the driver - * the EvIO data stream or whether to read the configuration from data files. - * - * @param state - true indicates that the DAQ configuration is - * applied into the readout system, and false that it - * is not applied into the readout system. - */ - public void setDaqConfigurationAppliedintoReadout(boolean state) { - // If the DAQ configuration should be read, attach a listener - // to track when it updates. - if (state) { - ConfigurationManager2019.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - // Get the DAQ configuration. - DAQConfig2019 daq = ConfigurationManager2019.getInstance(); - - // Load the DAQ settings from the configuration manager. - numSamplesAfter = daq.getHodoFADCConfig().getNSA() / nsPerSample; - numSamplesBefore = daq.getHodoFADCConfig().getNSB() / nsPerSample; - readoutWindow = daq.getHodoFADCConfig().getWindowWidth() / nsPerSample; - pulserDataWindow = readoutWindow; - - // Get the FADC configuration. - config = daq.getHodoFADCConfig(); - configStat = true; - } - }); - } - - } - - - @Override - public void detectorChanged(Detector detector) { - // Get a copy of the calorimeter conditions for the detector. - hodoConditions = DatabaseConditionsManager.getInstance().getHodoConditions(); - - // Populate the channel ID collections. - populateChannelCollections(); - - // Run the superclass method. 
- super.detectorChanged(detector); - } - - @Override - protected Set getChannelIDs() { - return channelIDSet; - } - - @Override - protected Long getID(RawTrackerHit hit) { - return Long.valueOf(hodoConditions.getChannels().findGeometric(hit.getCellID()).getChannelId().intValue()); - } - - @Override - protected double getGainConditions(long channelID) { - if(channelToGainsMap.containsKey(Long.valueOf(channelID))) { - return channelToGainsMap.get(Long.valueOf(channelID)).getGain() * factorGainConversion; - } else { - throw new IllegalArgumentException("No gain conditions exist for hodoscope channel ID \"" + channelID + "\"."); - } - } - - @Override - protected double getNoiseConditions(long channelID) { - if(channelToCalibrationsMap.containsKey(Long.valueOf(channelID))) { - return channelToCalibrationsMap.get(Long.valueOf(channelID)).getNoise(); - } else { - throw new IllegalArgumentException("No noise conditions exist for hodoscope channel ID \"" + channelID + "\"."); - } - } - - @Override - protected double getPedestalConditions(long channelID) { - if (channelToCalibrationsMap.containsKey(Long.valueOf(channelID))) { - return channelToCalibrationsMap.get(Long.valueOf(channelID)).getPedestal(); - } else { - throw new IllegalArgumentException( - "No pedestal conditions exist for hodoscope channel ID \"" + channelID + "\"."); - } - } - - @Override - protected double getTimeShiftConditions(long channelID) { - if(channelToTimeShiftsMap.containsKey(Long.valueOf(channelID))) { - return channelToTimeShiftsMap.get(Long.valueOf(channelID)).getTimeShift(); - } else { - throw new IllegalArgumentException("No time shift conditions exist for hodoscope channel ID \"" + channelID + "\"."); - } - } - - @Override - protected int getTimestampFlag() { - return ReadoutTimestamp.SYSTEM_HODOSCOPE; - } - - /** - * Populates the channel ID set and maps all existing channels to - * their respective conditions. 
- */ - private void populateChannelCollections() { - // Load the conditions database and get the hodoscope channel - // collection data. - final DatabaseConditionsManager conditions = DatabaseConditionsManager.getInstance(); - final HodoscopeGainCollection gains = conditions.getCachedConditions(HodoscopeGainCollection.class, "hodo_gains").getCachedData(); - final HodoscopeChannelCollection channels = conditions.getCachedConditions(HodoscopeChannelCollection.class, "hodo_channels").getCachedData(); - final HodoscopeTimeShiftCollection timeShifts = conditions.getCachedConditions(HodoscopeTimeShiftCollection.class, "hodo_time_shifts").getCachedData(); - final HodoscopeCalibrationCollection calibrations = conditions.getCachedConditions(HodoscopeCalibrationCollection.class, "hodo_calibrations").getCachedData(); - - // Map the gains to channel IDs. - for(HodoscopeGain gain : gains) { - channelToGainsMap.put(Long.valueOf(gain.getChannelId().intValue()), gain); - } - - // Map the pedestals and noise to channel IDs. - for(HodoscopeCalibration calibration : calibrations) { - channelToCalibrationsMap.put(Long.valueOf(calibration.getChannelId().intValue()), calibration); - } - - // Map time shifts to channel IDs. - for(HodoscopeTimeShift timeShift : timeShifts) { - channelToTimeShiftsMap.put(Long.valueOf(timeShift.getChannelId().intValue()), timeShift); - } - - // Store the set of all channel IDs. - for(HodoscopeChannel channel : channels) { - channelIDSet.add(Long.valueOf(channel.getChannelId().intValue())); - } - } - - /** - * Sets factor for gain conversion from self-defined unit/ADC to MeV/ADC - * @param factor - factor for gain conversion from self-defined-unit/ADC to MeV/ADC. 
- */ - public void setFactorGainConversion(double factor) { - factorGainConversion = factor; - } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/HodoscopePatternNoSpacingDriver.java b/digi/src/main/java/org/hps/digi/nospacing/HodoscopePatternNoSpacingDriver.java deleted file mode 100644 index 25ae76ed78..0000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/HodoscopePatternNoSpacingDriver.java +++ /dev/null @@ -1,436 +0,0 @@ -package org.hps.digi.nospacing; - -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; - -import java.util.Map; - -import java.awt.Point; -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.util.ArrayList; -import java.util.List; - -import org.hps.conditions.database.DatabaseConditionsManager; -import org.hps.conditions.hodoscope.HodoscopeChannel; -import org.hps.conditions.hodoscope.HodoscopeChannel.HodoscopeChannelCollection; -import org.hps.readout.ReadoutDataManager; -import org.hps.readout.ReadoutDriver; -import org.hps.readout.util.collection.LCIOCollection; -import org.hps.readout.util.collection.LCIOCollectionFactory; -import org.hps.record.daqconfig2019.ConfigurationManager2019; -import org.hps.record.daqconfig2019.DAQConfig2019; -import org.hps.record.daqconfig2019.VTPConfig2019; -import org.hps.readout.util.HodoscopePattern; -import org.lcsim.event.CalorimeterHit; -import org.lcsim.event.EventHeader; -import org.lcsim.geometry.Detector; - -/** - * Class HodoscopePatternReadoutDriver produces hodoscope pattern - * objects for Ecal-hodo matching in the trigger simulation. Persistency of Hodo - * FADC hits is persistentTime. On the other hand, hodo FADC hits - * is earlier to enter the trigger system than Ecal by - * timeEarlierThanEcal Therefore, for each clock-cycle, FADC hits - * in [localTime - (persistentTime - timeEarlierThanEcal), localTime + - * timeEarlierThanEcal + 4] are taken into account to generate hodoscope - * patterns for all layers. 
- */ -public class HodoscopePatternNoSpacingDriver extends ReadoutDriver { - /** Maps hodoscope channel IDs to channels. */ - private Map channelMap = new HashMap(); - - /** - * The name of the collection that contains the hodo FADC hits, which raw energy - * is self-defined. Through the hodo FADC hits, hodoscope pattern is generated. - */ - private String inputCollectionName = "HodoscopeCorrectedHits"; - /** - * The name of the collection into which generated hodoscope patterns for all - * four layers should be output. - */ - private String outputCollectionName = "HodoscopePatterns"; - - /** - * The local time for the driver. - */ - private double localTime = 0.0; - - /** - * Hodoscope FADC hit cut - */ - private double fADCHitThreshold = 1.0; - - /** - * Hodoscope tilt/cluster hit cut - */ - private double hodoHitThreshold = 200.0; - - /** - * Gain scaling factor for hits at two-hole tiles. - * Gains from database need to be scaled by the factor - * Gains in the DAQ configuration have been scaled by the factor. - */ - private double gainFactor = 1.25 / 2; - - /** - * Persistent time for hodoscope FADC hit in unit of ns - */ - private double persistentTime = 60.0; - - /** - * Time for hodoscope FADC hits earlier to enter the trigger system than Ecal - * with unit of ns - */ - private double timeEarlierThanEcal = 0.0; - - /** - * The length of time by which objects produced by this driver are shifted due - * to the need to buffer data from later events. This is calculated - * automatically. 
Hodo FADC hits enter the trigger system earlier than Ecal hits - * by timeEarlierThanEcal - */ - private double localTimeDisplacement = 0.0; - - /** - * According to setup in database, index for hodoscope layers are expressed as - * (layer+1)*y - */ - public static final int TopLayer1 = 1; - public static final int TopLayer2 = 2; - public static final int BotLayer1 = -1; - public static final int BotLayer2 = -2; - - /** - * List for 4 layers; - */ - private List layerList = new ArrayList<>(4); - - /** - * List for 8 (x, hole) points of each layer - */ - private List xHolePointList = new ArrayList<>(8); - - private boolean daqConfigurationAppliedintoReadout = false; - - /** - * Sets whether or not the DAQ configuration is applied into the driver - * the EvIO data stream or whether to read the configuration from data files. - * - * @param state - true indicates that the DAQ configuration is - * applied into the readout system, and false that it - * is not applied into the readout system. - */ - public void setDaqConfigurationAppliedintoReadout(boolean state) { - daqConfigurationAppliedintoReadout = state; - - // If the DAQ configuration should be read, attach a listener - // to track when it updates. - if (state) { - ConfigurationManager2019.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - // Get the DAQ configuration. - DAQConfig2019 daq = ConfigurationManager2019.getInstance(); - VTPConfig2019 config = daq.getVTPConfig(); - - // Load the DAQ settings from the configuration manager. - fADCHitThreshold = config.getHodoFADCHitThr(); - hodoHitThreshold = config.getHodoThr(); - persistentTime = config.getHodoDT(); - } - }); - } - } - - @Override - public void process(EventHeader event) { - - // Check the data management driver to determine whether the - // input collection is available or not. 
- // if (!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + localTimeDisplacement)) { - // return; - // } - if (!ReadoutDataManager.checkCollectionStatus(inputCollectionName, ReadoutDataManager.getCurrentTime())) { - // System.out.println(this.getClass().getName()+":: "+inputCollectionName+" is not ready for this!"); - return; - } - - // Hodoscope FADC hits enter the trigger system earlier than Ecal by the time - // timeEarlierThanEcal . - // On the other hand, hodoscope FADC hits persist with a range of - // persistentTime. - // To build current hodo patterns, FADC hits between localTime - (persistentTime - // - timeEarlierThanEcal) and localTime + timeEarlierThanEcal + 4 are used. - // Collection fadcHits = ReadoutDataManager.getData( - // localTime - (persistentTime - timeEarlierThanEcal), localTime + timeEarlierThanEcal + 4.0, - // inputCollectionName, CalorimeterHit.class); - - Collection fadcHits = ReadoutDataManager.getData( - ReadoutDataManager.getCurrentTime() - (persistentTime - timeEarlierThanEcal), ReadoutDataManager.getCurrentTime() + timeEarlierThanEcal + 4.0, - inputCollectionName, CalorimeterHit.class); - - // System.out.println(this.getClass().getName()+":: found "+fadcHits.size()+" fadcHits"); - // Increment the local time. 
- - // All hits over fadcHitThreshold are saved for each hole of each - // layer - Map>> energyListMapForLayerMap = new HashMap>>(); - - for (int layer : layerList) { - Map> energyListMap = new HashMap>(); - for (Point point : xHolePointList) { - energyListMap.put(point, new ArrayList()); - } - energyListMapForLayerMap.put(layer, energyListMap); - } - - for (CalorimeterHit hit : fadcHits) { - double energy = hit.getRawEnergy(); - if (energy > fADCHitThreshold) { - Long cellID = hit.getCellID(); - int layer = channelMap.get(cellID).getLayer(); - int y = channelMap.get(cellID).getIY(); - int x = channelMap.get(cellID).getIX(); - int hole = channelMap.get(cellID).getHole(); - - Point point = new Point(x, hole); - // Energy of hits is scaled except hits at tiles 0 and 4 - if(x == 0 || x == 4) energyListMapForLayerMap.get((layer + 1) * y).get(point).add(energy); - else { - // Gains in the DAQ configuration has been scaled by the factor. - if(daqConfigurationAppliedintoReadout) energyListMapForLayerMap.get((layer + 1) * y).get(point).add(energy); - else energyListMapForLayerMap.get((layer + 1) * y).get(point).add(energy * gainFactor); - } - } - } - - //Get maximum of energy in lists for each hole of each layer - Map> maxEnergyMapForLayerMap = new HashMap>(); - for (int layer : layerList) { - Map maxEnergyMap = new HashMap<>(); - for (Point point : xHolePointList) { - if(energyListMapForLayerMap.get(layer).get(point).size() != 0) - maxEnergyMap.put(point, Collections.max(energyListMapForLayerMap.get(layer).get(point))); - else - maxEnergyMap.put(point, 0.); - - } - maxEnergyMapForLayerMap.put(layer, maxEnergyMap); - } - - //Hodoscope patterns for all layers - //Order of list: TopLayer1, TopLayer2, BotLayer1, BotLayer2 - List hodoPatterns = new ArrayList<>(4); - - // Flag to determine if a pattern list at the current clock-cycle is added into data manager - boolean flag = false; - - for (int i = 0; i < 4; i++) { - HodoscopePattern pattern = new HodoscopePattern(); - - Map 
maxEnergyMap = maxEnergyMapForLayerMap.get(layerList.get(i)); - - if (maxEnergyMap.get(xHolePointList.get(0)) > hodoHitThreshold) { - pattern.setHitStatus(HodoscopePattern.HODO_LX_1, true); - flag = true; - } - if (maxEnergyMap.get(xHolePointList.get(1)) + maxEnergyMap.get(xHolePointList.get(2)) > hodoHitThreshold) { - pattern.setHitStatus(HodoscopePattern.HODO_LX_2, true); - flag = true; - } - if (maxEnergyMap.get(xHolePointList.get(3)) + maxEnergyMap.get(xHolePointList.get(4)) > hodoHitThreshold) { - pattern.setHitStatus(HodoscopePattern.HODO_LX_3, true); - flag = true; - } - if (maxEnergyMap.get(xHolePointList.get(5)) + maxEnergyMap.get(xHolePointList.get(6)) > hodoHitThreshold) { - pattern.setHitStatus(HodoscopePattern.HODO_LX_4, true); - flag = true; - } - if (maxEnergyMap.get(xHolePointList.get(7)) > hodoHitThreshold) { - pattern.setHitStatus(HodoscopePattern.HODO_LX_5, true); - flag = true; - } - if (maxEnergyMap.get(xHolePointList.get(0)) + maxEnergyMap.get(xHolePointList.get(1)) - + maxEnergyMap.get(xHolePointList.get(2)) > hodoHitThreshold - && maxEnergyMap.get(xHolePointList.get(0)) != 0 - && (maxEnergyMap.get(xHolePointList.get(1)) != 0 || maxEnergyMap.get(xHolePointList.get(2)) != 0)) { - pattern.setHitStatus(HodoscopePattern.HODO_LX_CL_12, true); - flag = true; - } - if (maxEnergyMap.get(xHolePointList.get(1)) + maxEnergyMap.get(xHolePointList.get(2)) - + maxEnergyMap.get(xHolePointList.get(3)) - + maxEnergyMap.get(xHolePointList.get(4)) > hodoHitThreshold - && (maxEnergyMap.get(xHolePointList.get(1)) != 0 || maxEnergyMap.get(xHolePointList.get(2)) != 0) - && (maxEnergyMap.get(xHolePointList.get(3)) != 0 || maxEnergyMap.get(xHolePointList.get(4)) != 0)) { - pattern.setHitStatus(HodoscopePattern.HODO_LX_CL_23, true); - flag = true; - } - if (maxEnergyMap.get(xHolePointList.get(3)) + maxEnergyMap.get(xHolePointList.get(4)) - + maxEnergyMap.get(xHolePointList.get(5)) - + maxEnergyMap.get(xHolePointList.get(6)) > hodoHitThreshold - && 
(maxEnergyMap.get(xHolePointList.get(3)) != 0 || maxEnergyMap.get(xHolePointList.get(4)) != 0) - && (maxEnergyMap.get(xHolePointList.get(5)) != 0 || maxEnergyMap.get(xHolePointList.get(6)) != 0)) { - pattern.setHitStatus(HodoscopePattern.HODO_LX_CL_34, true); - flag = true; - } - if (maxEnergyMap.get(xHolePointList.get(5)) + maxEnergyMap.get(xHolePointList.get(6)) - + maxEnergyMap.get(xHolePointList.get(7)) > hodoHitThreshold - && (maxEnergyMap.get(xHolePointList.get(5)) != 0 || maxEnergyMap.get(xHolePointList.get(6)) != 0) - && maxEnergyMap.get(xHolePointList.get(7)) != 0) { - pattern.setHitStatus(HodoscopePattern.HODO_LX_CL_45, true); - flag = true; - } - - hodoPatterns.add(pattern); - } - // System.out.println(this.getClass().getName()+":: found "+hodoPatterns.size()+" patterns"); - - // At leaset there is a hodo tilt/cluster hit in any layer, then the pattern list is added into data manager - if(flag == true){ - // System.out.println(this.getClass().getName()+":: at least one of the patterns was good!!!"); - ReadoutDataManager.addData(outputCollectionName, hodoPatterns, HodoscopePattern.class); - } - } - - @Override - public void startOfData() { - // Define the output LCSim collection parameters. - LCIOCollectionFactory.setCollectionName(outputCollectionName); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollection patternCollectionParams = LCIOCollectionFactory - .produceLCIOCollection(HodoscopePattern.class); - - // Instantiate the GTP cluster collection with the readout - // data manager. 
- localTimeDisplacement = timeEarlierThanEcal + 4.0; - addDependency(inputCollectionName); - ReadoutDataManager.registerCollection(patternCollectionParams, false); - - initLists(); - } - - /** - * Initiate (layer, y) list and (x, hole) list - */ - private void initLists() { - // Add elements for layer list - layerList.add(TopLayer1); - layerList.add(TopLayer2); - layerList.add(BotLayer1); - layerList.add(BotLayer2); - - // Add elements for (x, hole) point list - xHolePointList.add(new Point(0, 0)); - xHolePointList.add(new Point(1, -1)); - xHolePointList.add(new Point(1, 1)); - xHolePointList.add(new Point(2, -1)); - xHolePointList.add(new Point(2, 1)); - xHolePointList.add(new Point(3, -1)); - xHolePointList.add(new Point(3, 1)); - xHolePointList.add(new Point(4, 0)); - } - - @Override - public void detectorChanged(Detector detector) { - // Populate the channel ID collections. - populateChannelCollections(); - } - - /** - * Populates the channel ID set and maps all existing channels to their - * respective conditions. - */ - private void populateChannelCollections() { - // Load the conditions database and get the hodoscope channel - // collection data. - final DatabaseConditionsManager conditions = DatabaseConditionsManager.getInstance(); - final HodoscopeChannelCollection channels = conditions - .getCachedConditions(HodoscopeChannelCollection.class, "hodo_channels").getCachedData(); - - // Map channels to channel IDs - for (HodoscopeChannel channel : channels) { - channelMap.put(Long.valueOf(channel.getChannelId().intValue()), channel); - } - } - - @Override - protected double getTimeDisplacement() { - return localTimeDisplacement; - } - - @Override - protected double getTimeNeededForLocalOutput() { - return 0; - } - - /** - * Sets the name of the input collection containing the objects of type - * {@link org.lcsim.event.CalorimeterHit CalorimeterHit} that are output by the - * digitization driver. - * - * @param collection - The name of the input hit collection. 
- */ - public void setInputCollectionName(String collection) { - inputCollectionName = collection; - } - - /** - * Sets the name of the output collection containing the objects of type - * {@link org.hps.readout.hodoscope.HodoscopePattern HodoscopePattern} that are - * output by this driver. - * - * @param collection - The name of the output hodoscope pattern collection. - */ - public void setOutputCollectionName(String collection) { - outputCollectionName = collection; - } - - /** - * Sets hodoscope FADC hit threshold - * - * @param FADC hit threshold - */ - public void setFADCHitThreshold(double fADCHitThreshold) { - this.fADCHitThreshold = fADCHitThreshold; - } - - /** - * Sets hodoscope tilt/cluster hit threshold - * - * @param hodoscope tilt/cluster hit threshold - */ - public void setHodoHitThreshold(double hodoHitThreshold) { - this.hodoHitThreshold = hodoHitThreshold; - } - - /** - * Set persistency for hodoscope FADC hit in unit of ns - * - * @param persistency for hodoscope FADC hit in unit of ns - */ - public void setPersistentTime(double persistentTime) { - this.persistentTime = persistentTime; - } - - /** - * Set time for hodoscope FADC hits earlier to enter the trigger system than - * Ecal with unit of ns - * - * @param time for hodoscope FADC hits earlier to enter the trigger system than - * Ecal with unit of ns - */ - public void setTimeEarlierThanEcal(double timeEarlierThanEcal) { - this.timeEarlierThanEcal = timeEarlierThanEcal; - } - - /** - * Set gain factor for raw energy (self-defined unit) of FADC hits - * - * @param gain factor for raw energy (self-defined unit) of FADC hits - */ - public void setGainFactor(double gainFactor) { - this.gainFactor = gainFactor; - } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/HodoscopeRawConverterNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/HodoscopeRawConverterNoSpacingReadoutDriver.java deleted file mode 100755 index 337b069269..0000000000 --- 
a/digi/src/main/java/org/hps/digi/nospacing/HodoscopeRawConverterNoSpacingReadoutDriver.java +++ /dev/null @@ -1,78 +0,0 @@ -package org.hps.digi.nospacing; - -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; - -import org.hps.readout.ReadoutDriver; -//import org.hps.readout.RawConverterNoSpacingReadoutDriver; -import org.hps.readout.rawconverter.AbstractMode3RawConverter; -import org.hps.readout.rawconverter.HodoscopeReadoutMode3RawConverter; -import org.hps.record.daqconfig2019.ConfigurationManager2019; -import org.hps.record.daqconfig2019.DAQConfig2019; -import org.lcsim.geometry.Detector; -import org.lcsim.geometry.subdetector.Hodoscope_v1; - -/** - * HodoscopeRawConverterNoSpacingReadoutDriver is an - * implementation of {@link org.hps.readout.RawConverterReadoutDriver - * RawConverterReadoutDriver} for the hodoscope subdetector. - * - * @see org.hps.readout.RawConverterReadoutDriver - */ -public class HodoscopeRawConverterNoSpacingReadoutDriver extends RawConverterNoSpacingReadoutDriver { - /** - * The converter object responsible for processing raw hits into - * proper {@link org.lcsim.event.CalorimeterHit CalorimeterHit} - * objects. - */ - private HodoscopeReadoutMode3RawConverter converter = new HodoscopeReadoutMode3RawConverter(); - - /** - * Instantiates the driver with the correct default parameters. - */ - public HodoscopeRawConverterNoSpacingReadoutDriver() { - super("HodoscopeRawHits", "HodoscopeCorrectedHits"); - } - - /** - * Sets whether or not the DAQ configuration is applied into the driver - * the EvIO data stream or whether to read the configuration from data files. - * - * @param state - true indicates that the DAQ configuration is - * applied into the readout system, and false that it - * is not applied into the readout system. - */ - public void setDaqConfigurationAppliedintoReadout(boolean state) { - // Track changes in the DAQ configuration. 
- if (state) { - ConfigurationManager2019.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - // Get the DAQ configuration. - DAQConfig2019 daq = ConfigurationManager2019.getInstance(); - - // Load the DAQ settings from the configuration manager. - getConverter().setNumberSamplesAfter(daq.getHodoFADCConfig().getNSA()); - getConverter().setNumberSamplesBefore(daq.getHodoFADCConfig().getNSB()); - - // Get the FADC configuration. - getConverter().setFADCConfigHodo2019(daq.getHodoFADCConfig()); - } - }); - } - } - - @Override - protected AbstractMode3RawConverter getConverter() { - return converter; - } - - @Override - protected String getSubdetectorReadoutName(Detector detector) { - Hodoscope_v1 hodoscopeGeometry = (Hodoscope_v1) detector.getSubdetector("Hodoscope"); - return hodoscopeGeometry.getReadout().getName(); - } - - @Override - protected void updateDetectorDependentParameters(Detector detector) { } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/NoSpacingTriggerDriver.java.donothing b/digi/src/main/java/org/hps/digi/nospacing/NoSpacingTriggerDriver.java.donothing deleted file mode 100755 index df2678722a..0000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/NoSpacingTriggerDriver.java.donothing +++ /dev/null @@ -1,159 +0,0 @@ -package org.hps.digi.nospacing; - -import org.hps.readout.ReadoutDataManager; -import org.hps.readout.ReadoutDriver; - -/** - * Class NoSpacingTriggerDriver is a special subclass of {@link - * org.hps.readout.ReadoutDriver ReadoutDriver} that is responsible - * for simulating trigger behavior. It implements additional behavior - * for handling trigger dead times and issuing triggers to the {@link - * org.hps.readout.ReadoutDataManager ReadoutDataManager}.

- * Implementing drivers are responsible for checking if trigger - * conditions are met. In the event that they are, the method {@link - * org.hps.readout.NoSpacingTriggerDriver#sendTrigger() sendTrigger()} should - * be used to issue the trigger to the data manager. This method will - * automatically check that the dead time condition is met, and will - * only issue the trigger command in the event that it is, so - * implementing drivers do not need to check this condition manually. - *

- * For usage instructions, please see ReadoutDriver. - * @see org.hps.readout.ReadoutDriver - */ -public abstract class NoSpacingTriggerDriver extends ReadoutDriver { - /** - * singles trigger types - */ - public static final String SINGLES0 = "singles0"; - public static final String SINGLES1 = "singles1"; - public static final String SINGLES2 = "singles2"; - public static final String SINGLES3 = "singles3"; - - public static final String TOP = "top"; - public static final String BOT = "bot"; - public static final String TOPBOT = "topbot"; - - public static final String PAIR0 = "pair0"; - public static final String PAIR1 = "pair1"; - public static final String PAIR2 = "pair2"; - public static final String PAIR3 = "pair3"; - - public static final String PULSER = "pulser"; - - public static final String FEE = "fee"; - - /** - * The amount of time that must pass after a trigger before a new - * trigger can be issued, in units of nanoseconds. - */ - private double deadTime = 0.0; - /** - * The last time at which a trigger was issued to the data - * manager, in units of nanoseconds. - */ - private double lastTrigger = Double.NaN; - - /** - * Checks whether the trigger is currently in dead time or not. - * @return Returns true if the trigger is currently - * in dead time, and false if it is not and a - * trigger may be issued. - */ - protected boolean isInDeadTime() { - if(Double.isNaN(lastTrigger)) { return false; } - else { return (lastTrigger + deadTime) > ReadoutDataManager.getCurrentTime(); } - } - - @Override - protected boolean isPersistent() { - throw new UnsupportedOperationException(); - } - - /** - * Gets the dead time for this trigger. - * @return Returns the dead time in units of nanoseconds. - */ - protected double getDeadTime() { - return deadTime; - } - - /** - * Gets the time at which the last trigger occurred. 
- * @return Returns the last trigger time in units of nanoseconds, - * or as {@link java.lang.Double#NaN Double.NaN} if no trigger - * has occurred yet. - */ - protected double getLastTriggerTime() { - return lastTrigger; - } - - @Override - protected double getReadoutWindowAfter() { - throw new UnsupportedOperationException(); - } - - @Override - protected double getReadoutWindowBefore() { - throw new UnsupportedOperationException(); - } - - /** - * Issues a trigger to the data manager so long as the trigger is - * not presently in dead time. - */ - protected void sendTrigger() { - if(!isInDeadTime()) { - ReadoutDataManager.sendTrigger(this); - lastTrigger = ReadoutDataManager.getCurrentTime(); - } - } - - /** - * Issues a trigger to the data manager so long as the trigger is - * not presently in dead time. - * @param trigger type - */ - protected void sendTrigger(String triggerType) { - if(!isInDeadTime()) { - ReadoutDataManager.sendTrigger(this, triggerType); - lastTrigger = ReadoutDataManager.getCurrentTime(); - } - } - - /** - * Issues a trigger to the data manager so long as the trigger is - * not presently in dead time. - * @param trigger type - * @param top/bot singles trigger - */ - protected void sendTrigger(String triggerType, String topBot) { - if(!isInDeadTime()) { - ReadoutDataManager.sendTrigger(this, triggerType, topBot); - lastTrigger = ReadoutDataManager.getCurrentTime(); - } - } - - /** - * Sets the dead time for the trigger. - * @param samples - The amount of time (in events) before another - * trigger is allowed to occur. 
- */ - public void setDeadTime(int samples) { - deadTime = samples * ReadoutDataManager.getBeamBunchSize(); - } - - @Override - public void setPersistent(boolean state) { - throw new UnsupportedOperationException(); - } - - @Override - public void setReadoutWindowAfter(double value) { - throw new UnsupportedOperationException(); - } - - @Override - public void setReadoutWindowBefore(double value) { - throw new UnsupportedOperationException(); - } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/RawConverterNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/RawConverterNoSpacingReadoutDriver.java deleted file mode 100755 index a13db77be8..0000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/RawConverterNoSpacingReadoutDriver.java +++ /dev/null @@ -1,259 +0,0 @@ -package org.hps.digi.nospacing; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import org.hps.readout.ReadoutDriver; -import org.hps.readout.rawconverter.AbstractMode3RawConverter; -import org.hps.readout.util.collection.LCIOCollectionFactory; -import org.lcsim.event.CalorimeterHit; -import org.lcsim.event.EventHeader; -import org.lcsim.event.RawCalorimeterHit; -import org.lcsim.geometry.Detector; -import org.lcsim.lcio.LCIOConstants; -import org.hps.readout.ReadoutDataManager; -/** - * RawConverterNoSpacingReadoutDriver processes ADC hit data - * objects and converts them to energy hit objects. It serves as an - * interface to a {@link - * org.hps.readout.rawconverter.AbstractMode3RawConverter - * AbstractMode3RawConverter} object, where the actual conversion is - * performed. - *

- * RawConverterNoSpacingReadoutDriver itself is abstract - it - * requires that implementing classes handle any subdetector-specific - * functionality. - */ -public abstract class RawConverterNoSpacingReadoutDriver extends ReadoutDriver { - /** - * Sets the name of the input {@link - * org.lcsim.event.RawCalorimeterHit RawCalorimeterHit} - * collection. - */ - private String inputCollectionName; - - /** - * Sets the name of the output {@link - * org.lcsim.event.CalorimeterHit CalorimeterHit} collection. - */ - private String outputCollectionName; - - /** - * Tracks the current local time in nanoseconds for this driver. - */ - private double localTime = 0.0; - - //size to look for hits in 4ns clock ticks - private double EVENT_WINDOW=48; - - /** - * Indicates whether channels that are marked as "bad" in the - * conditions database should be skipped when producing hits. - */ - protected boolean skipBadChannels = false; - - protected boolean checkInput = false; - - protected RawConverterNoSpacingReadoutDriver(String defaultInputCollectionName, String defaultOutputCollectionName) { - inputCollectionName = defaultInputCollectionName; - outputCollectionName = defaultOutputCollectionName; - } - - @Override - public final void detectorChanged(Detector detector) { - // Allow implementing drivers to catch the detector changed - // event, if needed. - updateDetectorDependentParameters(detector); - - // Update the converter. - getConverter().updateDetector(detector); - - // Update the readout name for the managed collection. - ReadoutDataManager.updateCollectionReadoutName(outputCollectionName, CalorimeterHit.class, getSubdetectorReadoutName(detector)); - } - - @Override - public final void process(EventHeader event) { - // Check the data management driver to determine whether the - // input collection is available or not. 
- if(checkInput&&!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + 4.0)) { - // System.out.println(this.getClass().getName()+":: checkInput or Collection status Failed"); - return; - } - - // Get all of the raw hits in the current clock-cycle. - // Collection rawHits = ReadoutDataManager.getData(localTime, localTime + 4.0, inputCollectionName, RawCalorimeterHit.class); - Collection rawHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime(), ReadoutDataManager.getCurrentTime() + 4.0*EVENT_WINDOW, inputCollectionName, RawCalorimeterHit.class); - // System.out.println("RawConverter:: "+ inputCollectionName+" local time = "+localTime+" number of seeds = "+rawHits.size()); - // Increment the local time. - // localTime += 4.0; - - // Pass the raw hits to the raw converter to obtain proper - // calorimeter hits. In readout, raw hits are always Mode-3, - // so there is no need to check the form. - List newHits = new ArrayList(); - - for(RawCalorimeterHit hit : rawHits) { - // Convert the raw hit. - CalorimeterHit newHit = getConverter().convertHit(hit, 0.0); - - // If the hit is on a bad channel, and these are set to - // be skipped, ignore the hit. Otherwise, add it to the - // output list. - if(skipBadChannels && isBadChannel(newHit.getCellID())) { - continue; - } - - // Add the new hit. - newHits.add(newHit); - } - // System.out.println("RawConverter:: "+ outputCollectionName+" adding new hits with size = "+newHits.size()+" at time = "+localTime); - // Add the calorimeter hit collection to the data manager. - ReadoutDataManager.addData(outputCollectionName, newHits, CalorimeterHit.class); - // Increment the local time for real. - localTime += 4.0*125; - - } - - @Override - public void startOfData() { - // Set the LCIO flags for the output collection. Flags are - // set to store the hit time and hit position respectively. 
- int flags = 0; - flags += 1 << LCIOConstants.RCHBIT_TIME; - flags += 1 << LCIOConstants.RCHBIT_LONG; - - // Define the LCSim collection parameters for this driver's - // output. - LCIOCollectionFactory.setCollectionName(outputCollectionName); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollectionFactory.setFlags(flags); - - // Set the dependencies for the driver and register its - // output collections with the data management driver. - addDependency(inputCollectionName); - - // Register the output collection. - ReadoutDataManager.registerCollection(LCIOCollectionFactory.produceLCIOCollection(CalorimeterHit.class), isPersistent(), - getReadoutWindowBefore(), getReadoutWindowAfter()); - } - - /** - * Gets the {@link org.hps.readout.ReadoutRawConverter - * ReadoutRawConverter} object used to convert hits for this - * subdetector. - * @return Returns the raw converter. - */ - protected abstract AbstractMode3RawConverter getConverter(); - - /** - * Gets the readout name for this subdetector from the geometry. - * @param detector - The geometry object. - * @return Returns the subdetector readout name. - */ - protected abstract String getSubdetectorReadoutName(Detector detector); - - @Override - protected final double getTimeDisplacement() { - return 0; - } - - @Override - protected final double getTimeNeededForLocalOutput() { - return 0; - } - - /** - * Indicates whether or not the channel on which a hit occurs is - * a "bad" channel according to the conditions database. - * @param hit - The hit to check. - * @return Returns true if the hit channel is - * flagged as "bad" and false otherwise. - * @throws UnsupportedOperationException Occurs if the - * subdetector represented by the driver does not support bad - * channel exclusion. 
- */ - protected boolean isBadChannel(long channelID) { - throw new UnsupportedOperationException("Driver \"" + getClass().getSimpleName() + "\" does not support bad channel exclusion."); - } - - /** - * Updates any detector-specific parameters needed by the - * implementing class. - * @param detector - The current detector geometry. - */ - protected abstract void updateDetectorDependentParameters(Detector detector); - - /** - * Sets the name of the input collection containing the objects - * of type {@link org.lcsim.event.RawCalorimeterHit - * RawCalorimeterHit} that are output by the digitization driver. - * @param collection - The name of the input raw hit collection. - */ - public void setInputCollectionName(String collection) { - inputCollectionName = collection; - } - - /** - * Sets the number of integration samples that should be included - * in a pulse integral after the threshold-crossing event. - * @param samples - The number of samples, where a sample is a - * 4 ns clock-cycle. - */ - public void setNumberSamplesAfter(int samples) { - getConverter().setNumberSamplesAfter(4 * samples); - } - - /** - * Sets the number of integration samples that should be included - * in a pulse integral before the threshold-crossing event. - * @param samples - The number of samples, where a sample is a - * 4 ns clock-cycle. - */ - public void setNumberSamplesBefore(int samples) { - getConverter().setNumberSamplesBefore(4 * samples); - } - - /** - * Sets factor of unit conversion for returned value of the method - * AbstractBaseRawConverter::adcToEnergy(). - * @param factor of unit conversion - */ - public void setFactorUnitConversion(double factor) { - getConverter().setFactorUnitConversion(factor); - } - - /** - * Sets the name of the output collection containing the objects - * of type {@link org.lcsim.event.CalorimeterHit CalorimeterHit} - * that are output by this driver. - * @param collection - The name of the output hit collection. 
- */ - public void setOutputCollectionName(String collection) { - outputCollectionName = collection; - } - - /** - * Indicates whether or not data from channels flagged as "bad" - * in the conditions system should be ignored. true - * indicates that they should be ignored, and false - * that they should not. - * @param apply - true indicates that "bad" channels - * will be ignored and false that they will not. - * @throws UnsupportedOperationException Occurs if the - * subdetector represented by the driver does not support bad - * channel exclusion. - */ - public void setSkipBadChannels(boolean state) { - throw new UnsupportedOperationException("Driver \"" + getClass().getSimpleName() + "\" does not support bad channel exclusion."); - } - - /** - * Sets the size of the ADC buffer. This is needed for proper - * handling of Mode-3 hits in the raw converter. - * @param window - The buffer size in units of 4 ns clock-cycles. - */ - public void setReadoutWindow(int window) { - getConverter().setWindowSamples(window); - } -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/SinglesTrigger2019NoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/SinglesTrigger2019NoSpacingReadoutDriver.java deleted file mode 100644 index bf7460e074..0000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/SinglesTrigger2019NoSpacingReadoutDriver.java +++ /dev/null @@ -1,415 +0,0 @@ -package org.hps.digi.nospacing; - -import java.util.Collection; -import java.util.List; -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.util.ArrayList; - -import org.hps.readout.ReadoutDataManager; -//import org.hps.digi.nospacing.NoSpacingTriggerDriver; -import org.hps.readout.TriggerDriver; -import org.hps.record.daqconfig2019.ConfigurationManager2019; -import org.hps.record.daqconfig2019.DAQConfig2019; -import org.hps.record.triggerbank.TriggerModule2019; -import org.lcsim.event.Cluster; -import org.lcsim.event.EventHeader; -import 
org.lcsim.geometry.Detector; -import org.lcsim.geometry.subdetector.HPSEcal3; -import org.lcsim.util.aida.AIDA; - -import org.hps.readout.util.HodoscopePattern; - -import hep.aida.IHistogram1D; -import hep.aida.IHistogram2D; - -/** - * SinglesTrigger2019NoSpacingReadoutDriver simulates an HPS singles trigger - * for 2019 MC. It takes in clusters produced by the - * {@link org.hps.readout.ecal.updated.GTPClusterReadoutDriver - * GTPClusterReadoutDriver} and hodoscope patterns produced by the - * {@link HodoscopePatternReadoutDriver}, and perform the necessary trigger - * logic on them. If a trigger is detected, it is sent to the readout data - * manager so that a triggered readout event may be written. - */ -public class SinglesTrigger2019NoSpacingReadoutDriver extends TriggerDriver { - // ============================================================== - // ==== LCIO Collections ======================================== - // ============================================================== - /** - * Indicates singles trigger type. Corresponding DAQ configuration is accessed by DAQ - * configuration system, and applied into readout. - */ - private String triggerType = "singles3"; - - /** - * Indicates the name of the calorimeter geometry object. This is - * needed to allow access to the calorimeter channel listings. - */ - private String ecalGeometryName = "Ecal"; - /** - * Specifies the name of the LCIO collection containing the input - * GTP clusters that are used for triggering. - */ - private String inputCollectionNameEcal = "EcalClustersGTP"; - - private String inputCollectionNameHodo = "HodoscopePatterns"; - - // ============================================================== - // ==== Driver Options ========================================== - // ============================================================== - - /** - * Specifies the beam energy for the input data. This defines the - * limits of the energy trigger plots and has no further effect. 
- */ - private double beamEnergy = 4.55; - /** - * Stores the trigger settings and performs trigger logic. - */ - private TriggerModule2019 triggerModule = new TriggerModule2019(); - - private double ecalTimeDisplacement = 20.0; //ns - private double hodoTimeDisplacement = 4.0; //ns - - boolean requireHodo=true; - - // ============================================================== - // ==== Driver Parameters ======================================= - // ============================================================== - - /** - * Tracks the current local time in nanoseconds for this driver. - */ - private double localTime = 0.0; - /** - * Stores a reference to the calorimeter subdetector model. This - * is needed to extract the crystal indices from the cell ID. - */ - private HPSEcal3 ecal = null; - /** - * Defines the size of an energy bin for trigger output plots. - */ - private static final double BIN_SIZE = 0.025; - - - // ============================================================== - // ==== AIDA Plots ============================================== - // ============================================================== - - private AIDA aida = AIDA.defaultInstance(); - private static final int NO_CUTS = 0; - private static final int WITH_CUTS = 1; - private IHistogram1D[] clusterSeedEnergy = new IHistogram1D[2]; - private IHistogram1D[] clusterHitCount = new IHistogram1D[2]; - private IHistogram1D[] clusterTotalEnergy = new IHistogram1D[2]; - private IHistogram2D[] clusterDistribution = new IHistogram2D[2]; - - /** - * Sets whether or not the DAQ configuration is applied into the driver - * the EvIO data stream or whether to read the configuration from data files. - * - * @param state - true indicates that the DAQ configuration is - * applied into the readout system, and false that it - * is not applied into the readout system. 
- */ - public void setDaqConfigurationAppliedintoReadout(boolean state) { - // If the DAQ configuration should be read, attach a listener - // to track when it updates. - if (state) { - ConfigurationManager2019.addActionListener(new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - // Get the DAQ configuration. - DAQConfig2019 daq = ConfigurationManager2019.getInstance(); - if(triggerType.contentEquals(SINGLES3)) triggerModule.loadDAQConfiguration(daq.getVTPConfig().getSingles3Config()); - else if(triggerType.equals(SINGLES2)) triggerModule.loadDAQConfiguration(daq.getVTPConfig().getSingles2Config()); - else if(triggerType.equals(SINGLES1)) triggerModule.loadDAQConfiguration(daq.getVTPConfig().getSingles1Config()); - else if(triggerType.equals(SINGLES0)) triggerModule.loadDAQConfiguration(daq.getVTPConfig().getSingles0Config()); - } - }); - } - } - - @Override - public void detectorChanged(Detector detector) { - // Get the calorimeter sub-detector. - org.lcsim.geometry.compact.Subdetector ecalSub = detector.getSubdetector(ecalGeometryName); - if(ecalSub instanceof HPSEcal3) { - ecal = (HPSEcal3) ecalSub; - } else { - throw new IllegalStateException("Error: Unexpected calorimeter sub-detector of type \"" + ecalSub.getClass().getSimpleName() + "; expected HPSEcal3."); - } - } - - @Override - public void process(EventHeader event) { - // Check that clusters are available for the trigger. 
- Collection clusters = null; - Collection hodoPatterns = null; - ArrayList hodoPatternList = null; - // System.out.println(this.getClass().getName()+":: starting process"); - if(ReadoutDataManager.checkCollectionStatus(inputCollectionNameEcal, ReadoutDataManager.getCurrentTime()-ecalTimeDisplacement) && ReadoutDataManager.checkCollectionStatus(inputCollectionNameHodo, ReadoutDataManager.getCurrentTime()-hodoTimeDisplacement)) { - clusters = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime()-ecalTimeDisplacement, ReadoutDataManager.getCurrentTime() -ecalTimeDisplacement+ 192.0, inputCollectionNameEcal, Cluster.class); - hodoPatterns = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime()-hodoTimeDisplacement, ReadoutDataManager.getCurrentTime() -hodoTimeDisplacement+ 192.0, inputCollectionNameHodo, HodoscopePattern.class); - - // System.out.println(this.getClass().getName()+":: number of gtp clusters = "+clusters.size()); - // System.out.println(this.getClass().getName()+":: number of hodo patterns = "+hodoPatterns.size()); - if(clusters.size() == 0){ - // System.out.println(this.getClass().getName()+":: quitting because no gtp clusters"); - return; - } - - if( requireHodo&&hodoPatterns.size() == 0){ - // System.out.println(this.getClass().getName()+":: quitting because no hodo patterns"); - return; - } - hodoPatternList = new ArrayList<>(hodoPatterns); - - } else { - System.out.println(this.getClass().getName()+":: cluster or hodo collection doesn't exist"); - return; - } - - // Track whether or not a trigger was seen. - boolean triggered = false; - - // There is no need to perform the trigger cuts if the - // trigger is in dead time, as no trigger may be issued - // regardless of the outcome. - if(isInDeadTime()) { - System.out.println(this.getClass().getName()+":: I'm in deadtime ... 
bailing"); - return; - } - - // Record top/bot status for singles triggers - List topBot = new ArrayList(); - - // Plot the trigger distributions before trigger cuts are - // performed. - for(Cluster cluster : clusters) { - // Get the x and y indices. Note that LCSim meta data is - // not available during readout, so crystal indices must - // be obtained directly from the calorimeter geometry. - java.awt.Point ixy = ecal.getCellIndices(cluster.getCalorimeterHits().get(0).getCellID()); - - // Populate the uncut plots. - clusterSeedEnergy[NO_CUTS].fill(TriggerModule2019.getValueClusterSeedEnergy(cluster)); - clusterTotalEnergy[NO_CUTS].fill(TriggerModule2019.getValueClusterTotalEnergy(cluster)); - clusterHitCount[NO_CUTS].fill(TriggerModule2019.getClusterHitCount(cluster)); - clusterDistribution[NO_CUTS].fill(ixy.x, ixy.y); - - // Perform the hit count cut. - if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_HIT_COUNT_LOW_EN) && !triggerModule.clusterHitCountCut(cluster)) { - // System.out.println(this.getClass().getName()+":: this cluster has too few hits ... continue"); - continue; - } - - // Perform the cluster energy cut. - if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_TOTAL_ENERGY_LOW_EN) && !triggerModule.clusterTotalEnergyCutLow(cluster)) { - // System.out.println(this.getClass().getName()+":: this cluster has too low an energy ... continue"); - - continue; - } - - if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_TOTAL_ENERGY_HIGH_EN) && !triggerModule.clusterTotalEnergyCutHigh(cluster)) { - // System.out.println(this.getClass().getName()+":: this cluster has too HIGH an energy ... continue"); - - continue; - } - // System.out.println(this.getClass().getName()+":: this cluster survived!"); - // In the setup calorimeter geometry, range of X coordinates is [-23, -1] and [1, 23]. - // The hardware uses cluster X coordinates [-22,0] and [1,23]. - int clusterX = ixy.x; - if(clusterX < 0) clusterX++; - - int clusterY = ixy.y; - - // XMin is at least 0. 
- if(!triggerModule.getCutEn(TriggerModule2019.SINGLES_MOLLERMODE_EN)) { - if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_XMIN_EN) && !triggerModule.clusterXMinCut(clusterX)) { - // System.out.println(this.getClass().getName()+":: trigger type = "+triggerType+" no trigger because cluster X failed"); - continue; - } - - // XMin cut has been applied. - if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_PDE_EN) && !triggerModule.clusterPDECut(cluster, clusterX)) { - // System.out.println(this.getClass().getName()+":: trigger type = "+triggerType+" no trigger because cluster Energy vs X failed"); - continue; - } - } - - if(triggerModule.getCutEn(TriggerModule2019.SINGLES_L1L2ECAL_MATCHING_EN) && !triggerModule.geometryMatchingCut(clusterX, ixy.y, hodoPatternList)) { - // System.out.println(this.getClass().getName()+":: trigger type = "+triggerType+"no trigger because hodo matching failed"); - continue; - } - - //For 2021 update, Moller triggers - if(triggerModule.getCutEn(TriggerModule2019.SINGLES_MOLLERMODE_EN)) { - if(triggerModule.getCutEn(TriggerModule2019.SINGLES_XYMINMAX_EN) && !triggerModule.clusterXMinCut(clusterX)) { - continue; - } - if(triggerModule.getCutEn(TriggerModule2019.SINGLES_XYMINMAX_EN) && !triggerModule.clusterXMaxCut(clusterX)) { - continue; - } - if(triggerModule.getCutEn(TriggerModule2019.SINGLES_XYMINMAX_EN) && !triggerModule.clusterYMinCut(clusterY)) { - continue; - } - if(triggerModule.getCutEn(TriggerModule2019.SINGLES_XYMINMAX_EN) && !triggerModule.clusterYMaxCut(clusterY)) { - continue; - } - if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_PDE_EN) && !triggerModule.clusterMollerPDECut(cluster, clusterX)) { - continue; - } - } - // System.out.println(this.getClass().getName()+":: found a trigger!!!"); - - // Note that a trigger occurred. - triggered = true; - - if(ixy.y > 0) topBot.add(TOP); - else topBot.add(BOT); - - // Populate the cut plots. 
- clusterSeedEnergy[WITH_CUTS].fill(TriggerModule2019.getValueClusterSeedEnergy(cluster)); - clusterTotalEnergy[WITH_CUTS].fill(TriggerModule2019.getValueClusterTotalEnergy(cluster)); - clusterHitCount[WITH_CUTS].fill(TriggerModule2019.getClusterHitCount(cluster)); - clusterDistribution[WITH_CUTS].fill(ixy.x, ixy.y); - } - - if(triggered) { - boolean topStat = false; - boolean botStat = false; - if(topBot.contains(TOP)) topStat = true; - if(topBot.contains(BOT)) botStat = true; - // System.out.println(this.getClass().getName()+":: Sending Trigger"); - if(topStat && botStat) sendTrigger(triggerType, TOPBOT); - else if(topStat) sendTrigger(triggerType, TOP); - else sendTrigger(triggerType, BOT); - } - } - - @Override - public void startOfData() { - // Define the driver collection dependencies. - addDependency(inputCollectionNameEcal); - - addDependency(inputCollectionNameHodo); - - // Register the trigger. - ReadoutDataManager.registerTrigger(this); - - // Set the plot range based on the beam energy. - int bins = (int) Math.ceil((beamEnergy * 1.1) / BIN_SIZE); - double xMax = bins * BIN_SIZE; - - // Instantiate the trigger plots. - String[] postscripts = { " (No Cuts)", " (With Cuts)" }; - for(int i = NO_CUTS; i <= WITH_CUTS; i++) { - clusterSeedEnergy[i] = aida.histogram1D("Trigger Plots\\Cluster Seed Energy Distribution" + postscripts[i], bins, 0.0, xMax); - clusterHitCount[i] = aida.histogram1D("Trigger Plots\\Cluster Hit Count Distribution" + postscripts[i], 10, -0.5, 9.5); - clusterTotalEnergy[i] = aida.histogram1D("Trigger Plots\\Cluster Total Energy Distribution" + postscripts[i], bins, 0.0, xMax); - clusterDistribution[i] = aida.histogram2D("Trigger Plots\\Cluster Seed Distribution" + postscripts[i], 46, -23, 23, 11, -5.5, 5.5); - } - - // Run the superclass method. 
- super.startOfData(); - } - - @Override - protected double getTimeDisplacement() { - return 0; - } - - @Override - protected double getTimeNeededForLocalOutput() { - return 0; - } - - /** - * Defines the name of the calorimeter geometry specification. By - * default, this is "Ecal". - * @param ecalName - The calorimeter name. - */ - public void setEcalGeometryName(String value) { - ecalGeometryName = value; - } - - /** - * Sets the name of the LCIO collection from which clusters are - * drawn. - * @param collection - The name of the LCIO collection. - */ - public void setInputCollectionNameEcal(String collection) { - inputCollectionNameEcal = collection; - } - - public void setInputCollectionNameHodo(String collection) { - inputCollectionNameHodo = collection; - } - - public void setTriggerType(String trigger) { - if(!trigger.equals(SINGLES0) && !trigger.equals(SINGLES1) && !trigger.equals(SINGLES2) && !trigger.equals(SINGLES3)) - throw new IllegalArgumentException("Error: wrong trigger type name \"" + trigger + "\"."); - triggerType = trigger; - } - - /** - * Sets the beam energy for the trigger. This is only used to - * determine the range of the x-axis for trigger plots. - * @param value - The beam energy of the input data, in units of - * GeV. - */ - public void setBeamEnergy(double value) { - beamEnergy = value; - } - - /** - * Sets the minimum hit count threshold for the trigger. This - * value is inclusive. - * @param hitCountThreshold - The value of the threshold. - */ - public void setHitCountThreshold(int hitCountThreshold) { - triggerModule.setCutValue(TriggerModule2019.CLUSTER_HIT_COUNT_LOW, hitCountThreshold); - } - - /** - * Sets the lower bound for the cluster energy threshold on the - * trigger. This value is inclusive. - * @param clusterEnergyLow - The value of the threshold. 
- */ - public void setClusterEnergyLowThreshold(double clusterEnergyLow) { - triggerModule.setCutValue(TriggerModule2019.CLUSTER_TOTAL_ENERGY_LOW, clusterEnergyLow); - } - - /** - * Sets the upper bound for the cluster energy threshold on the - * trigger. This value is inclusive. - * @param clusterEnergyHigh - The value of the threshold. - */ - public void setClusterEnergyHighThreshold(double clusterEnergyHigh) { - triggerModule.setCutValue(TriggerModule2019.CLUSTER_TOTAL_ENERGY_HIGH, clusterEnergyHigh); - } - - - public void setClusterXMin(double xMin) { - triggerModule.setCutValue(TriggerModule2019.CLUSTER_XMIN, xMin); - } - - public void setClusterPDEC0(double pdeC0) { - triggerModule.setCutValue(TriggerModule2019.CLUSTER_PDE_C0, pdeC0); - } - - public void setClusterPDEC1(double pdeC1) { - triggerModule.setCutValue(TriggerModule2019.CLUSTER_PDE_C1, pdeC1); - } - - public void setClusterPDEC2(double pdeC2) { - triggerModule.setCutValue(TriggerModule2019.CLUSTER_PDE_C2, pdeC2); - } - - public void setClusterPDEC3(double pdeC3) { - triggerModule.setCutValue(TriggerModule2019.CLUSTER_PDE_C3, pdeC3); - } - -} diff --git a/digi/src/main/java/org/hps/digi/nospacing/SvtDigiWithPulserNoSpacingReadoutDriver.java b/digi/src/main/java/org/hps/digi/nospacing/SvtDigiWithPulserNoSpacingReadoutDriver.java deleted file mode 100755 index f8673ec37d..0000000000 --- a/digi/src/main/java/org/hps/digi/nospacing/SvtDigiWithPulserNoSpacingReadoutDriver.java +++ /dev/null @@ -1,867 +0,0 @@ -package org.hps.digi.nospacing; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.PriorityQueue; -import java.util.Set; -import org.hps.conditions.database.DatabaseConditionsManager; -import org.hps.conditions.svt.SvtTimingConstants; -import org.hps.readout.svt.HPSSVTConstants; -import org.lcsim.detector.tracker.silicon.ChargeCarrier; -import org.lcsim.detector.tracker.silicon.HpsSiSensor; -import 
org.lcsim.detector.tracker.silicon.SiSensor; -import org.lcsim.geometry.Detector; -import org.lcsim.lcio.LCIOConstants; -import org.lcsim.event.EventHeader; -import org.lcsim.event.LCRelation; -import org.lcsim.event.MCParticle; -import org.lcsim.event.RawTrackerHit; -import org.lcsim.event.SimTrackerHit; -import org.lcsim.event.base.BaseLCRelation; -import org.lcsim.event.base.BaseRawTrackerHit; -import org.lcsim.recon.tracking.digitization.sisim.CDFSiSensorSim; -import org.lcsim.recon.tracking.digitization.sisim.SiElectrodeData; -import org.lcsim.recon.tracking.digitization.sisim.SiElectrodeDataCollection; -import org.lcsim.recon.tracking.digitization.sisim.SiSensorSim; -import org.lcsim.recon.tracking.digitization.sisim.config.SimTrackerHitReadoutDriver; -import org.hps.readout.ReadoutDataManager; -import org.hps.readout.ReadoutDriver; -import org.hps.readout.ReadoutTimestamp; -import org.hps.readout.util.collection.LCIOCollection; -import org.hps.readout.util.collection.LCIOCollectionFactory; -import org.hps.readout.util.collection.TriggeredLCIOData; -import org.hps.recon.tracking.PulseShape; -import org.hps.util.RandomGaussian; - -/** - * SVT readout simulation. 
- * - * @author Sho Uemura - */ -public class SvtDigiWithPulserNoSpacingReadoutDriver extends ReadoutDriver { - //-----------------// - //--- Constants ---// - //-----------------// - private static final String SVT_SUBDETECTOR_NAME = "Tracker"; - private PulseShape shape = new PulseShape.FourPole(); - - private SimTrackerHitReadoutDriver readoutDriver = new SimTrackerHitReadoutDriver(); - private SiSensorSim siSimulation = new CDFSiSensorSim(); - private Map[]> hitMap = new HashMap[]>(); - private Map[]> pulserHitMap = new HashMap[]>(); - private List sensors = null; - - // readout period time offset in ns - private double readoutOffset = 0.0; - private double readoutLatency = 280.0; - // private double pileupCutoff = 300.0; - private double pileupCutoff = 0.0; - private String readout = "TrackerHits"; - private double timeOffset = 30.0; - private boolean noPileup = false; - private boolean addNoise = true; - - private boolean useTimingConditions = false; - - // cut settings - private boolean enableThresholdCut = true; - private int samplesAboveThreshold = 3; - private double noiseThreshold = 2.0; - private boolean enablePileupCut = true; - private boolean dropBadChannels = true; - private boolean debug_=false; - - // Collection Names - private String outputCollection = "SVTRawTrackerHits"; - private String relationCollection = "SVTTrueHitRelations"; - - private LCIOCollection trackerHitCollectionParams; - private LCIOCollection truthRelationsCollectionParams; - private LCIOCollection truthHitsCollectionParams; - /** - * The name of the input {@link org.lcsim.event.RawTrackerHit - * RawTrackerHit} collection from pulser data. - */ - private String pulserDataCollectionName = "SVTRawTrackerHits"; - - public SvtDigiWithPulserNoSpacingReadoutDriver() { - add(readoutDriver); - } - - /** - * Indicates whether or not noise should be simulated when analog - * hits are generated. 
- * @param addNoise - true adds noise simulation to - * analog hits, while false uses only contributions - * from pulses generated from truth data. - */ - public void setAddNoise(boolean addNoise) { - this.addNoise = addNoise; - } - - /** - * Indicates whether hits consistent with pile-up effects should - * be dropped or not. A hit is considered to be consistent with - * pile-up effects if its earlier sample indices are larger than - * the later ones, suggesting that it includes the trailing end - * of another pulse from earlier in time. - * @param enablePileupCut - true enables the cut and - * drops pile-up hits, while false disables the cut - * and retains them. - */ - public void setEnablePileupCut(boolean enablePileupCut) { - this.enablePileupCut = enablePileupCut; - } - - /** - * Indicates whether noisy analog hits should be retained in - * readout. Hits are required to have a certain number of samples - * that exceeds a programmable noise threshold. The required - * number of samples may be set by the method {@link - * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#setSamplesAboveThreshold(int) - * setSamplesAboveThreshold(int)} and the noise threshold may be - * set with the method {@link - * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#setNoiseThreshold(double) - * setNoiseThreshold(double)}. - * @param enableThresholdCut - true enables the cut - * and drops noisy hits, while false disables the - * cut and retains them. - */ - public void setEnableThresholdCut(boolean enableThresholdCut) { - this.enableThresholdCut = enableThresholdCut; - } - - /** - * Sets the noise threshold used in conjunction with the sample - * threshold cut. The cut is enabled or disabled via the method - * {@link - * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#setEnableThresholdCut(boolean) - * setEnableThresholdCut(boolean)}. - * @param noiseThreshold - The noise threshold. 
- */ - public void setNoiseThreshold(double noiseThreshold) { - this.noiseThreshold = noiseThreshold; - } - - /** - * Sets the number of smaples that must be above the noise - * threshold as employed by the sample threshold cut. The cut is - * enabled or disabled via the method {@link - * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#setEnableThresholdCut(boolean) - * setEnableThresholdCut(boolean)}. - * @param samplesAboveThreshold - The number of samples. Only six - * samples are used, so values above six will result in every hit - * being rejected. Values of zero or lower will result in the - * acceptance of every hit. Threshold cut is inclusive. - */ - public void setSamplesAboveThreshold(int samplesAboveThreshold) { - this.samplesAboveThreshold = samplesAboveThreshold; - } - - /** - * Indicates whether pile-up should be simulated. If set to - * false, analog hits are generated from the truth - * hits of a given event individually, with no contribution from - * neighboring events included. If set to true, data - * from multiple events is included. - * @param noPileup - true uses data from neighboring - * events when generating analog hits, while false - * uses only contributions from a single event. - */ - public void setNoPileup(boolean noPileup) { - this.noPileup = noPileup; - } - - /** - * Specifies whether analog hits which occur on "bad" channels - * should be included in readout data or not. - * @param dropBadChannels - true means that "bad" - * channel hits will be excluded from readout, while - * false means that they will be retained. - */ - public void setDropBadChannels(boolean dropBadChannels) { - this.dropBadChannels = dropBadChannels; - } - - /** - * Set the readout latency. This does not directly correspond to - * any internal function in the readout simulation, but affects - * what range of SVT ADC values are output around the trigger. It - * is retained to allow a matching to the hardware function. 
- * @param readoutLatency - The readout latency to use. - */ - public void setReadoutLatency(double readoutLatency) { - this.readoutLatency = readoutLatency; - } - - /** - * Sets whether to use manually defined timing conditions, or if - * they should be loaded from the conditions database. - * @param useTimingConditions - true uses the values - * from the database, and false the manually defined - * values. - */ - public void setUseTimingConditions(boolean useTimingConditions) { - this.useTimingConditions = useTimingConditions; - } - - /** - * Sets the pulse shape to be used when emulating the analog hit - * response. Valid options are CRRC and - * FourPole. - * @param pulseShape - The pulse shape to be used. - */ - public void setPulseShape(String pulseShape) { - switch (pulseShape) { - case "CR-RC": - shape = new PulseShape.CRRC(); - break; - case "FourPole": - shape = new PulseShape.FourPole(); - break; - default: - throw new RuntimeException("Unrecognized pulseShape: " + pulseShape); - } - } - /** - * Sets the name of the input pulser data collection name. - * @param collection - The collection name. - */ - public void setPulserDataCollectionName(String collection) { - this.pulserDataCollectionName = collection; - } - - @Override - public void detectorChanged(Detector detector) { - // TODO: What does this "SimTrackerHitReadoutDriver" do? - String[] readouts = { readout }; - readoutDriver.setCollections(readouts); - - // Get the collection of all silicon sensors from the SVT. - sensors = detector.getSubdetector(SVT_SUBDETECTOR_NAME).getDetectorElement().findDescendants(HpsSiSensor.class); - - // If pile-up simulation is disabled, instantiate all - // possible processing queues. For the pile-up simulation, - // these are generated as needed. 
- if(!noPileup) { - for(HpsSiSensor sensor : sensors) { - @SuppressWarnings("unchecked") - int nChans=640; - if(sensor.getNumberOfChannels()==510) - nChans=512; - //really dumb way to account for channels not read out - PriorityQueue[] hitQueues = new PriorityQueue[nChans]; - PriorityQueue[] pulserHitQueues = new PriorityQueue[nChans]; - hitMap.put(sensor, hitQueues); - pulserHitMap.put(sensor, pulserHitQueues); - } - } - - // Load timing conditions from the conditions database, if - // this is requested. - if(useTimingConditions) { - SvtTimingConstants timingConstants = DatabaseConditionsManager.getInstance().getCachedConditions(SvtTimingConstants.SvtTimingConstantsCollection.class, "svt_timing_constants").getCachedData().get(0); - readoutOffset = 4 * (timingConstants.getOffsetPhase() + 3); - // readoutLatency = 248.0 + timingConstants.getOffsetTime(); - readoutLatency = readoutLatency + timingConstants.getOffsetTime(); - System.out.println(this.getClass().getName()+":: readout offset = "+readoutOffset+" latency = "+readoutLatency); - } - } - - @Override - public void process(EventHeader event) { - super.process(event); - // get the pulser hits - Collection rawHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime(), ReadoutDataManager.getCurrentTime() + 2.0, pulserDataCollectionName, RawTrackerHit.class); - // Generate the truth hits. - List stripHits = doSiSimulation(); - List pulserStripHits=makePulserStripHits(rawHits); - if(debug_){ - System.out.println("In SvtDigi:: Current time is = "+ReadoutDataManager.getCurrentTime()); - System.out.println("Number of Sim StripHits for this bunch is "+stripHits.size()); - } - - if(!noPileup) { - // Process each of the pulser hits - for (StripHit pulserHit : pulserStripHits) { - // Get the sensor and channel for the pulser hit. - HpsSiSensor sensor = (HpsSiSensor) pulserHit.sensor; - int channel = pulserHit.channel; - // Queue the hit in the processing queue appropriate - // to its sensor and channel. 
- PriorityQueue[] pulserHitQueues = pulserHitMap.get(sensor); - if(pulserHitQueues[channel] == null) { - pulserHitQueues[channel] = new PriorityQueue(); - } - pulserHitQueues[channel].add(pulserHit); - } - - // Process each of the truth hits - for (StripHit stripHit : stripHits) { - // Get the sensor and channel for the truth hit. - HpsSiSensor sensor = (HpsSiSensor)stripHit.sensor; - int channel = stripHit.channel; - // Queue the hit in the processing queue appropriate - // to its sensor and channel. - PriorityQueue[] hitQueues = hitMap.get(sensor); - if(hitQueues[channel] == null) { - hitQueues[channel] = new PriorityQueue(); - } - hitQueues[channel].add(stripHit); - } - - // Hits older than a certain time frame should no longer - // be used for pile-up simulation and should be removed - // from the processing queues. - for(SiSensor sensor : sensors) { - // Get the processing queue for the current sensor. - PriorityQueue[] pulserHitQueues = pulserHitMap.get(sensor); - // Check each hit to see if it is still in-time. - for(int i = 0; i < pulserHitQueues.length; i++) { - if(pulserHitQueues[i] != null) { - // Remove old hits. - while(!pulserHitQueues[i].isEmpty() && pulserHitQueues[i].peek().time < ReadoutDataManager.getCurrentTime() - (readoutLatency + pileupCutoff)) { - pulserHitQueues[i].poll(); - } - // If the queue is empty, remove it. - if(pulserHitQueues[i].isEmpty()) { pulserHitQueues[i] = null; } - } - } - - // Get the processing queue for the current sensor. - PriorityQueue[] hitQueues = hitMap.get(sensor); - // Check each hit to see if it is still in-time. - for(int i = 0; i < hitQueues.length; i++) { - if(hitQueues[i] != null) { - // Remove old hits. - while(!hitQueues[i].isEmpty() && hitQueues[i].peek().time < ReadoutDataManager.getCurrentTime() - (readoutLatency + pileupCutoff)) { - hitQueues[i].poll(); - } - // If the queue is empty, remove it. 
- if(hitQueues[i].isEmpty()) { hitQueues[i] = null; } - } - } - } - } - // Otherwise, process the hits for a no pile-up simulation. - // When no pile-up is simulated, hits are fully processed and - // output on an event-by-event basis. - else { - // Create a list to hold the analog data. - List hits = new ArrayList(); - - // Process each of the truth hits. - for(StripHit stripHit : stripHits) { - // Get the hit parameters. - HpsSiSensor sensor = (HpsSiSensor) stripHit.sensor; - short[] samples = new short[6]; - - // Create a signal buffer and populate it with the - // appropriate pedestal values. - double[] signal = new double[6]; - for(int sampleN = 0; sampleN < 6; sampleN++) { - signal[sampleN] = sensor.getPedestal(stripHit.channel, sampleN); - } - - // If noise should be added, do so. - if(addNoise) { - addNoise(sensor, stripHit.channel, signal); - } - - // Emulate the pulse response and add it to the - // sample array. - for(int sampleN = 0; sampleN < 6; sampleN++) { - double time = sampleN * HPSSVTConstants.SAMPLING_INTERVAL - timeOffset; - shape.setParameters(stripHit.channel, (HpsSiSensor) sensor); - signal[sampleN] += stripHit.amplitude * shape.getAmplitudePeakNorm(time); - samples[sampleN] = (short) Math.round(signal[sampleN]); - } - - // Create raw tracker hits from the sample data. - long channel_id = sensor.makeChannelID(stripHit.channel); - RawTrackerHit hit = new BaseRawTrackerHit(0, channel_id, samples, new ArrayList(stripHit.simHits), sensor); - - // If the analog hit passes the readout cuts, it may - // be added to the data stream. - if(readoutCuts(hit)) { hits.add(hit); } - } - - // Output the processed hits to the LCIO stream. - ReadoutDataManager.addData(outputCollection, hits, RawTrackerHit.class); - } - } - - @Override - public void startOfData() { - // The output collection is only handled by the readout data - // manager if no pile-up simulation is included. Otherwise, - // the driver outputs its own collection at readout. 
- if(noPileup) { - LCIOCollectionFactory.setCollectionName(outputCollection); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollectionFactory.setFlags(1 << LCIOConstants.TRAWBIT_ID1); - LCIOCollectionFactory.setReadoutName(readout); - LCIOCollection noPileUpCollectionParams = LCIOCollectionFactory.produceLCIOCollection(RawTrackerHit.class); - ReadoutDataManager.registerCollection(noPileUpCollectionParams, true, 8.0, 32.0); - } - addDependency(pulserDataCollectionName); - // Define the LCSim on-trigger collection parameters. - LCIOCollectionFactory.setCollectionName(outputCollection); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollectionFactory.setFlags(1 << LCIOConstants.TRAWBIT_ID1); - LCIOCollectionFactory.setReadoutName(readout); - trackerHitCollectionParams = LCIOCollectionFactory.produceLCIOCollection(RawTrackerHit.class); - - LCIOCollectionFactory.setCollectionName(relationCollection); - LCIOCollectionFactory.setProductionDriver(this); - truthRelationsCollectionParams = LCIOCollectionFactory.produceLCIOCollection(LCRelation.class); - - LCIOCollectionFactory.setCollectionName("TrackerHits"); - LCIOCollectionFactory.setFlags(0xc0000000); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollectionFactory.setReadoutName("TrackerHits"); - truthHitsCollectionParams = LCIOCollectionFactory.produceLCIOCollection(SimTrackerHit.class); - - // Run the superclass method. - super.startOfData(); - } - - /** - * Performs a simulation of silicon sensor response and generates - * a collection of {@link - * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver.StripHit StripHit} - * objects representing the detector response. - * @return Returns a collection of StripHit objects describing - * the detector response for the current event. - */ - private List doSiSimulation() { - // Create a list to store the simulated hit objects. - List stripHits = new ArrayList(); - - // Process each of the SVT sensors. 
- for(SiSensor sensor : sensors) { - // Set the sensor to be used in the charge deposition - // simulation. - siSimulation.setSensor(sensor); - // Perform the charge deposition simulation. - Map electrodeDataMap = siSimulation.computeElectrodeData(); - - // Iterate over all possible charge carriers. - for(ChargeCarrier carrier : ChargeCarrier.values()) { - // If the sensor is capable of collecting the given - // charge carrier, then obtain the electrode data for - // the sensor. - if(sensor.hasElectrodesOnSide(carrier)) { - // Attempt to obtain electrode data. - SiElectrodeDataCollection electrodeDataCol = electrodeDataMap.get(carrier); - - // If there is no electrode data available create - // a new instance of electrode data. - if(electrodeDataCol == null) { - electrodeDataCol = new SiElectrodeDataCollection(); - } - - // Loop over all sensor channels. - for(Integer channel : electrodeDataCol.keySet()) { - // Get the electrode data for this channel. - SiElectrodeData electrodeData = electrodeDataCol.get(channel); - Set simHits = electrodeData.getSimulatedHits(); - - // Compute hit time as the unweighted average - // of SimTrackerHit times; this is dumb but - // okay since there's generally only one - // SimTrackerHit. - double time = 0.0; - for(SimTrackerHit hit : simHits) { - time += hit.getTime(); - } - time /= simHits.size(); - time += ReadoutDataManager.getCurrentTime(); - - // Get the charge in units of electrons. - double charge = electrodeData.getCharge(); - - // Calculate the amplitude. - double resistorValue = 100; // Ohms - double inputStageGain = 1.5; - // FIXME: This should use the gains instead - double amplitude = (charge / HPSSVTConstants.MIP) * resistorValue * inputStageGain * Math.pow(2, 14) / 2000; - - // Generate a StripHit object containing the - // simulation data and add it to the list. 
- stripHits.add(new StripHit(sensor, channel, amplitude, time, simHits)); - } - } - } - - // Clear the sensors of all deposited charge - siSimulation.clearReadout(); - } - - // Return the collection of StripHit objects. - return stripHits; - } - - private List makePulserStripHits(Collection rawHits) { - // Create a list to store the simulated hit objects. - List stripHits = new ArrayList(); - for (RawTrackerHit hit: rawHits){ - SiSensor sensor=(SiSensor) hit.getDetectorElement(); - int strip = hit.getIdentifierFieldValue("strip"); - double time=ReadoutDataManager.getCurrentTime(); - stripHits.add(new StripHit(sensor, strip, time, hit)); - } - return stripHits; - } - /** - * Adds a random Gaussian noise signature to the specified signal - * buffer based on the sensor and channel parameters. - * @param sensor - The sensor on which the signal buffer occurs. - * @param channel - The channel on which the signal buffer - * occurs. - * @param signal - The signal buffer. This must be an array of - * size six. - */ - private void addNoise(SiSensor sensor, int channel, double[] signal) { - for(int sampleN = 0; sampleN < 6; sampleN++) { - signal[sampleN] += RandomGaussian.getGaussian(0, ((HpsSiSensor) sensor).getNoise(channel, sampleN)); - } - } - - /** - * Performs each of the three readout cuts, if they are enabled. - * This is the equivalent of calling, as appropriate, the methods - * {@link - * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#samplesAboveThreshold(RawTrackerHit) - * samplesAboveThreshold(RawTrackerHit)}, {@link - * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#pileupCut(RawTrackerHit) - * pileupCut(RawTrackerHit)}, and {@link - * org.hps.readout.svt.SvtDigiWithPulserNoSpacingReadoutDriver#badChannelCut(RawTrackerHit) - * badChannelCut(RawTrackerHit)}. - * @param hit - The analog hit to test. - * @return Returns true if all enabled cuts are - * passed, and false otherwise. 
- */ - private boolean readoutCuts(RawTrackerHit hit) { - // Perform each enabled cut. - if(enableThresholdCut && !samplesAboveThreshold(hit)) { - return false; - } - if(enablePileupCut && !pileupCut(hit)) { - return false; - } - if(dropBadChannels && !badChannelCut(hit)) { - return false; - } - - // If all enabled cuts are passed, return true. - return true; - } - - /** - * Checks whether an analog hit occurred on a "bad" channel. - * @param hit - The hit to be checked. - * @return Returns true if the hit did not - * occur on a bad channel, and false if it did. - */ - private boolean badChannelCut(RawTrackerHit hit) { - HpsSiSensor sensor = (HpsSiSensor) hit.getDetectorElement(); - int channel = hit.getIdentifierFieldValue("strip"); - return !sensor.isBadChannel(channel); - } - - /** - * Attempts to eliminate samples where the pulse starts before - * the sample array. This is done by requiring the second, third, - * and fourth samples of the array to be increasing in value with - * index. - * @param hit - The hit to check. - * @return Returns true if the no pile-up condition - * is met and false if it is not. - */ - private boolean pileupCut(RawTrackerHit hit) { - short[] samples = hit.getADCValues(); - return (samples[2] > samples[1] || samples[3] > samples[2]); - } - - /** - * Attempts to eliminate false hits generated due to noise by - * requiring that a programmable number of samples exceed a - * similarly programmable noise threshold. - * @param hit - The hit to be checked. - * @return Returns true if the noise threshold count - * cut is met and false if it is not. - */ - private boolean samplesAboveThreshold(RawTrackerHit hit) { - // Get the channel and sensor information for the hit. - int channel = hit.getIdentifierFieldValue("strip"); - HpsSiSensor sensor = (HpsSiSensor) hit.getDetectorElement(); - - // Track the noise and pedestal for each sample. 
- double noise; - double pedestal; - - // Iterate over the samples and count how many are above the - // noise threshold. - int count = 0; - short[] samples = hit.getADCValues(); - for(int sampleN = 0; sampleN < samples.length; sampleN++) { - pedestal = sensor.getPedestal(channel, sampleN); - noise = sensor.getNoise(channel, sampleN); - if(samples[sampleN] - pedestal > noise * noiseThreshold) { - count++; - } - } - - // The cut is passed if enough samples are above the noise - // threshold to pass the minimum count threshold. - return count >= samplesAboveThreshold; - } - - @Override - protected Collection> getOnTriggerData(double triggerTime) { - // No pile-up events are output on an event-by-event basis, - // and as such, do not output anything at this stage. - if(noPileup) { return null; } - // Create a list to hold the analog data - List hits = new ArrayList(); - List truthHits = new ArrayList(); - List trueHitRelations = new ArrayList(); - // Calculate time of first sample - // double firstSample = Math.floor(((triggerTime + 256) - readoutLatency - readoutOffset) / HPSSVTConstants.SAMPLING_INTERVAL) - // * HPSSVTConstants.SAMPLING_INTERVAL + readoutOffset; - - double firstSample = Math.floor(((triggerTime + 0) - readoutLatency - readoutOffset) / HPSSVTConstants.SAMPLING_INTERVAL) - * HPSSVTConstants.SAMPLING_INTERVAL + readoutOffset; - - if(debug_){ - System.out.println(this.getClass().getName()+":: trigger time = "+triggerTime); - System.out.println(this.getClass().getName()+":: svt first sample time for trigger = "+firstSample); - } - List processedHits = new ArrayList(); - - for(SiSensor sensor : sensors) { - // Get the hit queues for the current sensor. - PriorityQueue[] hitQueues = hitMap.get(sensor); - PriorityQueue[] pulserHitQueues = pulserHitMap.get(sensor); - - // Iterate over the hit queue channels. 
- for(int channel = 0; channel < hitQueues.length; channel++) { - // Unless noise should be added, there is nothing to - // process on an empty hit queue. Skip it. - if(!addNoise && (hitQueues[channel] == null || hitQueues[channel].isEmpty()) && (pulserHitQueues[channel] == null || pulserHitQueues[channel].isEmpty())){ - continue; - } - - // Create a buffer to hold the extracted response for - // the channel. - double[] signal = new double[6]; - - //do the pulser hit first...if there is a pulser hit, don't add pedestal or noise to mc hit - boolean hasPulserHit=false; // flag if this channel has a pulser hit - if(pulserHitQueues[channel] != null){ - StripHit ph=pulserHitQueues[channel].poll(); - RawTrackerHit rth=ph.getRawTrackerHit(); - hasPulserHit=true; - short[] samples =rth.getADCValues(); - for(int sampleN = 0; sampleN < 6; sampleN++) { - signal[sampleN] = samples[sampleN]; - } - } - - if(!hasPulserHit){ - // Create a buffer to hold the extracted signal for - // the channel. Populate it with the appropriate - // pedestal values. - - for(int sampleN = 0; sampleN < 6; sampleN++) { - signal[sampleN] = ((HpsSiSensor) sensor).getPedestal(channel, sampleN); - } - - // If noise should be added, do so. - if(addNoise) { - addNoise(sensor, channel, signal); - } - } - - // Create a list to store truth SVT hits. - List simHits = new ArrayList(); - - // If there is data in the mc hit queues, process it. - if(hitQueues[channel] != null) { - if(debug_)System.out.println(this.getClass().getName()+":: data in channel = "+channel); - for(StripHit hit : hitQueues[channel]) { - processedHits.add(hit); - - // Track the noise and contribution to the - // signal from the current hit. - double meanNoise = 0; - double totalContrib = 0; - - // Emulate the pulse response for the hit - // across all size samples. 
- StringBuffer signalBuffer = new StringBuffer("\t\t\t\tSample Pulse :: ["); - for(int sampleN = 0; sampleN < 6; sampleN++) { - double sampleTime = firstSample + sampleN * HPSSVTConstants.SAMPLING_INTERVAL; - shape.setParameters(channel, (HpsSiSensor) sensor); - double signalAtTime = hit.amplitude * shape.getAmplitudePeakNorm(sampleTime - hit.time); - if(debug_){ - System.out.println(this.getClass().getName()+":: making pulse: sample time = " - +sampleTime+"; hit time = "+hit.time); - System.out.println(this.getClass().getName()+":: signal @ time() = "+signalAtTime); - } - totalContrib += signalAtTime; - signal[sampleN] += signalAtTime; - meanNoise += ((HpsSiSensor) sensor).getNoise(channel, sampleN); - - signalBuffer.append(signalAtTime + " (" + sampleTime + ")"); - if(sampleN != 5) { - signalBuffer.append(" "); - } - } - signalBuffer.append("]"); - - // TODO: Move this to the noise comparison below. - meanNoise /= 6; - - // Calculate the average noise across all - // samples and compare it to the contribution - // from the hit. If it exceeds a the noise - // threshold, store it as a truth hit. - //meanNoise /= 6; - if(totalContrib > 4.0 * meanNoise) { - simHits.addAll(hit.simHits); - } - } - } - - // Convert the samples into a short array, - short[] samples = new short[6]; - for(int sampleN = 0; sampleN < 6; sampleN++) { - samples[sampleN] = (short) Math.round(signal[sampleN]); - } - - // Get the proper channel ID. - long channel_id = ((HpsSiSensor) sensor).makeChannelID(channel); - - // Create a new tracker hit. - RawTrackerHit hit = new BaseRawTrackerHit(0, channel_id, samples, simHits, sensor); - // Only tracker hits that pass the readout cuts may - // be passed through to readout. - if(readoutCuts(hit)) { - // Add the hit to the readout hits collection. - hits.add(hit); - // Associate the truth hits with the raw hit and - // add them to the truth hits collection. 
- for(SimTrackerHit simHit : hit.getSimTrackerHits()) { - LCRelation hitRelation = new BaseLCRelation(hit, simHit); - trueHitRelations.add(hitRelation); - truthHits.add(simHit); - } - } - } - } - - // Create the collection data objects for output to the - // readout event. - TriggeredLCIOData hitCollection = new TriggeredLCIOData(trackerHitCollectionParams); - hitCollection.getData().addAll(hits); - TriggeredLCIOData truthHitCollection = new TriggeredLCIOData(truthHitsCollectionParams); - truthHitCollection.getData().addAll(truthHits); - TriggeredLCIOData truthRelationCollection = new TriggeredLCIOData(truthRelationsCollectionParams); - truthRelationCollection.getData().addAll(trueHitRelations); - - // MC particles need to be extracted from the truth hits - // and included in the readout data to ensure that the - // full truth chain is available. - Set truthParticles = new java.util.HashSet(); - for(SimTrackerHit simHit : truthHits) { - ReadoutDataManager.addParticleParents(simHit.getMCParticle(), truthParticles); - } - - // Create the truth MC particle collection. - LCIOCollectionFactory.setCollectionName("MCParticle"); - LCIOCollectionFactory.setProductionDriver(this); - LCIOCollection truthParticleCollection = LCIOCollectionFactory.produceLCIOCollection(MCParticle.class); - TriggeredLCIOData truthParticleData = new TriggeredLCIOData(truthParticleCollection); - truthParticleData.getData().addAll(truthParticles); - - // A trigger timestamp needs to be produced as well. - ReadoutTimestamp timestamp = new ReadoutTimestamp(ReadoutTimestamp.SYSTEM_TRACKER, firstSample); - LCIOCollectionFactory.setCollectionName(ReadoutTimestamp.collectionName); - LCIOCollection timestampCollection = LCIOCollectionFactory.produceLCIOCollection(ReadoutTimestamp.class); - TriggeredLCIOData timestampData = new TriggeredLCIOData(timestampCollection); - timestampData.getData().add(timestamp); - - // Store them in a single collection. 
- Collection> eventOutput = new ArrayList>(5); - eventOutput.add(hitCollection); - eventOutput.add(truthParticleData); - eventOutput.add(truthHitCollection); - eventOutput.add(truthRelationCollection); - eventOutput.add(timestampData); - - // Return the event output. - return eventOutput; - } - - /** - * Class StripHit is responsible for storing several - * parameters defining a simulated hit object. - */ - private class StripHit implements Comparable { - SiSensor sensor; - int channel; - double amplitude; - double time; - Set simHits; - RawTrackerHit pulserHit; - boolean isPulser=false; - - public StripHit(SiSensor sensor, int channel, double amplitude, double time, Set simHits) { - this.sensor = sensor; - this.channel = channel; - this.amplitude = amplitude; - this.time = time; - this.simHits = simHits; - this.isPulser=false; - } - - public StripHit(SiSensor sensor, int channel, double time, RawTrackerHit pulserHit){ - this.sensor = sensor; - this.channel = channel; - this.pulserHit=pulserHit; - this.time=time; - this.isPulser=false; - } - - public boolean getIsPulser(){return this.isPulser;} - public RawTrackerHit getRawTrackerHit(){return this.pulserHit;} - @Override - public int compareTo(Object o) { - double deltaT = time - ((StripHit) o).time; - if(deltaT > 0) { - return 1; - } else if(deltaT < 0) { - return -1; - } else { - return 0; - } - } - } - - @Override - protected double getTimeDisplacement() { - return 0; - } - - @Override - protected double getTimeNeededForLocalOutput() { - // TODO: Probably should have some defined value - buffer seems to be filled enough from the ecal delay alone, though. 
- return 100; - } - -} diff --git a/ecal-readout-sim/src/main/java/org/hps/readout/RawConverterReadoutDriver.java b/ecal-readout-sim/src/main/java/org/hps/readout/RawConverterReadoutDriver.java index 71a2c36b29..26f4589ef8 100755 --- a/ecal-readout-sim/src/main/java/org/hps/readout/RawConverterReadoutDriver.java +++ b/ecal-readout-sim/src/main/java/org/hps/readout/RawConverterReadoutDriver.java @@ -48,7 +48,9 @@ public abstract class RawConverterReadoutDriver extends ReadoutDriver { * conditions database should be skipped when producing hits. */ protected boolean skipBadChannels = false; - + + private double checkAheadTime = 4.0; + protected RawConverterReadoutDriver(String defaultInputCollectionName, String defaultOutputCollectionName) { inputCollectionName = defaultInputCollectionName; outputCollectionName = defaultOutputCollectionName; @@ -71,14 +73,20 @@ public final void detectorChanged(Detector detector) { public final void process(EventHeader event) { // Check the data management driver to determine whether the // input collection is available or not. - if(!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + 4.0)) { + if(doNoSpacing) + localTime=ReadoutDataManager.getCurrentTime(); // just overwrite local time on every event + if(!doNoSpacing&&!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + checkAheadTime)) { + if(debug)System.out.println("Skipping RawConverterReadout because collection = "+inputCollectionName+" doesn't exist at "+(localTime+ checkAheadTime)); return; } // Get all of the raw hits in the current clock-cycle. - Collection rawHits = ReadoutDataManager.getData(localTime, localTime + 4.0, inputCollectionName, RawCalorimeterHit.class); + Collection rawHits = ReadoutDataManager.getData(localTime, localTime + checkAheadTime, inputCollectionName, RawCalorimeterHit.class); - // Increment the local time. 
+ + if(debug)System.out.println(this.getClass().getName()+":: collection = "+inputCollectionName+" has "+rawHits.size()+" found between time = "+localTime+" and "+(localTime+checkAheadTime)); + + // Increment the local time. localTime += 4.0; // Pass the raw hits to the raw converter to obtain proper @@ -96,11 +104,11 @@ public final void process(EventHeader event) { if(skipBadChannels && isBadChannel(newHit.getCellID())) { continue; } - + if(debug)System.out.println(this.getClass().getName()+":: made newHit with time = "+newHit.getTime()); // Add the new hit. newHits.add(newHit); } - + if(debug)System.out.println(this.getClass().getName()+":: outputting collection = "+outputCollectionName+" with size = "+newHits.size()); // Add the calorimeter hit collection to the data manager. ReadoutDataManager.addData(outputCollectionName, newHits, CalorimeterHit.class); } @@ -246,4 +254,12 @@ public void setSkipBadChannels(boolean state) { public void setReadoutWindow(int window) { getConverter().setWindowSamples(window); } + /** + * Sets the amount of time (+ ns) to check for possible + * seed clusters. + * @param value - time in ns + */ + public void setCheckAheadTime(double value) { + checkAheadTime = value; + } } diff --git a/ecal-readout-sim/src/main/java/org/hps/readout/ecal/updated/GTPClusterReadoutDriver.java b/ecal-readout-sim/src/main/java/org/hps/readout/ecal/updated/GTPClusterReadoutDriver.java index 84ef87c258..1a86cf155c 100755 --- a/ecal-readout-sim/src/main/java/org/hps/readout/ecal/updated/GTPClusterReadoutDriver.java +++ b/ecal-readout-sim/src/main/java/org/hps/readout/ecal/updated/GTPClusterReadoutDriver.java @@ -99,6 +99,15 @@ public class GTPClusterReadoutDriver extends ReadoutDriver { * This is calculated automatically. */ private double localTimeDisplacement = 0; + + /** + * The amount of time (ns) to check ahead/behind + * for ecal clusters. 
+ * This can be large for no-spacing running (like 192) + * but should be 4.0 for spaced running + */ + + private double checkAheadTime = 4.0; // ============================================================== // ==== Driver Parameters ======================================= @@ -185,19 +194,23 @@ public void detectorChanged(Detector etector) { @Override public void process(EventHeader event) { - // Check the data management driver to determine whether the + + if(doNoSpacing) + localTime=ReadoutDataManager.getCurrentTime(); // just overwrite local time on every event + // Check the data management driver to determine whether the // input collection is available or not. - if(!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + temporalWindow + 4.0)) { - return; + if(!doNoSpacing&&!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + temporalWindow + checkAheadTime)) { + if(debug)System.out.println("Skipping GTP Readout with because collection doesn't exist at "+(localTime+temporalWindow + checkAheadTime)); + return; } // Get the hits that occur during the present clock-cycle, as // well as the hits that occur in the verification window // both before and after the current clock-cycle. // TODO: Simplify this? 
- Collection seedCandidates = ReadoutDataManager.getData(localTime, localTime + 4.0, inputCollectionName, CalorimeterHit.class); + Collection seedCandidates = ReadoutDataManager.getData(localTime, localTime + checkAheadTime, inputCollectionName, CalorimeterHit.class); Collection foreHits = ReadoutDataManager.getData(localTime - temporalWindow, localTime, inputCollectionName, CalorimeterHit.class); - Collection postHits = ReadoutDataManager.getData(localTime + 4.0, localTime + temporalWindow + 4.0, inputCollectionName, CalorimeterHit.class); + Collection postHits = ReadoutDataManager.getData(localTime + checkAheadTime, localTime + temporalWindow + checkAheadTime, inputCollectionName, CalorimeterHit.class); // Increment the local time. localTime += 4.0; @@ -208,16 +221,22 @@ public void process(EventHeader event) { allHits.addAll(foreHits); allHits.addAll(seedCandidates); allHits.addAll(postHits); - + if(debug){ + System.out.println(this.getClass().getName()+":: "+inputCollectionName+":: local time = "+localTime+ + " temporalWindow = "+temporalWindow+" checkAheadTime = "+checkAheadTime); + System.out.println(this.getClass().getName()+":: "+inputCollectionName+":: current time = "+ReadoutDataManager.getCurrentTime()+" number of seeds = "+seedCandidates.size()+"; all hits = "+allHits.size()); + } // Store newly created clusters. List gtpClusters = new ArrayList(); // Iterate over all seed hit candidates. seedLoop: for(CalorimeterHit seedCandidate : seedCandidates) { + if(debug)System.out.println(this.getClass().getName()+":: looping through seeds: seed energy = "+seedCandidate.getRawEnergy()); // A seed candidate must meet a minimum energy cut to be // considered for clustering. if(seedCandidate.getRawEnergy() < seedEnergyThreshold) { + if(debug)System.out.println(this.getClass().getName()+":: failed seed energy: threshold = "+seedEnergyThreshold); continue seedLoop; } @@ -254,7 +273,8 @@ public void process(EventHeader event) { // cluster should be formed. 
gtpClusters.add(createBasicCluster(seedCandidate, clusterHits)); } - + + if(debug)System.out.println(this.getClass().getName()+":: adding gtpClusters to data manager size = "+gtpClusters.size()); // Pass the clusters to the data management driver. ReadoutDataManager.addData(outputCollectionName, gtpClusters, Cluster.class); } @@ -336,7 +356,10 @@ protected Collection> getOnTriggerData(double triggerTime) @Override protected double getTimeDisplacement() { - return localTimeDisplacement; + if(doNoSpacing) + return 0; + else + return localTimeDisplacement; } @Override @@ -384,5 +407,13 @@ public void setClusterWindow(int value) { */ public void setSeedEnergyThreshold(double value) { seedEnergyThreshold = value; - } + } + /** + * Sets the amount of time (+/-ns) to check for possible + * seed clusters. + * @param value - time in ns + */ + public void setCheckAheadTime(double value) { + checkAheadTime = value; + } } diff --git a/ecal-readout-sim/src/main/java/org/hps/readout/hodoscope/HodoscopePatternReadoutDriver.java b/ecal-readout-sim/src/main/java/org/hps/readout/hodoscope/HodoscopePatternReadoutDriver.java index cf74aaf81a..9525a47a8c 100644 --- a/ecal-readout-sim/src/main/java/org/hps/readout/hodoscope/HodoscopePatternReadoutDriver.java +++ b/ecal-readout-sim/src/main/java/org/hps/readout/hodoscope/HodoscopePatternReadoutDriver.java @@ -145,11 +145,14 @@ public void actionPerformed(ActionEvent e) { } @Override - public void process(EventHeader event) { - + public void process(EventHeader event) { + if(doNoSpacing) + localTime=ReadoutDataManager.getCurrentTime(); // just overwrite local time on every event + // Check the data management driver to determine whether the // input collection is available or not. 
- if (!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + localTimeDisplacement)) { + if (!doNoSpacing && !ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + localTimeDisplacement)) { + if(debug)System.out.println(this.getClass().getName()+":: "+inputCollectionName+" doesn't exist at time = "+(localTime + localTimeDisplacement)); return; } @@ -162,7 +165,7 @@ public void process(EventHeader event) { Collection fadcHits = ReadoutDataManager.getData( localTime - (persistentTime - timeEarlierThanEcal), localTime + timeEarlierThanEcal + 4.0, inputCollectionName, CalorimeterHit.class); - + if(debug)System.out.println(this.getClass().getName()+":: number of fadcHits found = "+fadcHits.size()); // Increment the local time. localTime += 4.0; @@ -279,6 +282,7 @@ public void process(EventHeader event) { } // At leaset there is a hodo tilt/cluster hit in any layer, then the pattern list is added into data manager + if(flag == true && debug) if(debug)System.out.println(this.getClass().getName()+":: outputting "+outputCollectionName+" with size = "+hodoPatterns.size()); if(flag == true) ReadoutDataManager.addData(outputCollectionName, hodoPatterns, HodoscopePattern.class); } @@ -345,7 +349,10 @@ private void populateChannelCollections() { @Override protected double getTimeDisplacement() { - return localTimeDisplacement; + if(doNoSpacing) + return 0; + else + return localTimeDisplacement; } @Override @@ -421,4 +428,5 @@ public void setTimeEarlierThanEcal(double timeEarlierThanEcal) { public void setGainFactor(double gainFactor) { this.gainFactor = gainFactor; } + } diff --git a/ecal-readout-sim/src/main/java/org/hps/readout/trigger2019/SinglesTrigger2019ReadoutDriver.java b/ecal-readout-sim/src/main/java/org/hps/readout/trigger2019/SinglesTrigger2019ReadoutDriver.java index ac3c6d1faf..c4e7420bde 100644 --- a/ecal-readout-sim/src/main/java/org/hps/readout/trigger2019/SinglesTrigger2019ReadoutDriver.java +++ 
b/ecal-readout-sim/src/main/java/org/hps/readout/trigger2019/SinglesTrigger2019ReadoutDriver.java @@ -139,20 +139,40 @@ public void detectorChanged(Detector detector) { @Override public void process(EventHeader event) { // Check that clusters are available for the trigger. + // System.out.println(this.getClass().getName()+":: starting trigger determination"); Collection clusters = null; Collection hodoPatterns = null; ArrayList hodoPatternList = null; - if(ReadoutDataManager.checkCollectionStatus(inputCollectionNameEcal, localTime) && ReadoutDataManager.checkCollectionStatus(inputCollectionNameHodo, localTime)) { + if(doNoSpacing) + localTime=ReadoutDataManager.getCurrentTime(); // just overwrite local time on every event + + if(ReadoutDataManager.checkCollectionStatus(inputCollectionNameEcal, localTime) && ReadoutDataManager.checkCollectionStatus(inputCollectionNameHodo, localTime)) { + if(debug) System.out.println(this.getClass().getName()+":: checkCollectionStatus worked. Getting collection in time window = ["+localTime+","+(localTime+4.0)+"]"); clusters = ReadoutDataManager.getData(localTime, localTime + 4.0, inputCollectionNameEcal, Cluster.class); hodoPatterns = ReadoutDataManager.getData(localTime, localTime + 4.0, inputCollectionNameHodo, HodoscopePattern.class); + if(debug) System.out.println(this.getClass().getName()+":: checkCollectionStatus worked Ecal size = "+clusters.size()+" Hodo size = "+ hodoPatterns.size()); localTime += 4.0; - - if(clusters.size() == 0 || hodoPatterns.size() == 0) return; - + //this is backwards of what I wanted, but whatever... + // if(doNoSpacing&&(clusters.size() == 0 || hodoPatterns.size() == 0)) return; + // if(!doNoSpacing&&(clusters.size() == 0 && hodoPatterns.size() == 0)) return; + // if(doNoSpacing&&(clusters.size() == 0 || hodoPatterns.size() == 0)) return; + + //just quit if 0 clusters. 
+ // if(clusters.size() == 0) + // return; + /* + * I feel like this should be "and" as one of + * the triggers doesn't require hodo, right? + */ + //this is the cut that's in master + if(clusters.size() == 0 || hodoPatterns.size() == 0) return; hodoPatternList = new ArrayList<>(hodoPatterns); - } else { return; } + } else { + if(debug)System.out.println(this.getClass().getName()+":: checkCollectionStatus did not find one of Ecal or Hodo at time = "+localTime); + return; + } // Track whether or not a trigger was seen. boolean triggered = false; @@ -160,7 +180,10 @@ public void process(EventHeader event) { // There is no need to perform the trigger cuts if the // trigger is in dead time, as no trigger may be issued // regardless of the outcome. - if(isInDeadTime()) { return; } + if(isInDeadTime()) { + if(debug)System.out.println(this.getClass().getName()+":: trigger is in dead-time!!!"); + return; + } // Record top/bot status for singles triggers List topBot = new ArrayList(); @@ -172,7 +195,10 @@ public void process(EventHeader event) { // not available during readout, so crystal indices must // be obtained directly from the calorimeter geometry. java.awt.Point ixy = ecal.getCellIndices(cluster.getCalorimeterHits().get(0).getCellID()); - + System.out.println(this.getClass().getName()+ + ":: looping over clusters; number of hits = "+TriggerModule2019.getClusterHitCount(cluster) + +" seed energy value = " + TriggerModule2019.getValueClusterSeedEnergy(cluster) + +" total energy of cluster = "+ TriggerModule2019.getValueClusterTotalEnergy(cluster)); // Populate the uncut plots. clusterSeedEnergy[NO_CUTS].fill(TriggerModule2019.getValueClusterSeedEnergy(cluster)); clusterTotalEnergy[NO_CUTS].fill(TriggerModule2019.getValueClusterTotalEnergy(cluster)); @@ -181,17 +207,22 @@ public void process(EventHeader event) { // Perform the hit count cut. 
if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_HIT_COUNT_LOW_EN) && !triggerModule.clusterHitCountCut(cluster)) { + if(debug)System.out.println(this.getClass().getName()+":: did not satisfy cluster hit cout (low)"); continue; } // Perform the cluster energy cut. if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_TOTAL_ENERGY_LOW_EN) && !triggerModule.clusterTotalEnergyCutLow(cluster)) { + if(debug)System.out.println(this.getClass().getName()+":: did not satisfy cluster energy cut (low)"); continue; } if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_TOTAL_ENERGY_HIGH_EN) && !triggerModule.clusterTotalEnergyCutHigh(cluster)) { + if(debug)System.out.println(this.getClass().getName()+":: did not satisfy cluster energy cout (high)"); continue; } + + System.out.println(this.getClass().getName()+":: made it past basic cluster cuts"); // In the setup calorimeter geometry, range of X coordinates is [-23, -1] and [1, 23]. // The hardware uses cluster X coordinates [-22,0] and [1,23]. @@ -203,19 +234,30 @@ public void process(EventHeader event) { // XMin is at least 0. if(!triggerModule.getCutEn(TriggerModule2019.SINGLES_MOLLERMODE_EN)) { if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_XMIN_EN) && !triggerModule.clusterXMinCut(clusterX)) { + if(debug)System.out.println(this.getClass().getName()+":: did not satisfy cluster x cut (low)"); continue; } - + System.out.println(this.getClass().getName()+":: made it past xMin cut "); // XMin cut has been applied. 
if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_PDE_EN) && !triggerModule.clusterPDECut(cluster, clusterX)) { + if(debug)System.out.println(this.getClass().getName()+":: did not satisfy cluster PDE cut"); continue; - } - } - - if(triggerModule.getCutEn(TriggerModule2019.SINGLES_L1L2ECAL_MATCHING_EN) && !triggerModule.geometryMatchingCut(clusterX, ixy.y, hodoPatternList)) { - continue; + } + System.out.println(this.getClass().getName()+":: made it past PDE cut "); + } + // if(triggerModule.getCutEn(TriggerModule2019.SINGLES_L1L2ECAL_MATCHING_EN) && !triggerModule.geometryMatchingCut(clusterX, ixy.y, hodoPatternList)) { + //put in check for hodoscope pattern collection size here + if(triggerModule.getCutEn(TriggerModule2019.SINGLES_L1L2ECAL_MATCHING_EN) && hodoPatterns.size()>0){ + if(!triggerModule.geometryMatchingCut(clusterX, ixy.y, hodoPatternList)) { + if(debug)System.out.println(this.getClass().getName()+":: did not satisfy cluster-hodo matching cut"); + continue; + } + System.out.println(this.getClass().getName()+":: made it past cluster-hodo matching cut "); + + } + if(debug)System.out.println(this.getClass().getName()+":: made it through all non-moller cuts"); //For 2021 update, Moller triggers if(triggerModule.getCutEn(TriggerModule2019.SINGLES_MOLLERMODE_EN)) { if(triggerModule.getCutEn(TriggerModule2019.SINGLES_XYMINMAX_EN) && !triggerModule.clusterXMinCut(clusterX)) { @@ -236,7 +278,9 @@ public void process(EventHeader event) { } // Note that a trigger occurred. 
triggered = true; - + if(debug) + if(debug)System.out.println(this.getClass().getName()+":: found a trigger!"); + if(ixy.y > 0) topBot.add(TOP); else topBot.add(BOT); @@ -248,6 +292,7 @@ public void process(EventHeader event) { } if(triggered) { + if(debug)System.out.println(this.getClass().getName()+":: sending trigger!!!"); boolean topStat = false; boolean botStat = false; if(topBot.contains(TOP)) topStat = true; diff --git a/record-util/src/main/java/org/hps/readout/ReadoutDataManager.java b/record-util/src/main/java/org/hps/readout/ReadoutDataManager.java index 858405a1d6..20b36619c0 100755 --- a/record-util/src/main/java/org/hps/readout/ReadoutDataManager.java +++ b/record-util/src/main/java/org/hps/readout/ReadoutDataManager.java @@ -127,6 +127,9 @@ public class ReadoutDataManager extends Driver { private static final String nl = String.format("%n"); private static final Logger logger = Logger.getLogger(ReadoutDataManager.class.getSimpleName()); + + private static boolean debug=false; + @Override public void startOfData() { @@ -225,8 +228,13 @@ public void endOfData() { public void process(EventHeader event) { // Check the trigger queue. if(!triggerQueue.isEmpty()) { - // Check the earliest possible trigger write time. + if(debug)System.out.println(this.getClass().getName()+" found a trigger @ "+triggerQueue.peek().getTriggerTime()+"; current time is "+ getCurrentTime()); + // Check the earliest possible trigger write time. boolean isWritable = getCurrentTime() >= triggerQueue.peek().getTriggerTime() + bufferTotal; + if(debug && !isWritable) + System.out.println(this.getClass().getName()+":: can't write this trigger yet because "+getCurrentTime()+" < "+(triggerQueue.peek().getTriggerTime() + bufferTotal)); + + // If all collections are available to be written, the // event should be output. 
if(isWritable) { @@ -250,8 +258,16 @@ public void process(EventHeader event) { if(!triggerQueue.isEmpty()) nextTrigger = triggerQueue.peek(); } } - triggers++; + double roughTimeOfEvent=getCurrentTime(); + if(effectiveBunches==1){ //we are doing spaced simulation + //342ns is the typical time to do readout/triggering...subtract this off to compare to spaced + if(debug) + System.out.println(getClass().getName()+":: subtracting 342ns to spaced event to get rough time"); + roughTimeOfEvent=roughTimeOfEvent-342.0; + } + if(debug) + System.out.println(getClass().getName()+":: found trigger number = "+triggers+" at current time = "+roughTimeOfEvent); // Make a new LCSim event. int triggerEventNumber = event.getEventNumber() - ((int) Math.floor((getCurrentTime() - trigger.getTriggerTime()) / 2.0)); @@ -426,7 +442,9 @@ else if(topBot.equals(TriggerDriver.TOPBOT)){ for(TriggeredLCIOData triggerData : triggeredDataMap.values()) { storeCollection(triggerData, lcsimEvent); } - + if(debug) + System.out.println(getClass().getName()+":: writing event!!!"); + // Write the event to the output file. try { outputWriter.write(lcsimEvent); } catch(IOException e) { @@ -443,8 +461,11 @@ else if(topBot.equals(TriggerDriver.TOPBOT)){ data.getData().removeFirst(); } } - - // Increment the current time. + if(debug){ + System.out.println("ReadoutDataManager:: end of event with current time = "+currentTime); + System.out.println("##################### END #################################"); + } + // Increment the current time. 
currentTime += effectiveBunches*BEAM_BUNCH_SIZE; } @@ -480,7 +501,17 @@ public static final void addData(String collectionName, double dataTime, Col throw new IllegalArgumentException("Error: Saw data type \"" + dataType.getSimpleName() + "\" but expected data type \"" + collectionData.getCollectionParameters().getObjectType().getSimpleName() + "\" instead."); } - + //mg debug + /* + if(debug && Double.isNaN(dataTime)){ + System.out.println("ReadoutDataDriver:: addData no time given. "+ dataType.getName()+": currentTime = "+currentTime+"; global displacement = "+(collectionData.getCollectionParameters().getGlobalTimeDisplacement())); + System.out.println("ReadoutDataDriver:: addData setting time to = "+(currentTime - collectionData.getCollectionParameters().getGlobalTimeDisplacement())); + } else { + System.out.println("ReadoutDataDriver:: addData time provided; setting time to = "+dataTime); + } + */ + // + // If the data is empty, then there is no need to add it to // the buffer. if(!data.isEmpty()) { @@ -998,15 +1029,19 @@ private static final List getDataList(double startTime, double endTime, S // Throw an alert if the earliest requested time precedes the // earliest buffered time, and similarly for the latest time. LinkedList> dataLists = collectionData.getData(); - + //System.out.println("ReadoutDataManager::getDataList number in dataLists of objectType: "+objectType.getName()+" = "+dataLists.size()); // Iterate through the data and collect all entries that have // an associated truth time within the given time range. The // lower bound is inclusive, the upper bound is exclusive. List outputList = new ArrayList(); for(TimedList dataList : dataLists) { + // if(debug) + // System.out.println("ReadoutDataManager::getDataList dataList found at time = "+dataList.getTime()+" looking in time window ["+startTime+"--"+endTime+"]"); if(dataList.getTime() >= startTime && dataList.getTime() < endTime) { // Add the items from the list to the output list. 
for(Object o : dataList) { + //if(debug) + // System.out.println("ReadoutDataManager:: dataList of type "+o.getClass().getName()+" found in the time window ["+startTime+"--"+endTime+"]"); if(objectType.isAssignableFrom(o.getClass())) { outputList.add(objectType.cast(o)); } else { @@ -1215,5 +1250,9 @@ public static final void setEffectiveBunches(int value){ public static final void setZeroBuffer(boolean zero){ zeroBuffer=zero; } + + public static final void setDebug(boolean value){ + debug=value; + } } diff --git a/record-util/src/main/java/org/hps/readout/ReadoutDriver.java b/record-util/src/main/java/org/hps/readout/ReadoutDriver.java index e781fc0df7..ff7fa6cba0 100755 --- a/record-util/src/main/java/org/hps/readout/ReadoutDriver.java +++ b/record-util/src/main/java/org/hps/readout/ReadoutDriver.java @@ -120,7 +120,13 @@ public abstract class ReadoutDriver extends Driver { * this collection data should be written. */ private double readoutWindowBefore = Double.NaN; - + /** + * Boolean to chose no-spacing readout mode. + * Should be false for MC generated beam background + */ + public boolean doNoSpacing = false; + + public boolean debug=false; /** * Instantiates the readout driver. 
*/ @@ -272,4 +278,16 @@ public void setReadoutWindowAfter(double value) throws UnsupportedOperationExcep public void setReadoutWindowBefore(double value) throws UnsupportedOperationException { readoutWindowBefore = value; } + /** + * Sets do-no-spacing readout mode + * used for unspaced signal events + * @param value - true/false do no spacing + */ + public void setDoNoSpacing(boolean value) { + doNoSpacing = value; + } + + public void setDebug(boolean value){ + debug=value; + } } diff --git a/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacingModifyCurrent.lcsim b/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacingModifyCurrent.lcsim new file mode 100644 index 0000000000..418824cf94 --- /dev/null +++ b/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacingModifyCurrent.lcsim @@ -0,0 +1,470 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + true + + + + + EcalHits + + + 8.0 + 32.0 + false + + + + MCParticle + + + 32.0 + 32.0 + false + + + + HodoscopeHits + + + 8.0 + 32.0 + false + + + + TrackerHits + + + 8.0 + 32.0 + false + + + + + EcalReadoutHits + + + 32.0 + 32.0 + false + + + + HodoReadoutHits + + + 32.0 + 32.0 + false + + + + + SVTRawTrackerHits + + + + + + SVTRawTrackerHits + + 32.0 + 32.0 + false + + + + + + EcalHits + PulserDataEcalReadoutHits + EcalRawHits + EcalReadoutHits + EcalTruthRelations + TriggerPathTruthRelations + + true + + + 1 + + false + true + false + + 56 + + + true + + + false + + + + + + EcalRawHits + EcalCorrectedHits + + true + + + 192. + true + false + false + + + + + + true + 48 + + true + 192. 
+ true + true + + + + + HodoscopeHits + HodoscopePreprocessedHits + + true + + + + + + + HodoscopePreprocessedHits + PulserDataHodoReadoutHits + HodoscopeRawHits + HodoscopeReadoutHits + HodoscopeTruthRelations + HodoscopeTriggerPathTruthRelations + + true + + + 1 + false + true + + + 4 + + + 0.000833333 + + + false + + + false + true + 0.0001 + + + + + + HodoscopeRawHits + HodoscopeCorrectedHits + true + true + + + 1 + + 192. + true + false + true + + + + + + + + HodoscopeCorrectedHits + HodoscopePatterns + + true + + false + true + true + + + + + EcalClustersGTP + + HodoscopePatterns + + true + + + 0 + true + true + + + + + EcalClustersGTP + + HodoscopePatterns + + singles2 + + true + + + 0 + true + true + + + + + + PulserDataSVTRawTrackerHits + false + true + false + + false + 20.0 + 0.0 + + + + + 250 + true + 200 + ${outputFile}.slcio + true + + + + + diff --git a/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF.lcsim b/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF.lcsim new file mode 100644 index 0000000000..8747c33ee4 --- /dev/null +++ b/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF.lcsim @@ -0,0 +1,252 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 10 + + + + + + WARNING + EcalClusters + 0.030 + -5.0 + + + EcalClusters + EcalClustersCorr + + + + HodoscopeReadoutHits + CONFIG + true + + + true + HodoscopeReadoutHits + 8 + CONFIG + true + + + + + SVTRawTrackerHits + + + + Pileup + Migrad + + false + + + + true + + true + + false + + true + + true + + true + true + false + + + + 50 + false + + + + false + 100.0 + 100.0 + 400.0 + + + + Tracks_s123_c4_e56 + HPS_s123_c4_e56_4hit.xml + false + 1000.0 + 250 + + + Tracks_s123_c5_e46 + HPS_s123_c5_e46_4hit.xml + false + 1000.0 + 250 + + + Tracks_s567_c4_e123 + HPS_s567_c4_e123.xml + false + 1000.0 + 250 + + + Tracks_s456_c3_e127 + HPS_s456_c3_e127.xml + false + 1000.0 + 250 + + + 
Tracks_s356_c7_e124 + HPS_s356_c7_e124.xml + false + 1000.0 + 250 + + + Tracks_s235_c6_e147 + HPS_s235_c6_e147.xml + false + 1000.0 + 250 + + + Tracks_s234_c5_e157 + HPS_s234_c5_e167_4hit.xml + false + 1000.0 + 250 + + + + + EcalClustersCorr + GBLTracks + GBLTracks + TrackClusterMatcherMinDistance + 0 + 0.05 + 0 + 0.02 + -7.5 + 28 + 1000 + false + 0.0 + 7.0 + 7.0 + false + false + true + + + EcalClustersCorr + KalmanFullTracks + KalmanFullTracks + TrackClusterMatcherMinDistance + UnconstrainedV0Candidates_KF + UnconstrainedV0Vertices_KF + BeamspotConstrainedV0Candidates_KF + BeamspotConstrainedV0Vertices_KF + TargetConstrainedV0Candidates_KF + TargetConstrainedV0Vertices_KF + FinalStateParticles_KF + OtherElectrons_KF + 0 + 0.05 + 0 + 0.02 + -7.5 + 28 + 1000 + false + 0.0 + 7.0 + 7.0 + false + true + true + true + + + + + true + + + KalmanFullTracks + true + false + + + GBLTracks + false + false + + + ${outputFile}.slcio + + + + + ${outputFile}.root + true + 0.0 + + KalmanFullTracks + 0.1 + 4.8 + 9999 + true + true + + + + +