diff --git a/digi/src/main/java/org/hps/digi/DigitizationWithPulserDataMergingReadoutDriver.java b/digi/src/main/java/org/hps/digi/DigitizationWithPulserDataMergingReadoutDriver.java index 461b228464..2761babf71 100644 --- a/digi/src/main/java/org/hps/digi/DigitizationWithPulserDataMergingReadoutDriver.java +++ b/digi/src/main/java/org/hps/digi/DigitizationWithPulserDataMergingReadoutDriver.java @@ -65,7 +65,8 @@ public abstract class DigitizationWithPulserDataMergingReadoutDriver500 MeV + /** * Specifies the name of the subdetector geometry object. */ @@ -168,7 +169,9 @@ public abstract class DigitizationWithPulserDataMergingReadoutDriver debugCellIDWithHits=new ArrayList(); // ============================================================== // ==== Driver Parameters ======================================= @@ -394,16 +397,23 @@ public void process(EventHeader event) { // Get current raw hits in pulser data. Collection rawHits = ReadoutDataManager.getData(ReadoutDataManager.getCurrentTime(), ReadoutDataManager.getCurrentTime() + 2.0, PulserDataCollectionName, RawTrackerHit.class); - + if(debug)System.out.println("DigiReadout:: "+truthHitCollectionName +" local time = "+ReadoutDataManager.getCurrentTime()+" number of hits = "+hits.size()); + // Once an overlaid event is input, reset adcBufferMap to ensure that other overlaid events do not affect the current event. if(hits.size()!=0 || rawHits.size()!=0) { - // Get the set of all possible channel IDs. + // Get the set of all possible channel IDs. Set cells = getChannelIDs(); - + if(debug)System.out.println(this.getClass().getName()+":: resetting adc buffers at time = "+ReadoutDataManager.getCurrentTime()); // Reset adcBufferMap. for(Long cellID : cells) adcBufferMap.get(cellID).setAll((int) Math.round(getPedestalConditions(cellID))); - } + debugCellIDWithHits.clear(); + //if we are in no-spacing mode, just clear everything + if(doNoSpacing){ + resetBuffers(); + channelIntegrationSumMap.clear(); + } + } /* To merge MC data with pulser data, three different cases are handled separately. * Case 1: If pulser data does not have a channel in MC data, directly buffer samples @@ -432,9 +442,11 @@ public void process(EventHeader event) { // The hash map is used to check if MC data has a channel that is also in pulser data. Map hitCellIDMap = new HashMap(hits.size()); for(SimCalorimeterHit hit : hits) { - // Store the truth data. + // Store the truth data. Long hitCellID = hit.getCellID(); // For Ecal, cell ID is geometry ID; For hodo, cell ID is channel ID after hodoscope preprocessing - + if(debug) + System.out.println(this.getClass().getName()+":: process:: sim hit energy = "+hit.getRawEnergy()+" on cell = "+hitCellID); + ObjectRingBuffer hitBuffer = truthBufferMap.get(hitCellID); hitBuffer.addToCell(0, hit); @@ -499,7 +511,18 @@ public void process(EventHeader event) { // Get the truth hit energy deposition. 
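The hunk above refills every channel's ADC pipeline with its pedestal whenever a new overlaid event contributes hits, so samples left over from a previous overlay cannot leak into the current one, and in no-spacing mode it also clears the in-flight integration state. A minimal sketch of that idea, using stand-in names (IntRingBuffer, a local pedestal map) rather than the real driver members:

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

final class PedestalResetSketch {
    /** Tiny stand-in for the driver's per-channel ADC pipeline. */
    static final class IntRingBuffer {
        private final int[] samples;
        IntRingBuffer(int size) { samples = new int[size]; }
        void setAll(int value) { Arrays.fill(samples, value); }
    }

    private final Map<Long, IntRingBuffer> adcBufferMap = new HashMap<>();
    private final Map<Long, Double> pedestalMap = new HashMap<>();
    private final Map<Long, Integer> channelIntegrationSumMap = new HashMap<>();

    /** Refill every known channel with its pedestal; in no-spacing mode also drop any ongoing integration. */
    void resetForNewOverlay(Set<Long> cellIDs, boolean doNoSpacing) {
        for (Long cellID : cellIDs) {
            double pedestal = pedestalMap.getOrDefault(cellID, 0.0);
            adcBufferMap.computeIfAbsent(cellID, id -> new IntRingBuffer(64))
                        .setAll((int) Math.round(pedestal));
        }
        if (doNoSpacing) {
            channelIntegrationSumMap.clear();
        }
    }
}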
double energyAmplitude = hit.getRawEnergy(); - + if(energyAmplitude>debugEnergyThresh && debug){ + System.out.println(this.getClass().getName()+":: process:: Putting sim hits in adcBuffer cellID = "+hitCellID); + System.out.println(this.getClass().getName()+":: process:: adding hits to adcBuffer cellID = "+hitCellID); + + System.out.println(this.getClass().getName()+":: process:: ReadoutDataManager Time = "+ReadoutDataManager.getCurrentTime()); + System.out.println(this.getClass().getName()+":: process:: hit time = "+hit.getTime()); + System.out.println(this.getClass().getName()+":: process:: readouttime() = "+readoutTime()); + + System.out.println(this.getClass().getName()+":: process:: truth energy = "+energyAmplitude); + debugCellIDWithHits.add(hitCellID); + } + if(hitCellIDMap.get(hitCellID) == 1) { // If noise should be added, calculate a random value for // the noise and add it to the truth energy deposition. @@ -531,14 +554,20 @@ public void process(EventHeader event) { double sigma = getNoiseConditions(hitCellID); currentValue += RandomGaussian.getGaussian(0, sigma); } - + // An ADC value is not allowed to exceed 4095. If a // larger value is observed, 4096 (overflow) is given // instead. (This corresponds to >2 Volts.) int digitizedValue = Math.min((int) Math.round(pedestal + currentValue), (int) Math.pow(2, nBit)); - + if(energyAmplitude>debugEnergyThresh&&debug) + System.out.println(this.getClass().getName()+":: process: writing digitized value for sample = "+i + +" post-noise current value = "+currentValue + +"; digitized value = "+digitizedValue); + // Write this value to the ADC buffer. adcBuffer.setValue(i, digitizedValue); + // + } } @@ -602,12 +631,24 @@ public void process(EventHeader event) { // contain any newly integrated hits and perform integration. List newHits = null; List newTruthRelations = null; - while(ReadoutDataManager.getCurrentTime() - readoutTime() + ReadoutDataManager.getBeamBunchSize() >= READOUT_PERIOD) { - if(newHits == null) { newHits = new ArrayList(); } - if(newTruthRelations == null) { newTruthRelations = new ArrayList(); } - readHits(newHits, newTruthRelations); - readoutCounter++; - } + + if(doNoSpacing){ + if(newHits == null) { newHits = new ArrayList(); } + if(newTruthRelations == null) { newTruthRelations = new ArrayList(); } + readoutCounter=0; + for(int i = 0; i < pulserDataWindow; i++){ + // System.out.println(this.getClass().getName()+":: looping over pulse data window readoutCounter = "+readoutCounter); + readHits(newHits, newTruthRelations); + readoutCounter++; + } + }else{ + while(ReadoutDataManager.getCurrentTime() - readoutTime() + ReadoutDataManager.getBeamBunchSize() >= READOUT_PERIOD) { + if(newHits == null) { newHits = new ArrayList(); } + if(newTruthRelations == null) { newTruthRelations = new ArrayList(); } + readHits(newHits, newTruthRelations); + readoutCounter++; + } + } } // TODO: Document this. @@ -624,11 +665,17 @@ private void readHits(List newHits, List newTruth // Store the pedestal subtracted value so that it may // be checked against the integration threshold. int pedestalSubtractedValue = adcBuffer.getValue() - pedestal; - + if(pedestalSubtractedValue > integrationThreshold && debug){ + System.out.println(this.getClass().getName()+":: readHits:: Looping over adcBufferMap cellID = "+cellID); + System.out.println(this.getClass().getName()+":: readHits:: ped subtracted ADC counts = "+pedestalSubtractedValue); + } + // Get the total ADC value that has been integrated // on this channel. 
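The digitization step above adds Gaussian noise to the pulse amplitude, adds the pedestal, rounds to an ADC count, and clips at the converter maximum. A hedged, stand-alone sketch of that step; nBit = 12 and the unit handling are illustrative assumptions rather than the driver's exact constants:

import java.util.Random;

final class DigitizeSketch {
    private static final int N_BIT = 12;
    private static final Random RNG = new Random();

    /** Pedestal plus noise-smeared amplitude, rounded and clipped as in the hunk above. */
    static int digitize(double pedestal, double pulseAmplitude, double noiseSigma) {
        double current = pulseAmplitude + RNG.nextGaussian() * noiseSigma;
        int raw = (int) Math.round(pedestal + current);
        // Values beyond the 12-bit range are reported as 2^nBit (overflow marker).
        return Math.min(raw, (int) Math.pow(2, N_BIT));
    }

    public static void main(String[] args) {
        System.out.println(digitize(100.0, 250.0, 2.5));    // in-range sample
        System.out.println(digitize(100.0, 50000.0, 2.5));  // clipped sample
    }
}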
Integer sum = channelIntegrationSumMap.get(cellID); - + if(pedestalSubtractedValue >integrationThreshold && debug) + System.out.println(this.getClass().getName()+":: readHits:: sum = "+sum); + // If any readout hits exist on this channel, add the // current ADC values to them. @@ -641,7 +688,7 @@ private void readHits(List newHits, List newTruth // events (4 ns). This will indicate when the // integration started and, in turn, should end. channelIntegrationTimeMap.put(cellID, readoutCounter); - + if(debug)System.out.println(this.getClass().getName()+":: readHits:: Found a hit above threshold = "+cellID); // Integrate the ADC values for a number of // samples defined by NSB and threshold // crossing sample. @@ -649,7 +696,7 @@ private void readHits(List newHits, List newTruth for(int i = 0; i <= numSamplesBefore; i++) { sumBefore += adcBuffer.getValue(-(numSamplesBefore - i)); } - + if(debug)System.out.println(this.getClass().getName()+":: readHits:: sum before this sample = "+sumBefore); // This will represent the total integral sum at // the current point in time. Store it in the sum // buffer so that it may be incremented later as @@ -680,13 +727,16 @@ private void readHits(List newHits, List newTruth // If the integration sum is defined, then pulse // integration is ongoing. if(sum != null) { - // Three cases are treated separataly + if(debug)System.out.println(this.getClass().getName()+":: readHits:: integration is ongoing..."+cellID+" count = "+readoutCounter); + // Three cases are treated separataly // Case 1: CHANNEL_INTEGRATION_DEADTIME > numSamplesAfter // Case 2: CHANNEL_INTEGRATION_DEADTIME == numSamplesAfter // Case 3: CHANNEL_INTEGRATION_DEADTIME < numSamplesAfter if(CHANNEL_INTEGRATION_DEADTIME > numSamplesAfter) { // Case 1 //Continue integration until NSA, the threshold-crossing sample has been added before. - if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) { + if(debug)System.out.println(this.getClass().getName()+":: readHits::case 1: channel deadtime > numSamplesAfter "+cellID+" count = "+readoutCounter); + if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) { + if(debug)System.out.println(this.getClass().getName()+":: readHits::case 1: integration + numSamplesAfter - 1>= readoutCounter "+cellID+" count = "+readoutCounter); channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); // Add the new ADC sample. @@ -703,7 +753,8 @@ private void readHits(List newHits, List newTruth // to data manager. else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutCounter - 1) {//At NSA + 1, hit is added into data manager // Add a new calorimeter hit. - RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, + if(debug)System.out.println(this.getClass().getName()+":: readHits:: case 1: reached NSA + 1; adding new hit "+cellID+" count = "+readoutCounter); + RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, 64 * channelIntegrationTimeMap.get(cellID)); newHits.add(newHit); // Cycle-clock for events is 2 ns, while cycle-clock for samples is 4 ns @@ -727,8 +778,10 @@ else if (channelIntegrationTimeMap.get(cellID) + CHANNEL_INTEGRATION_DEADTIME - } // Case 1 ends else if(CHANNEL_INTEGRATION_DEADTIME == numSamplesAfter){ // Case 2 // Continue integration until NSA, the threshold-crossing sample has been added before. 
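The readHits() logic above opens an integration window when a pedestal-subtracted sample crosses the threshold, seeding the running sum with the NSB samples before the crossing plus the crossing sample itself. A simplified sketch of just that seeding step, with a plain array standing in for the ring buffer:

final class ThresholdCrossingSketch {
    /**
     * Returns the seed sum for a new integration window, or null if the current
     * sample does not cross the threshold. 'current' is the index of the sample
     * being inspected; numSamplesBefore is NSB in sample units.
     */
    static Integer openIntegration(int[] pipeline, int current, int pedestal,
                                   int integrationThreshold, int numSamplesBefore) {
        int pedestalSubtracted = pipeline[current] - pedestal;
        if (pedestalSubtracted <= integrationThreshold) {
            return null;
        }
        int sumBefore = 0;
        for (int i = 0; i <= numSamplesBefore; i++) {
            int index = current - (numSamplesBefore - i);
            if (index >= 0) {
                sumBefore += pipeline[index];
            }
        }
        return sumBefore; // seed value for channelIntegrationSumMap
    }
}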
+ if(debug)System.out.println(this.getClass().getName()+":: readHits::case 2: channel deadtime == numSamplesAfter "+cellID+" count = "+readoutCounter); if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) { channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); + if(debug)System.out.println(this.getClass().getName()+":: readHits::Case2: integration + numSamplesAfter - 1>= readoutCounter "+cellID+" count = "+readoutCounter); // Add the new ADC sample. channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0)); @@ -743,6 +796,7 @@ else if(CHANNEL_INTEGRATION_DEADTIME == numSamplesAfter){ // Case 2 // to data manager. else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutCounter - 1) {//At NSA + 1, hit is added into data manager // Add a new calorimeter hit. + if(debug)System.out.println(this.getClass().getName()+":: readHits:: case 2: reached NSA + 1; adding new hit "+cellID+" count = "+readoutCounter); RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, 64 * channelIntegrationTimeMap.get(cellID)); newHits.add(newHit); @@ -761,8 +815,10 @@ else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutC } } // Case 2 ends else { // Case 3 + if(debug)System.out.println(this.getClass().getName()+":: readHits::case 3: channel deadtime < numSamplesAfter "+cellID+" count = "+readoutCounter); if (channelIntegrationTimeMap.get(cellID) + CHANNEL_INTEGRATION_DEADTIME - 1 >= readoutCounter) { - // Continue integration until CHANNEL_INTEGRATION_DEADTIME + if(debug)System.out.println(this.getClass().getName()+":: readHits::Case3: integration + DEADTIME - 1>= readoutCounter "+cellID+" count = "+readoutCounter); + // Continue integration until CHANNEL_INTEGRATION_DEADTIME channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); // Add the new ADC sample. @@ -779,9 +835,12 @@ else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutC flagStartNewIntegration.put(cellID, true); } else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutCounter) { + if(debug)System.out.println(this.getClass().getName()+":: readHits::Case3: integration + numSamplesAfter - 1>= readoutCounter "+cellID+" count = "+readoutCounter); if(flagStartNewIntegration.get(cellID) == true) { // Flag for previous sample is true + if(debug)System.out.println(this.getClass().getName()+":: readHits::Case3: flagStartNewIntegration = true "+cellID+" count = "+readoutCounter); if(pedestalSubtractedValue <= integrationThreshold) { // If sample is less than threshold, then do not start new integration channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); + if(debug)System.out.println(this.getClass().getName()+":: readHits::Case3: too small...don't start new integration "+cellID+" count = "+readoutCounter); // Add the new ADC sample. channelIntegrationSumMap.put(cellID, sum + adcBuffer.getValue(0)); @@ -794,6 +853,7 @@ else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutC } else { // if sample is larger than threshold, a hit is added into data manager and start new integration // Add a new calorimeter hit. 
+ if(debug)System.out.println(this.getClass().getName()+":: readHits:: case 3: new hit starting, storing old hit; adding new hit "+cellID+" count = "+readoutCounter); RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, 64 * channelIntegrationTimeMap.get(cellID)); newHits.add(newHit); @@ -850,6 +910,7 @@ else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutC } } else { // Flag for previous sample is false + if(debug)System.out.println(this.getClass().getName()+":: readHits::Case3: flagStartNewIntegration = false "+cellID+" count = "+readoutCounter); channelIntegrationADCMap.get(cellID).add(adcBuffer.getValue(0)); // Add the new ADC sample. @@ -865,8 +926,9 @@ else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 >= readoutC } } else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutCounter - 1) {//If reach NSA + 1, hit is added into data manager, and flag is set as false - // Add a new calorimeter hit. - RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, + if(debug)System.out.println(this.getClass().getName()+":: readHits:: case 3: reached NSA + 1; adding new hit "+cellID+" count = "+readoutCounter); + // Add a new calorimeter hit. + RawCalorimeterHit newHit = new BaseRawCalorimeterHit(cellID, sum, 64 * channelIntegrationTimeMap.get(cellID)); newHits.add(newHit); integrationTime = channelIntegrationTimeMap.get(cellID) * 4 + 2; @@ -905,9 +967,21 @@ else if (channelIntegrationTimeMap.get(cellID) + numSamplesAfter - 1 == readoutC // Write the trigger path output data to the readout data // manager. Truth data is optional. - - + + //if running no-spacing, set the time to current time+readout + //I'm just replacing integration time here to make it easier + //note...I have no idea how using integration time works + //in the "spacing" readout. It's in local units, but the lookup + //in GTPClusters is in global??? I'm missing something + + if(doNoSpacing) + integrationTime=readoutTime()+readoutCounter * READOUT_PERIOD; + + if(debug && newHits.size()>0) + System.out.println("DigiReadout:: "+ outputHitCollectionName+" time = "+integrationTime+" adding trigger hits = "+newHits.size()); ReadoutDataManager.addData(outputHitCollectionName, integrationTime, newHits, RawCalorimeterHit.class); + if(doNoSpacing) + newHits.clear(); if(writeTriggerTruth) { ReadoutDataManager.addData(triggerTruthRelationsCollectionName, integrationTime, newTruthRelations, LCRelation.class); } @@ -1046,7 +1120,8 @@ protected Collection> getOnTriggerData(double triggerTime) } else { collectionsList = new ArrayList>(2); } - + if(debug) + System.out.println(this.getClass().getName()+":: got a trigger at time = "+triggerTime); // Readout drivers need to produce readout timestamps to // specify when they occurred in terms of simulation time. 
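In all three deadtime cases the window keeps accumulating samples until NSA is reached, and the finished hit is emitted one sample later, time-stamped from the threshold-crossing sample. A sketch of that close-out bookkeeping with placeholder types (a plain record rather than BaseRawCalorimeterHit), and with the deadtime handling omitted:

final class IntegrationCloseOutSketch {
    static final class PendingHit {
        final long cellID; final int adcSum; final int time;
        PendingHit(long cellID, int adcSum, int time) {
            this.cellID = cellID; this.adcSum = adcSum; this.time = time;
        }
    }

    /**
     * Called once per 4 ns sample. Returns the finished hit on the first sample
     * after the window (crossing sample + NSA - 1); otherwise returns null while
     * the window is still open and the caller keeps adding samples to 'sum'.
     */
    static PendingHit closeIfDone(long cellID, int sum, int crossingSample,
                                  int numSamplesAfter, int readoutCounter) {
        int lastSample = crossingSample + numSamplesAfter - 1;
        if (readoutCounter <= lastSample) {
            return null;                    // still integrating
        }
        if (readoutCounter == lastSample + 1) {
            // One sample past the window: emit, using the diff's convention that
            // the stored hit time is 64 * (threshold-crossing sample index).
            return new PendingHit(cellID, sum, 64 * crossingSample);
        }
        return null;                        // window was already closed earlier
    }
}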
// The readout timestamp for the subdetector data should be @@ -1080,6 +1155,9 @@ protected Collection> getOnTriggerData(double triggerTime) List readoutHits = null; if(mode == 1) { readoutHits = getMode1Hits(triggerTime); } else { readoutHits = getMode3Hits(triggerTime); } + if(debug) + System.out.println(this.getClass().getName()+":: number of readoutHits = "+readoutHits.size()); + TriggeredLCIOData readoutData = new TriggeredLCIOData(mode13HitCollectionParams); readoutData.getData().addAll(readoutHits); collectionsList.add(readoutData); @@ -1159,7 +1237,10 @@ protected double getReadoutWindowBefore() { @Override protected double getTimeDisplacement() { - return localTimeOffset; + if(doNoSpacing) + return 0; + else + return localTimeOffset; } @Override @@ -1272,7 +1353,8 @@ private List getMode1Hits(double triggerTime) { // Check that there is a threshold-crossing at some // point in the ADC buffer. if(adcValues[i] > getPedestalConditions(cellID) + integrationThreshold) { - isAboveThreshold = true; + if(debug)System.out.println(this.getClass().getName()+":: found an adc value above threshold for cellID = "+cellID); + isAboveThreshold = true; break; } } @@ -1389,14 +1471,23 @@ private short[] getTriggerADCValues(long cellID, double triggerTime) { // Calculate the offset between the current position and the // trigger time. int readoutLatency = getReadoutLatency(triggerTime); - // Get the ADC pipeline. IntegerRingBuffer pipeline = adcBufferMap.get(cellID); - + if(debug && debugCellIDWithHits.contains(cellID)){ + System.out.println(this.getClass().getName()+":: getting triggered adc values with latency = "+readoutLatency+" for cellID = "+cellID); + /* + for(int k=0; k(); } + if(debug) + System.out.println(this.getClass().getName()+":: adding pulser-data strip hit for channel = "+channel+" at time = "+pulserHit.time); pulserHitQueues[channel].add(pulserHit); } @@ -306,7 +319,10 @@ public void process(EventHeader event) { if(hitQueues[channel] == null) { hitQueues[channel] = new PriorityQueue(); } - hitQueues[channel].add(stripHit); + if(debug) + System.out.println(this.getClass().getName()+":: adding simulated strip hit for channel = "+channel+" at time = "+stripHit.time); + + hitQueues[channel].add(stripHit); } // Hits older than a certain time frame should no longer @@ -628,9 +644,15 @@ protected Collection> getOnTriggerData(double triggerTime) List truthHits = new ArrayList(); List trueHitRelations = new ArrayList(); // Calculate time of first sample - double firstSample = Math.floor(((triggerTime + 256) - readoutLatency - readoutOffset) / HPSSVTConstants.SAMPLING_INTERVAL) + double firstSample = Math.floor(((triggerTime + triggerOffset) - readoutLatency - readoutOffset) / HPSSVTConstants.SAMPLING_INTERVAL) * HPSSVTConstants.SAMPLING_INTERVAL + readoutOffset; - + if(debug){ + System.out.println(this.getClass().getName()+":: trigger time = "+triggerTime+ + "; trigger offset = "+triggerOffset+"; readout latency = "+readoutLatency+ + "; readout offset = "+readoutOffset); + + System.out.println(this.getClass().getName()+":: svt first sample time for trigger = "+firstSample); + } List processedHits = new ArrayList(); for(SiSensor sensor : sensors) { @@ -693,11 +715,19 @@ protected Collection> getOnTriggerData(double triggerTime) // across all size samples. StringBuffer signalBuffer = new StringBuffer("\t\t\t\tSample Pulse :: ["); for(int sampleN = 0; sampleN < 6; sampleN++) { + //add the time offset to this. 
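The replaced firstSample line above snaps the offset-corrected trigger time onto the SVT sampling grid; the only functional change in that hunk is that the hard-coded 256 ns becomes the configurable triggerOffset. A sketch of the same arithmetic, assuming a 24 ns sampling interval (the real constant lives in HPSSVTConstants and is not restated in the diff):

final class SvtFirstSampleSketch {
    // Assumed value for illustration only.
    private static final double SAMPLING_INTERVAL = 24.0; // ns

    /** Snap the corrected trigger time onto the SVT sampling grid. */
    static double firstSampleTime(double triggerTime, double triggerOffset,
                                  double readoutLatency, double readoutOffset) {
        return Math.floor(((triggerTime + triggerOffset) - readoutLatency - readoutOffset)
                / SAMPLING_INTERVAL) * SAMPLING_INTERVAL + readoutOffset;
    }

    public static void main(String[] args) {
        // Made-up numbers, just to show the grid snapping.
        System.out.println(firstSampleTime(4123.0, 256.0, 248.0, 6.0));
    }
}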
+ // double sampleTime = firstSample + sampleN * HPSSVTConstants.SAMPLING_INTERVAL-timeOffset; double sampleTime = firstSample + sampleN * HPSSVTConstants.SAMPLING_INTERVAL; shape.setParameters(channel, (HpsSiSensor) sensor); double signalAtTime = hit.amplitude * shape.getAmplitudePeakNorm(sampleTime - hit.time); - totalContrib += signalAtTime; + + totalContrib += signalAtTime; signal[sampleN] += signalAtTime; + if(debug){ + System.out.println(this.getClass().getName()+":: making pulse: sample time = " + +sampleTime+"; hit time = "+hit.time); + System.out.println(this.getClass().getName()+":: signal from pulse @ time() = "+signalAtTime+"; total ADC = "+signal[sampleN]); + } meanNoise += ((HpsSiSensor) sensor).getNoise(channel, sampleN); signalBuffer.append(signalAtTime + " (" + sampleTime + ")"); @@ -736,6 +766,8 @@ protected Collection> getOnTriggerData(double triggerTime) // be passed through to readout. if(readoutCuts(hit)) { // Add the hit to the readout hits collection. + if(debug) + System.out.println(this.getClass().getName()+":: adding svt hit to triggered event"); hits.add(hit); // Associate the truth hits with the raw hit and // add them to the truth hits collection. diff --git a/ecal-readout-sim/src/main/java/org/hps/readout/RawConverterReadoutDriver.java b/ecal-readout-sim/src/main/java/org/hps/readout/RawConverterReadoutDriver.java index 71a2c36b29..26f4589ef8 100755 --- a/ecal-readout-sim/src/main/java/org/hps/readout/RawConverterReadoutDriver.java +++ b/ecal-readout-sim/src/main/java/org/hps/readout/RawConverterReadoutDriver.java @@ -48,7 +48,9 @@ public abstract class RawConverterReadoutDriver extends ReadoutDriver { * conditions database should be skipped when producing hits. */ protected boolean skipBadChannels = false; - + + private double checkAheadTime = 4.0; + protected RawConverterReadoutDriver(String defaultInputCollectionName, String defaultOutputCollectionName) { inputCollectionName = defaultInputCollectionName; outputCollectionName = defaultOutputCollectionName; @@ -71,14 +73,20 @@ public final void detectorChanged(Detector detector) { public final void process(EventHeader event) { // Check the data management driver to determine whether the // input collection is available or not. - if(!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + 4.0)) { + if(doNoSpacing) + localTime=ReadoutDataManager.getCurrentTime(); // just overwrite local time on every event + if(!doNoSpacing&&!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + checkAheadTime)) { + if(debug)System.out.println("Skipping RawConverterReadout because collection = "+inputCollectionName+" doesn't exist at "+(localTime+ checkAheadTime)); return; } // Get all of the raw hits in the current clock-cycle. - Collection rawHits = ReadoutDataManager.getData(localTime, localTime + 4.0, inputCollectionName, RawCalorimeterHit.class); + Collection rawHits = ReadoutDataManager.getData(localTime, localTime + checkAheadTime, inputCollectionName, RawCalorimeterHit.class); - // Increment the local time. + + if(debug)System.out.println(this.getClass().getName()+":: collection = "+inputCollectionName+" has "+rawHits.size()+" found between time = "+localTime+" and "+(localTime+checkAheadTime)); + + // Increment the local time. 
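Each strip hit above is turned into six ADC samples by evaluating a peak-normalized pulse shape at the sample times relative to the hit time and accumulating the contributions per sample. A sketch using an assumed CR-RC shape (t/tau * exp(1 - t/tau)); the real driver evaluates the sensor's fitted shape via shape.getAmplitudePeakNorm(...), and the 24 ns spacing and 50 ns shaping time below are placeholders:

final class PulseSamplingSketch {
    private static final double SAMPLING_INTERVAL = 24.0; // ns, assumed for illustration
    private static final int N_SAMPLES = 6;

    /** Peak-normalized CR-RC shape: 1.0 at t = shapingTime, 0 before the hit. */
    static double shape(double t, double shapingTime) {
        if (t <= 0) { return 0.0; }
        double x = t / shapingTime;
        return x * Math.exp(1.0 - x);
    }

    /** Accumulate one hit's contribution into the six-sample pulse. */
    static void addHit(double[] signal, double firstSampleTime,
                       double hitTime, double amplitude, double shapingTime) {
        for (int sampleN = 0; sampleN < N_SAMPLES; sampleN++) {
            double sampleTime = firstSampleTime + sampleN * SAMPLING_INTERVAL;
            signal[sampleN] += amplitude * shape(sampleTime - hitTime, shapingTime);
        }
    }

    public static void main(String[] args) {
        double[] signal = new double[N_SAMPLES];
        addHit(signal, 0.0, -10.0, 1200.0, 50.0); // made-up hit
        System.out.println(java.util.Arrays.toString(signal));
    }
}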
localTime += 4.0; // Pass the raw hits to the raw converter to obtain proper @@ -96,11 +104,11 @@ public final void process(EventHeader event) { if(skipBadChannels && isBadChannel(newHit.getCellID())) { continue; } - + if(debug)System.out.println(this.getClass().getName()+":: made newHit with time = "+newHit.getTime()); // Add the new hit. newHits.add(newHit); } - + if(debug)System.out.println(this.getClass().getName()+":: outputting collection = "+outputCollectionName+" with size = "+newHits.size()); // Add the calorimeter hit collection to the data manager. ReadoutDataManager.addData(outputCollectionName, newHits, CalorimeterHit.class); } @@ -246,4 +254,12 @@ public void setSkipBadChannels(boolean state) { public void setReadoutWindow(int window) { getConverter().setWindowSamples(window); } + /** + * Sets the amount of time (+ ns) to check for possible + * seed clusters. + * @param value - time in ns + */ + public void setCheckAheadTime(double value) { + checkAheadTime = value; + } } diff --git a/ecal-readout-sim/src/main/java/org/hps/readout/ecal/updated/GTPClusterReadoutDriver.java b/ecal-readout-sim/src/main/java/org/hps/readout/ecal/updated/GTPClusterReadoutDriver.java index 84ef87c258..1a86cf155c 100755 --- a/ecal-readout-sim/src/main/java/org/hps/readout/ecal/updated/GTPClusterReadoutDriver.java +++ b/ecal-readout-sim/src/main/java/org/hps/readout/ecal/updated/GTPClusterReadoutDriver.java @@ -99,6 +99,15 @@ public class GTPClusterReadoutDriver extends ReadoutDriver { * This is calculated automatically. */ private double localTimeDisplacement = 0; + + /** + * The amount of time (ns) to check ahead/behind + * for ecal clusters. + * This can be large for no-spacing running (like 192) + * but should be 4.0 for spaced running + */ + + private double checkAheadTime = 4.0; // ============================================================== // ==== Driver Parameters ======================================= @@ -185,19 +194,23 @@ public void detectorChanged(Detector etector) { @Override public void process(EventHeader event) { - // Check the data management driver to determine whether the + + if(doNoSpacing) + localTime=ReadoutDataManager.getCurrentTime(); // just overwrite local time on every event + // Check the data management driver to determine whether the // input collection is available or not. - if(!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + temporalWindow + 4.0)) { - return; + if(!doNoSpacing&&!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + temporalWindow + checkAheadTime)) { + if(debug)System.out.println("Skipping GTP Readout with because collection doesn't exist at "+(localTime+temporalWindow + checkAheadTime)); + return; } // Get the hits that occur during the present clock-cycle, as // well as the hits that occur in the verification window // both before and after the current clock-cycle. // TODO: Simplify this? 
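Both the converter and cluster drivers now gate their processing on whether the input collection has been buffered far enough ahead (localTime + checkAheadTime), except in no-spacing mode, where the local clock is simply pinned to the manager's current time. A schematic of that gate, with a placeholder predicate standing in for the ReadoutDataManager.checkCollectionStatus(...) call:

final class CheckAheadGateSketch {
    private double localTime = 0.0;
    private double checkAheadTime = 4.0; // ns; configurable via setCheckAheadTime(...)
    private boolean doNoSpacing = false;

    /** Stand-in for ReadoutDataManager.checkCollectionStatus(name, time). */
    private boolean collectionReadyUpTo(String collectionName, double time) {
        return true; // placeholder; the real call compares against buffered driver output
    }

    /** Returns true when this cycle should be processed, false when it should be skipped. */
    boolean shouldProcess(String inputCollectionName, double managerCurrentTime) {
        if (doNoSpacing) {
            localTime = managerCurrentTime; // just track the manager's clock
            return true;
        }
        return collectionReadyUpTo(inputCollectionName, localTime + checkAheadTime);
    }
}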
- Collection seedCandidates = ReadoutDataManager.getData(localTime, localTime + 4.0, inputCollectionName, CalorimeterHit.class); + Collection seedCandidates = ReadoutDataManager.getData(localTime, localTime + checkAheadTime, inputCollectionName, CalorimeterHit.class); Collection foreHits = ReadoutDataManager.getData(localTime - temporalWindow, localTime, inputCollectionName, CalorimeterHit.class); - Collection postHits = ReadoutDataManager.getData(localTime + 4.0, localTime + temporalWindow + 4.0, inputCollectionName, CalorimeterHit.class); + Collection postHits = ReadoutDataManager.getData(localTime + checkAheadTime, localTime + temporalWindow + checkAheadTime, inputCollectionName, CalorimeterHit.class); // Increment the local time. localTime += 4.0; @@ -208,16 +221,22 @@ public void process(EventHeader event) { allHits.addAll(foreHits); allHits.addAll(seedCandidates); allHits.addAll(postHits); - + if(debug){ + System.out.println(this.getClass().getName()+":: "+inputCollectionName+":: local time = "+localTime+ + " temporalWindow = "+temporalWindow+" checkAheadTime = "+checkAheadTime); + System.out.println(this.getClass().getName()+":: "+inputCollectionName+":: current time = "+ReadoutDataManager.getCurrentTime()+" number of seeds = "+seedCandidates.size()+"; all hits = "+allHits.size()); + } // Store newly created clusters. List gtpClusters = new ArrayList(); // Iterate over all seed hit candidates. seedLoop: for(CalorimeterHit seedCandidate : seedCandidates) { + if(debug)System.out.println(this.getClass().getName()+":: looping through seeds: seed energy = "+seedCandidate.getRawEnergy()); // A seed candidate must meet a minimum energy cut to be // considered for clustering. if(seedCandidate.getRawEnergy() < seedEnergyThreshold) { + if(debug)System.out.println(this.getClass().getName()+":: failed seed energy: threshold = "+seedEnergyThreshold); continue seedLoop; } @@ -254,7 +273,8 @@ public void process(EventHeader event) { // cluster should be formed. gtpClusters.add(createBasicCluster(seedCandidate, clusterHits)); } - + + if(debug)System.out.println(this.getClass().getName()+":: adding gtpClusters to data manager size = "+gtpClusters.size()); // Pass the clusters to the data management driver. ReadoutDataManager.addData(outputCollectionName, gtpClusters, Cluster.class); } @@ -336,7 +356,10 @@ protected Collection> getOnTriggerData(double triggerTime) @Override protected double getTimeDisplacement() { - return localTimeDisplacement; + if(doNoSpacing) + return 0; + else + return localTimeDisplacement; } @Override @@ -384,5 +407,13 @@ public void setClusterWindow(int value) { */ public void setSeedEnergyThreshold(double value) { seedEnergyThreshold = value; - } + } + /** + * Sets the amount of time (+/-ns) to check for possible + * seed clusters. 
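A GTP seed must pass the seed-energy threshold and then survive comparison against the neighboring hits gathered from the fore/post verification windows. A condensed sketch of that selection (energy threshold plus local-maximum test); the neighbor lookup is reduced to a boolean predicate rather than the calorimeter geometry, and the hit record is a stand-in for CalorimeterHit:

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.function.BiPredicate;

final class GtpSeedSketch {
    static final class Hit {
        final long cellID; final double energy; final double time;
        Hit(long cellID, double energy, double time) {
            this.cellID = cellID; this.energy = energy; this.time = time;
        }
    }

    /** Keep seed candidates that pass the threshold and are not beaten by any neighbor. */
    static List<Hit> selectSeeds(Collection<Hit> seedCandidates, Collection<Hit> allHits,
                                 double seedEnergyThreshold, BiPredicate<Hit, Hit> isNeighbor) {
        List<Hit> seeds = new ArrayList<>();
        seedLoop:
        for (Hit seed : seedCandidates) {
            if (seed.energy < seedEnergyThreshold) {
                continue seedLoop;
            }
            for (Hit other : allHits) {
                if (other != seed && isNeighbor.test(seed, other) && other.energy > seed.energy) {
                    continue seedLoop; // a more energetic neighbor exists; not a seed
                }
            }
            seeds.add(seed);
        }
        return seeds;
    }
}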
+ * @param value - time in ns + */ + public void setCheckAheadTime(double value) { + checkAheadTime = value; + } } diff --git a/ecal-readout-sim/src/main/java/org/hps/readout/hodoscope/HodoscopePatternReadoutDriver.java b/ecal-readout-sim/src/main/java/org/hps/readout/hodoscope/HodoscopePatternReadoutDriver.java index cf74aaf81a..9525a47a8c 100644 --- a/ecal-readout-sim/src/main/java/org/hps/readout/hodoscope/HodoscopePatternReadoutDriver.java +++ b/ecal-readout-sim/src/main/java/org/hps/readout/hodoscope/HodoscopePatternReadoutDriver.java @@ -145,11 +145,14 @@ public void actionPerformed(ActionEvent e) { } @Override - public void process(EventHeader event) { - + public void process(EventHeader event) { + if(doNoSpacing) + localTime=ReadoutDataManager.getCurrentTime(); // just overwrite local time on every event + // Check the data management driver to determine whether the // input collection is available or not. - if (!ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + localTimeDisplacement)) { + if (!doNoSpacing && !ReadoutDataManager.checkCollectionStatus(inputCollectionName, localTime + localTimeDisplacement)) { + if(debug)System.out.println(this.getClass().getName()+":: "+inputCollectionName+" doesn't exist at time = "+(localTime + localTimeDisplacement)); return; } @@ -162,7 +165,7 @@ public void process(EventHeader event) { Collection fadcHits = ReadoutDataManager.getData( localTime - (persistentTime - timeEarlierThanEcal), localTime + timeEarlierThanEcal + 4.0, inputCollectionName, CalorimeterHit.class); - + if(debug)System.out.println(this.getClass().getName()+":: number of fadcHits found = "+fadcHits.size()); // Increment the local time. localTime += 4.0; @@ -279,6 +282,7 @@ public void process(EventHeader event) { } // At leaset there is a hodo tilt/cluster hit in any layer, then the pattern list is added into data manager + if(flag == true && debug) if(debug)System.out.println(this.getClass().getName()+":: outputting "+outputCollectionName+" with size = "+hodoPatterns.size()); if(flag == true) ReadoutDataManager.addData(outputCollectionName, hodoPatterns, HodoscopePattern.class); } @@ -345,7 +349,10 @@ private void populateChannelCollections() { @Override protected double getTimeDisplacement() { - return localTimeDisplacement; + if(doNoSpacing) + return 0; + else + return localTimeDisplacement; } @Override @@ -421,4 +428,5 @@ public void setTimeEarlierThanEcal(double timeEarlierThanEcal) { public void setGainFactor(double gainFactor) { this.gainFactor = gainFactor; } + } diff --git a/ecal-readout-sim/src/main/java/org/hps/readout/trigger2019/SinglesTrigger2019ReadoutDriver.java b/ecal-readout-sim/src/main/java/org/hps/readout/trigger2019/SinglesTrigger2019ReadoutDriver.java index ec49be48d7..e8e7ed33dc 100644 --- a/ecal-readout-sim/src/main/java/org/hps/readout/trigger2019/SinglesTrigger2019ReadoutDriver.java +++ b/ecal-readout-sim/src/main/java/org/hps/readout/trigger2019/SinglesTrigger2019ReadoutDriver.java @@ -139,10 +139,13 @@ public void detectorChanged(Detector detector) { @Override public void process(EventHeader event) { // Check that clusters are available for the trigger. 
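The hodoscope pattern driver collects FADC hits from an asymmetric window around its local clock, [localTime - (persistentTime - timeEarlierThanEcal), localTime + timeEarlierThanEcal + 4.0). A small sketch of pulling timed objects out of a buffer for such a half-open range; the Timed wrapper mimics, but is not, the manager's TimedList:

import java.util.ArrayList;
import java.util.List;

final class TimedWindowSketch {
    static final class Timed<T> {
        final double time; final T value;
        Timed(double time, T value) { this.time = time; this.value = value; }
    }

    /** Return everything with start <= time < end, mimicking ReadoutDataManager.getData(...). */
    static <T> List<T> inWindow(List<Timed<T>> buffer, double start, double end) {
        List<T> out = new ArrayList<>();
        for (Timed<T> entry : buffer) {
            if (entry.time >= start && entry.time < end) {
                out.add(entry.value);
            }
        }
        return out;
    }

    /** The hodoscope pattern driver's asymmetric selection window. */
    static <T> List<T> hodoWindow(List<Timed<T>> buffer, double localTime,
                                  double persistentTime, double timeEarlierThanEcal) {
        return inWindow(buffer,
                localTime - (persistentTime - timeEarlierThanEcal),
                localTime + timeEarlierThanEcal + 4.0);
    }
}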
+ // System.out.println(this.getClass().getName()+":: starting trigger determination"); Collection clusters = null; Collection hodoPatterns = null; ArrayList hodoPatternList = new ArrayList<>(); - + if(doNoSpacing) + localTime=ReadoutDataManager.getCurrentTime(); // just overwrite local time on every event + if(triggerType.equals("singles3")) { if(ReadoutDataManager.checkCollectionStatus(inputCollectionNameEcal, localTime) && ReadoutDataManager.checkCollectionStatus(inputCollectionNameHodo, localTime)) { clusters = ReadoutDataManager.getData(localTime, localTime + 4.0, inputCollectionNameEcal, Cluster.class); @@ -166,14 +169,17 @@ public void process(EventHeader event) { } else { return; } } - + // Track whether or not a trigger was seen. boolean triggered = false; // There is no need to perform the trigger cuts if the // trigger is in dead time, as no trigger may be issued // regardless of the outcome. - if(isInDeadTime()) { return; } + if(isInDeadTime()) { + if(debug)System.out.println(this.getClass().getName()+":: trigger is in dead-time!!!"); + return; + } // Record top/bot status for singles triggers List topBot = new ArrayList(); @@ -185,7 +191,10 @@ public void process(EventHeader event) { // not available during readout, so crystal indices must // be obtained directly from the calorimeter geometry. java.awt.Point ixy = ecal.getCellIndices(cluster.getCalorimeterHits().get(0).getCellID()); - + if(debug)System.out.println(this.getClass().getName()+ + ":: looping over clusters; number of hits = "+TriggerModule2019.getClusterHitCount(cluster) + +" seed energy value = " + TriggerModule2019.getValueClusterSeedEnergy(cluster) + +" total energy of cluster = "+ TriggerModule2019.getValueClusterTotalEnergy(cluster)); // Populate the uncut plots. clusterSeedEnergy[NO_CUTS].fill(TriggerModule2019.getValueClusterSeedEnergy(cluster)); clusterTotalEnergy[NO_CUTS].fill(TriggerModule2019.getValueClusterTotalEnergy(cluster)); @@ -194,17 +203,22 @@ public void process(EventHeader event) { // Perform the hit count cut. if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_HIT_COUNT_LOW_EN) && !triggerModule.clusterHitCountCut(cluster)) { + if(debug)System.out.println(this.getClass().getName()+":: did not satisfy cluster hit cout (low)"); continue; } // Perform the cluster energy cut. if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_TOTAL_ENERGY_LOW_EN) && !triggerModule.clusterTotalEnergyCutLow(cluster)) { + if(debug)System.out.println(this.getClass().getName()+":: did not satisfy cluster energy cut (low)"); continue; } if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_TOTAL_ENERGY_HIGH_EN) && !triggerModule.clusterTotalEnergyCutHigh(cluster)) { + if(debug)System.out.println(this.getClass().getName()+":: did not satisfy cluster energy cout (high)"); continue; } + + if(debug)System.out.println(this.getClass().getName()+":: made it past basic cluster cuts"); // In the setup calorimeter geometry, range of X coordinates is [-23, -1] and [1, 23]. // The hardware uses cluster X coordinates [-22,0] and [1,23]. @@ -216,19 +230,30 @@ public void process(EventHeader event) { // XMin is at least 0. if(!triggerModule.getCutEn(TriggerModule2019.SINGLES_MOLLERMODE_EN)) { if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_XMIN_EN) && !triggerModule.clusterXMinCut(clusterX)) { + if(debug)System.out.println(this.getClass().getName()+":: did not satisfy cluster x cut (low)"); continue; } - + if(debug)System.out.println(this.getClass().getName()+":: made it past xMin cut "); // XMin cut has been applied. 
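The singles-trigger loop applies its cuts one after another and skips the cluster on the first failure; further down, the hodoscope-matching cut is additionally guarded by a check that a pattern list actually exists in the cycle. A compressed sketch of that cut chain, where the cut values and the PDE functional form are placeholders rather than the 2019 trigger configuration:

final class SinglesCutChainSketch {
    static final class ClusterInfo {
        final int hitCount; final double totalEnergy; final int x;
        ClusterInfo(int hitCount, double totalEnergy, int x) {
            this.hitCount = hitCount; this.totalEnergy = totalEnergy; this.x = x;
        }
    }

    static boolean passesSinglesCuts(ClusterInfo c, boolean hodoPatternsAvailable,
                                     boolean hodoMatched) {
        if (c.hitCount < 2) { return false; }          // hit-count cut (low), placeholder value
        if (c.totalEnergy < 0.4) { return false; }     // total-energy cut (low), GeV, placeholder
        if (c.totalEnergy > 2.7) { return false; }     // total-energy cut (high), GeV, placeholder
        if (c.x < 5) { return false; }                 // xMin cut, placeholder
        // PDE cut: require the energy to exceed a position-dependent floor
        // (placeholder polynomial, not the real trigger parameters).
        double pdeFloor = 0.5 + 0.01 * c.x + 0.001 * c.x * c.x;
        if (c.totalEnergy < pdeFloor) { return false; }
        // Hodoscope matching is only checked when patterns exist in this cycle.
        if (hodoPatternsAvailable && !hodoMatched) { return false; }
        return true;
    }
}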
if(triggerModule.getCutEn(TriggerModule2019.CLUSTER_PDE_EN) && !triggerModule.clusterPDECut(cluster, clusterX)) { + if(debug)System.out.println(this.getClass().getName()+":: did not satisfy cluster PDE cut"); continue; - } - } - - if(triggerModule.getCutEn(TriggerModule2019.SINGLES_L1L2ECAL_MATCHING_EN) && !triggerModule.geometryMatchingCut(clusterX, ixy.y, hodoPatternList)) { - continue; + } + if(debug)System.out.println(this.getClass().getName()+":: made it past PDE cut "); + } + // if(triggerModule.getCutEn(TriggerModule2019.SINGLES_L1L2ECAL_MATCHING_EN) && !triggerModule.geometryMatchingCut(clusterX, ixy.y, hodoPatternList)) { + //put in check for hodoscope pattern collection size here + if(triggerModule.getCutEn(TriggerModule2019.SINGLES_L1L2ECAL_MATCHING_EN) && hodoPatterns.size()>0){ + if(!triggerModule.geometryMatchingCut(clusterX, ixy.y, hodoPatternList)) { + if(debug)System.out.println(this.getClass().getName()+":: did not satisfy cluster-hodo matching cut"); + continue; + } + if(debug)System.out.println(this.getClass().getName()+":: made it past cluster-hodo matching cut "); + + } + if(debug)System.out.println(this.getClass().getName()+":: made it through all non-moller cuts"); //For 2021 update, Moller triggers if(triggerModule.getCutEn(TriggerModule2019.SINGLES_MOLLERMODE_EN)) { if(triggerModule.getCutEn(TriggerModule2019.SINGLES_XYMINMAX_EN) && !triggerModule.clusterXMinCut(clusterX)) { @@ -249,7 +274,9 @@ public void process(EventHeader event) { } // Note that a trigger occurred. triggered = true; - + if(debug) + if(debug)System.out.println(this.getClass().getName()+":: found a trigger!"); + if(ixy.y > 0) topBot.add(TOP); else topBot.add(BOT); @@ -261,6 +288,7 @@ public void process(EventHeader event) { } if(triggered) { + if(debug)System.out.println(this.getClass().getName()+":: sending trigger!!!"); boolean topStat = false; boolean botStat = false; if(topBot.contains(TOP)) topStat = true; diff --git a/record-util/src/main/java/org/hps/readout/ReadoutDataManager.java b/record-util/src/main/java/org/hps/readout/ReadoutDataManager.java index c5b4c8670b..20b36619c0 100755 --- a/record-util/src/main/java/org/hps/readout/ReadoutDataManager.java +++ b/record-util/src/main/java/org/hps/readout/ReadoutDataManager.java @@ -1,1184 +1,1258 @@ -package org.hps.readout; - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.BitSet; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.PriorityQueue; -import java.util.Set; -import java.util.logging.Logger; - -import org.hps.conditions.database.DatabaseConditionsManager; -import org.hps.record.evio.EvioEventConstants; -import org.hps.record.triggerbank.TSGenericObject; -import org.hps.readout.util.TimedList; -import org.hps.readout.util.TriggerTime; -import org.hps.readout.util.collection.LCIOCollection; -import org.hps.readout.util.collection.LCIOCollectionFactory; -import org.hps.readout.util.collection.ManagedLCIOCollection; -import org.hps.readout.util.collection.ManagedLCIOData; -import org.hps.readout.util.collection.TriggeredLCIOData; -import org.hps.record.triggerbank.BaseTriggerData; -import org.lcsim.event.EventHeader; -import org.lcsim.event.GenericObject; -import org.lcsim.event.MCParticle; -import org.lcsim.event.base.BaseLCSimEvent; -import org.lcsim.geometry.IDDecoder; -import org.lcsim.lcio.LCIOWriter; -import 
org.lcsim.util.Driver; - -/** - * Class ReadoutDataManager is the central management - * class for the HPS readout chain. It is responsible for tracking - * most LCSim collection data, for syncing readout data production - * drivers and their output, for passing managed data objects to - * drivers as input, for managing triggers, and for writing out data. - *

- * More information on how a readout driver should interface - */ -public class ReadoutDataManager extends Driver { - /** - * Defines the default size of the readout window in units of - * nanoseconds. - */ - private static int readoutWindow = 200; - /** - * Defines the name of the output file for the run. - */ - private static String outputFileName = null; - /** - * Defines where the trigger time should occur within the default - * readout window. For instance, a value of t means - * that a period of time equal to t will be included - * before the trigger time, and a period of time equal to - * readoutWindow - t will be included after it. - */ - private static double triggerTimeDisplacement = 50; - /** - * Defines the length of an event in units of nanoseconds. - */ - private static final double BEAM_BUNCH_SIZE = 2.0; - /** - * Tracks the current simulation time in units of nanoseconds. - */ - private static double currentTime = 0.0; - /** - * Tracks all registered readout drivers. - */ - private static final Set driverSet = new HashSet(); - /** - * Tracks all data collections which are managed by the readout - * manager as well as their properties. - */ - private static final Map> collectionMap = new HashMap>(); - /** - * Tracks the time displacement for trigger drivers. - */ - private static final Map triggerTimeDisplacementMap = new HashMap(); - /** - * Stores trigger requests from trigger drivers until enough time - * has passed to fully buffer the necessary readout data. - */ - private static final PriorityQueue triggerQueue = new PriorityQueue(); - /** - * A writer for writing readout events to an output LCIO file. - */ - private static LCIOWriter outputWriter = null; - /** - * Tracks the total amount of time that must be buffered to allow - * for readout to occur. - */ - private static double bufferTotal = 0.0; - /** - * The total number of triggers seen. - */ - private static int triggers = 0; - /** - * The delay between when a trigger occurs, and when readout is - * performed. - */ - private static double triggerDelay = 0.0; - - /** - * Collection parameters for the dummy trigger bank object. - */ - private static LCIOCollection triggerBankParams = null; - - private static final String nl = String.format("%n"); - private static final Logger logger = Logger.getLogger(ReadoutDataManager.class.getSimpleName()); - - @Override - public void startOfData() { - // Instantiate the readout LCIO file. - if(outputFileName == null) { - throw new IllegalArgumentException("Error: Output file name not defined!"); - } - try { outputWriter = new LCIOWriter(new File(outputFileName)); } - catch (IOException e) { - e.printStackTrace(); - throw new RuntimeException(); - } - - // Create a collection for the dummy trigger bank. - LCIOCollectionFactory.setCollectionName("TriggerBank"); - LCIOCollectionFactory.setFlags(0); - triggerBankParams = LCIOCollectionFactory.produceLCIOCollection(GenericObject.class); - - // Get the total amount of time that the readout system must - // wait to make sure that all data has been safely buffered - // and exists to read out. - double longestBufferBefore = 0.0; - double longestBufferAfter = 0.0; - double longestLocalBuffer = 0.0; - double longestTimeDisplacement = 0.0; - double longestDisplacedAfter = 0.0; - double longestTriggerDisplacement = 0.0; - - StringBuffer initializationBuffer = new StringBuffer(); - initializationBuffer.append("Getting longest trigger time displacement..." 
+ nl); - for(Entry entry : triggerTimeDisplacementMap.entrySet()) { - initializationBuffer.append(String.format("\t%-30s :: %.0f%n", entry.getKey().getClass().getSimpleName(), entry.getValue().doubleValue())); - longestTriggerDisplacement = Math.max(longestTriggerDisplacement, entry.getValue().doubleValue()); - } - initializationBuffer.append("Longest is: " + longestTriggerDisplacement + nl + nl); - - initializationBuffer.append("Getting longest driver collection buffers..." + nl); - for(ManagedLCIOData data : collectionMap.values()) { - double before = Double.isNaN(data.getCollectionParameters().getWindowBefore()) ? 0.0 : data.getCollectionParameters().getWindowBefore(); - double after = Double.isNaN(data.getCollectionParameters().getWindowAfter()) ? 0.0 : data.getCollectionParameters().getWindowAfter(); - double displacement = data.getCollectionParameters().getProductionDriver().getTimeDisplacement(); - double local = data.getCollectionParameters().getProductionDriver().getTimeNeededForLocalOutput(); - - initializationBuffer.append("\t" + data.getCollectionParameters().getCollectionName() + nl); - initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Buffer Before", before)); - initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Buffer After", after)); - initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Local Buffer", local)); - initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Displacement", displacement)); - initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Displaced After", (displacement + after))); - - longestBufferBefore = Math.max(longestBufferBefore, before); - longestBufferAfter = Math.max(longestBufferAfter, after); - longestLocalBuffer = Math.max(longestLocalBuffer, local); - longestTimeDisplacement = Math.max(longestTimeDisplacement, displacement); - longestDisplacedAfter = Math.max(longestDisplacedAfter, displacement + after); - } - initializationBuffer.append("Longest (before) is: " + longestBufferBefore + nl); - initializationBuffer.append("Longest (after) is: " + longestBufferAfter + nl); - initializationBuffer.append("Longest (local) is: " + longestLocalBuffer + nl); - initializationBuffer.append("Longest (displacement) is: " + longestTimeDisplacement + nl); - initializationBuffer.append("Longest (displacemed after) is: " + longestDisplacedAfter + nl + nl); - - initializationBuffer.append("Readout Window: " + readoutWindow + nl); - initializationBuffer.append("Trigger Offset: " + triggerTimeDisplacement + nl); - initializationBuffer.append("Default Before: " + triggerTimeDisplacement + nl); - initializationBuffer.append("Default After : " + (readoutWindow - triggerTimeDisplacement) + nl + nl); - - triggerDelay = Math.max(longestTriggerDisplacement, longestDisplacedAfter); - triggerDelay = Math.max(triggerDelay, longestLocalBuffer); - double totalNeededDisplacement = triggerDelay + longestBufferBefore + 150; - - initializationBuffer.append("Total Time Needed: " + totalNeededDisplacement + nl); - logger.fine(nl + initializationBuffer.toString()); - - // Determine the total amount of time that must be included - // in the data buffer in order to safely write out all data. - // An extra 150 ns of data is retained as a safety, just in - // case some driver needs to look unusually far back. 
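The startOfData() shown above derives the readout delay by taking the maximum over every registered driver's buffering needs and then padding by 150 ns. The arithmetic reduces to something like this sketch, where each driver's numbers are supplied as a simple record instead of being pulled from the collection map:

import java.util.List;

final class BufferTotalSketch {
    static final class DriverBuffering {
        final double before, after, displacement, localBuffer;
        DriverBuffering(double before, double after, double displacement, double localBuffer) {
            this.before = before; this.after = after;
            this.displacement = displacement; this.localBuffer = localBuffer;
        }
    }

    /**
     * triggerDelay is the largest of the trigger displacement, the displaced "after"
     * window, and the local output buffer; the longest "before" window and a 150 ns
     * safety margin are then added on top.
     */
    static double bufferTotal(List<DriverBuffering> drivers, double longestTriggerDisplacement) {
        double longestBefore = 0.0, longestDisplacedAfter = 0.0, longestLocal = 0.0;
        for (DriverBuffering d : drivers) {
            longestBefore = Math.max(longestBefore, d.before);
            longestDisplacedAfter = Math.max(longestDisplacedAfter, d.displacement + d.after);
            longestLocal = Math.max(longestLocal, d.localBuffer);
        }
        double triggerDelay = Math.max(longestTriggerDisplacement,
                Math.max(longestDisplacedAfter, longestLocal));
        return triggerDelay + longestBefore + 150.0;
    }
}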
- bufferTotal = totalNeededDisplacement; - } - - @Override - public void endOfData() { - try { outputWriter.close(); } - catch(IOException e) { - e.printStackTrace(); - throw new RuntimeException(); - } - - System.out.println("Wrote " + triggers + " triggers."); - } - - @Override - public void process(EventHeader event) { - // Check the trigger queue. - if(!triggerQueue.isEmpty()) { - // Check the earliest possible trigger write time. - boolean isWritable = getCurrentTime() >= triggerQueue.peek().getTriggerTime() + bufferTotal; - - // If all collections are available to be written, the - // event should be output. - if(isWritable) { - // Store the current trigger data. - TriggerTime trigger = triggerQueue.poll(); - - // 2016 MC only process one trigger, and no TS bank is stored - // 2019 MC can process multi-trigger, and TS bank is stored - List triggerList = new ArrayList(); - if(!trigger.getTriggerType().equals("noSet")) { - triggerList.add(trigger); - - // Iterate triggers in queue, remove next trigger if time of next trigger is the - // same as previous, until time of next trigger is not the same as previous or - // no next trigger - TriggerTime nextTrigger = null; - if(!triggerQueue.isEmpty()) nextTrigger = triggerQueue.peek(); - while((!triggerQueue.isEmpty()) && (nextTrigger.getTriggerTime() == trigger.getTriggerTime())) { - triggerList.add(nextTrigger); - triggerQueue.poll(); - if(!triggerQueue.isEmpty()) nextTrigger = triggerQueue.peek(); - } - } - - triggers++; - - // Make a new LCSim event. - int triggerEventNumber = event.getEventNumber() - ((int) Math.floor((getCurrentTime() - trigger.getTriggerTime()) / 2.0)); - EventHeader lcsimEvent = new BaseLCSimEvent(DatabaseConditionsManager.getInstance().getRun(), - triggerEventNumber, event.getDetectorName(), (long) 4 * (Math.round(trigger.getTriggerTime() / 4)), false); - - // 2016 MC only process one trigger, and no TS bank is stored - // 2019 MC can process multi-trigger, and TS bank is stored - if(!trigger.getTriggerType().equals("noSet")) { - List ts_list = new ArrayList(); - TSGenericObject tsBank = new TSGenericObject(); - int[] tsValues = new int[8]; - BitSet bits = new BitSet(32); - for(TriggerTime tri : triggerList) { - String triggerType = tri.getTriggerType(); - if(triggerType.equals(TriggerDriver.SINGLES0)) { - String topBot = tri.getTopBotStat(); - if(topBot.equals(TriggerDriver.TOP)) bits.set(0); - else if(topBot.equals(TriggerDriver.BOT)) bits.set(4); - else if(topBot.equals(TriggerDriver.TOPBOT)){ - bits.set(0); - bits.set(4); - } - } - else if(triggerType.equals(TriggerDriver.SINGLES1)) { - String topBot = tri.getTopBotStat(); - if(topBot.equals(TriggerDriver.TOP)) bits.set(1); - else if(topBot.equals(TriggerDriver.BOT)) bits.set(5); - else if(topBot.equals(TriggerDriver.TOPBOT)){ - bits.set(1); - bits.set(5); - } - } - else if(triggerType.equals(TriggerDriver.SINGLES2)) { - String topBot = tri.getTopBotStat(); - if(topBot.equals(TriggerDriver.TOP)) bits.set(2); - else if(topBot.equals(TriggerDriver.BOT)) bits.set(6); - else if(topBot.equals(TriggerDriver.TOPBOT)){ - bits.set(2); - bits.set(6); - } - } - else if(triggerType.equals(TriggerDriver.SINGLES3)) { - String topBot = tri.getTopBotStat(); - if(topBot.equals(TriggerDriver.TOP)) bits.set(3); - else if(topBot.equals(TriggerDriver.BOT)) bits.set(7); - else if(topBot.equals(TriggerDriver.TOPBOT)){ - bits.set(3); - bits.set(7); - } - } - else if(triggerType.equals(TriggerDriver.PAIR0)) bits.set(8); - else if(triggerType.equals(TriggerDriver.PAIR1)) bits.set(9); - 
else if(triggerType.equals(TriggerDriver.PAIR2)) bits.set(10); - else if(triggerType.equals(TriggerDriver.PAIR3)) bits.set(11); - else if(triggerType.equals(TriggerDriver.PULSER)) bits.set(15); - else if(triggerType.equals(TriggerDriver.FEE)) { - String topBot = tri.getTopBotStat(); - if(topBot.equals(TriggerDriver.TOP)) bits.set(18); - else if(topBot.equals(TriggerDriver.BOT)) bits.set(19); - else if(topBot.equals(TriggerDriver.TOPBOT)){ - bits.set(18); - bits.set(19); - } - } - } - - tsValues[0] = EvioEventConstants.TS_BANK_TAG; - - if(!bits.isEmpty()) { - tsValues[5] = (int)bits.toLongArray()[0]; - tsValues[6] = (int)bits.toLongArray()[0]; - } - else { - tsValues[5] = 0; - tsValues[6] = 0; - } - - // Filling the generic objects with the integer array - tsBank.setValues(tsValues); - - // Adding the generic object to the list - ts_list.add(tsBank); - lcsimEvent.put("TSBank", ts_list, TSGenericObject.class, 0); - } - - - // Calculate the readout window time range. This is - // used for any production driver that does not have - // a manually specified output range. - double startTime = trigger.getTriggerTime() - triggerTimeDisplacement; - double endTime = startTime + readoutWindow; - - logger.finer("Trigger Time: " + trigger.getTriggerTime()); - logger.finer("Default Time Range: " + startTime + " - " + endTime); - - // All readout output is initially stored in a single - // object. This allows the readout from multiple - // drivers to be merged, if needed, and also prevents - // duplicate instances of an object from being - // written. - Map> triggeredDataMap = new HashMap>(); - - // Write out the writable collections into the event. - for(ManagedLCIOData collectionData : collectionMap.values()) { - // Ignore any collections that are not set to be persisted. - if(!collectionData.getCollectionParameters().isPersistent()) { - continue; - } - - // Get the local start and end times. A driver - // may manually specify an amount of time before - // and after the trigger time which should be - // output. If this is the case, use it instead of - // the time found through use of the readout - // window/trigger time displacement calculation. - double localStartTime = startTime; - if(!Double.isNaN(collectionData.getCollectionParameters().getWindowBefore())) { - localStartTime = trigger.getTriggerTime() - collectionData.getCollectionParameters().getWindowBefore(); - } - - double localEndTime = endTime; - if(!Double.isNaN(collectionData.getCollectionParameters().getWindowAfter())) { - localEndTime = trigger.getTriggerTime() + collectionData.getCollectionParameters().getWindowAfter(); - } - - // Get the object data for the time range. - addDataToMap(collectionData.getCollectionParameters(), localStartTime, localEndTime, triggeredDataMap); - } - - // Write out any special on-trigger collections into - // the event as well. These are collated so that if - // more than one driver contributes to the same - // collection, they will be properly merged. - for(ReadoutDriver driver : driverSet) { - // Get the special collection(s) from the current - // driver, if it exists. - Collection> onTriggerData = driver.getOnTriggerData(trigger.getTriggerTime()); - - // If there are special collections, write them. - if(onTriggerData != null) { - for(TriggeredLCIOData triggerData : onTriggerData) { - addDataToMap(triggerData, triggerData.getCollectionParameters().getObjectType(), triggeredDataMap); - } - } - } - - // Create the dummy trigger bank data and store it. 
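The TS bank records which triggers fired by setting bits in a BitSet (singles-0 top at bit 0 and bottom at bit 4, the pair triggers at 8 through 11, pulser at 15, FEE top/bottom at 18/19) and then copying the packed word into two slots of an eight-int array. A stripped-down sketch of that packing; the bit assignments follow the code above, while the bank tag and the surrounding LCIO handling are omitted:

import java.util.BitSet;

final class TsBitsSketch {
    static int[] packTriggerBits(boolean singles0Top, boolean singles0Bot,
                                 boolean pair0, boolean pulser) {
        BitSet bits = new BitSet(32);
        if (singles0Top) { bits.set(0); }
        if (singles0Bot) { bits.set(4); }
        if (pair0)       { bits.set(8); }
        if (pulser)      { bits.set(15); }

        int[] tsValues = new int[8];
        if (!bits.isEmpty()) {
            int packed = (int) bits.toLongArray()[0];
            tsValues[5] = packed;
            tsValues[6] = packed;
        }
        return tsValues;
    }

    public static void main(String[] args) {
        int[] ts = packTriggerBits(true, false, false, true);
        System.out.printf("word = 0x%08X%n", ts[5]); // bits 0 and 15 set
    }
}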
- TriggeredLCIOData triggerBankData = new TriggeredLCIOData(triggerBankParams); - triggerBankData.getData().add(new BaseTriggerData(new int[8])); - addDataToMap(triggerBankData, triggerBankData.getCollectionParameters().getObjectType(), triggeredDataMap); - - // Readout timestamps should be generated for both - // the "system" and the trigger. This corresponds to - // the simulation time at which the trigger occurred. - // Note that there is a "trigger delay" parameter in - // the old readout, but this does not exist in the - // new system, so both timestamps are the same. - - // Calculate the simulation trigger time. - double simTriggerTime = trigger.getTriggerTime() + triggerTimeDisplacementMap.get(trigger.getTriggeringDriver()).doubleValue(); - ReadoutTimestamp systemTimestamp = new ReadoutTimestamp(ReadoutTimestamp.SYSTEM_TRIGGERBITS, simTriggerTime); - ReadoutTimestamp triggerTimestamp = new ReadoutTimestamp(ReadoutTimestamp.SYSTEM_TRIGGERTIME, simTriggerTime); - LCIOCollectionFactory.setCollectionName(ReadoutTimestamp.collectionName); - LCIOCollection timestampCollection = LCIOCollectionFactory.produceLCIOCollection(ReadoutTimestamp.class); - TriggeredLCIOData timestampData = new TriggeredLCIOData(timestampCollection); - timestampData.getData().add(systemTimestamp); - timestampData.getData().add(triggerTimestamp); - addDataToMap(timestampData, timestampData.getCollectionParameters().getObjectType(), triggeredDataMap); - - // Store all of the data collections. - for(TriggeredLCIOData triggerData : triggeredDataMap.values()) { - storeCollection(triggerData, lcsimEvent); - } - - // Write the event to the output file. - try { outputWriter.write(lcsimEvent); } - catch(IOException e) { - e.printStackTrace(); - throw new RuntimeException(); - } - } - } - - // Remove all data from the buffer that occurs before the max - // buffer length cut-off. - for(ManagedLCIOData data : collectionMap.values()) { - while(!data.getData().isEmpty() && (data.getData().getFirst().getTime() < (getCurrentTime() - 500))) { - data.getData().removeFirst(); - } - } - - // Increment the current time. - currentTime += BEAM_BUNCH_SIZE; - } - - /** - * Adds a new set of data objects to the data manager at the time - * specified. - * @param collectionName - The collection name to which the data - * should be added. - * @param dataTime - The truth time at which the data objects - * occurred. This represents the time of the object, corrected - * for time displacement due to buffering on processing on the - * part of the production driver. - * @param data - The data to add. - * @param dataType - The class type of the data objects. - * @throws IllegalArgumentException Occurs if either the - * collection specified does not exist, or if the object type of - * the data objects does not match the object type of the data in - * the collection. - * @param - Specifies the class type of the data to be added - * to the collection. - */ - public static final void addData(String collectionName, double dataTime, Collection data, Class dataType) { - // Validate that the collection has been registered. - if(!collectionMap.containsKey(collectionName)) { - throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" has not been registered."); - } - - // Get the collection data object. - ManagedLCIOData collectionData = collectionMap.get(collectionName); - - // Validate that the data type is correct. 
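At the end of each process() call the manager drops buffered data older than 500 ns behind the current simulation time and then advances the clock by one beam bunch (2 ns). A sketch of that tail, with a Deque of timed batches standing in for the managed collections:

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;

final class BufferPruneSketch {
    static final class TimedBatch {
        final double time; final List<Object> objects;
        TimedBatch(double time, List<Object> objects) { this.time = time; this.objects = objects; }
    }

    private static final double BEAM_BUNCH_SIZE = 2.0;  // ns
    private static final double RETENTION = 500.0;      // ns kept behind the current time
    private double currentTime = 0.0;

    private final Deque<TimedBatch> buffer = new ArrayDeque<>();

    /** Drop stale batches from the front of the time-ordered buffer, then advance the clock. */
    void endOfBunch() {
        while (!buffer.isEmpty() && buffer.peekFirst().time < currentTime - RETENTION) {
            buffer.removeFirst();
        }
        currentTime += BEAM_BUNCH_SIZE;
    }
}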
- if(!collectionData.getCollectionParameters().getObjectType().isAssignableFrom(dataType)) { - throw new IllegalArgumentException("Error: Saw data type \"" + dataType.getSimpleName() + "\" but expected data type \"" - + collectionData.getCollectionParameters().getObjectType().getSimpleName() + "\" instead."); - } - - // If the data is empty, then there is no need to add it to - // the buffer. - if(!data.isEmpty()) { - // Add the new data to the data buffer. - double time = Double.isNaN(dataTime) ? currentTime - collectionData.getCollectionParameters().getGlobalTimeDisplacement() : dataTime; - LinkedList> dataBuffer = collectionData.getData(); - dataBuffer.add(new TimedList(time, data)); - } - } - - /** - * Adds a new set of data objects to the data manager at a time - * calculated based on the current simulation time corrected by - * the total time offset of the collection. - * @param collectionName - The collection name to which the data - * should be added. - * @param data - The data to add. - * @param dataType - The class type of the data objects. - * @throws IllegalArgumentException Occurs if either the - * collection specified does not exist, or if the object type of - * the data objects does not match the object type of the data in - * the collection. - * @param - Specifies the class type of the data to be added - * to the collection. - */ - public static final void addData(String collectionName, Collection data, Class dataType) { - addData(collectionName, Double.NaN, data, dataType); - } - - /** - * Checks whether or not a collection has been populated up to - * the indicated time. - * @param collectionName - The collection to check. - * @param time - The time at which the collection should be - * filled. - * @return Returns true if the collection has data - * generated up to at least the specified time, and - * false if it does not. - */ - public static final boolean checkCollectionStatus(String collectionName, double time) { - // Verify that the requested collection exists. - if(!collectionMap.containsKey(collectionName)) { - throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" is not a registered collection."); - } - - // Otherwise, check if enough time has passed for the driver - // which controls to the collection to have produced output - // for the requested time period. - return time <= getCurrentTime() - collectionMap.get(collectionName).getCollectionParameters().getGlobalTimeDisplacement(); - } - - /** - * Gets the length in nanoseconds of a single event (beam bunch). - * @return Returns the length in ns of a single beam bunch. - */ - public static final double getBeamBunchSize() { - return BEAM_BUNCH_SIZE; - } - - /** - * Gets the LCIO collection parameters for a collection. - * @param collectionName - The name of the collection. - * @param objectType - The data type of the collection. - * @return Returns the collection parameters. - */ - @SuppressWarnings("unchecked") - public static final LCIOCollection getCollectionParameters(String collectionName, Class objectType) { - // Verify that the requested collection actually exists. - if(!collectionMap.containsKey(collectionName)) { - throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); - } - - // Get the collection and check that it is of the appropriate - // parameterized type. 
- LCIOCollection collection = collectionMap.get(collectionName).getCollectionParameters(); - if(collection.getObjectType() != objectType) { - throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" is of type " + collection.getObjectType().getSimpleName() - + " while object type " + objectType.getSimpleName() + " was requested."); - } - - // Return the collection parameters. - // NOTE: This type case is safe, since it is verified above - // that the collection object is of the same class type - // as the parameterized type. - return (LCIOCollection) collection; - } - - /** - * Gets the current simulation time in nanoseconds. - * @return Returns the simulation time in nanoseconds. - */ - public static final double getCurrentTime() { - return currentTime; - } - - /** - * Gets a collection of data objects from a collection within the - * time range specified. - * @param startTime - The (inclusive) start of the time range. - * @param endTime The (exclusive) end of the time range. - * @param collectionName - The name of the collection. - * @param objectType - The class type of the data stored in the - * collection. - * @return Returns the data in the specified time range in the - * data collection in a {@link java.util.List List}. - * @param - Specifies the class type of the data stored in - * the collection. - */ - public static final Collection getData(double startTime, double endTime, String collectionName, Class objectType) { - return getDataList(startTime, endTime, collectionName, objectType); - } - - /** - * Gets the {@link org.lcsim.geometry.IDDecoder IDDecoder} that - * is used for the indicated managed collection, if it exists. - * @param collectionName - The collection to which the decoder - * should correspond. - * @return Returns the decoder for the collection, if it exists, - * and null otherwise. - */ - public static final IDDecoder getIDDecoder(String collectionName) { - // Verify that the requested collection actually exists. - if(!collectionMap.containsKey(collectionName)) { - throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); - } - - // Get the collection and obtain the ID decoder, if possible. - // If it does not exist, then leave it as a value of null. - LCIOCollection collection = collectionMap.get(collectionName).getCollectionParameters(); - IDDecoder decoder = null; - try { decoder = collection.getProductionDriver().getIDDecoder(collectionName); } - catch(UnsupportedOperationException e) { } - - // Return the decoder. - return decoder; - } - - /** - * Gets the default size of the readout window. - * @return Returns the default size of the readout window in - * units of nanoseconds. - */ - public static final int getReadoutWindow() { - return readoutWindow; - } - - /** - * Gets the total amount of time by which a collection is - * displaced between the actual truth data's occurrence in the - * simulation, and the time at which the object is actually - * produced. This includes both the time displacement introduced - * by the collection's production driver as well as displacement - * introduced by any preceding drivers that serve as input for - * the production driver. - * @param collectionName - The name of the collection. - * @return Returns the total time displacement in nanoseconds. 
- */ - public static final double getTotalTimeDisplacement(String collectionName) { - if(collectionMap.containsKey(collectionName)) { - return collectionMap.get(collectionName).getCollectionParameters().getGlobalTimeDisplacement(); - } else { - throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); - } - } - - /** - * Gets the time displacement between when a trigger occurs, and - * when the triggered data is actually written out. - * @return Returns the trigger delay in units of nanoseconds. - */ - public static final double getTriggerDelay() { - return bufferTotal; - } - - /** - * Gets the time by which the trigger is offset in the readout - * window. - * @return Returns the trigger offset in units of nanoseconds. - */ - public static final double getTriggerOffset() { - return triggerTimeDisplacement; - } - - /** - * Adds a managed collection to the data manager. All collections - * which serve as either input or output from a {@link - * org.hps.readout.ReadoutDriver ReadoutDriver} are required to - * be registered and managed by the data manager. On-trigger - * special collections should not be registered. - * @param params - An object describing the collection - * parameters. - * @param persistent - Sets whether this collection should be - * written out to the readout LCIO file. - * @param - Specifies the class type of the data stored by - * the collection. - */ - public static final void registerCollection(LCIOCollection params, boolean persistent) { - registerCollection(params, persistent, Double.NaN, Double.NaN); - } - - /** - * Adds a managed collection to the data manager. All collections - * which serve as either input or output from a {@link - * org.hps.readout.ReadoutDriver ReadoutDriver} are required to - * be registered and managed by the data manager. On-trigger - * special collections should not be registered. - * @param params - An object describing the collection - * parameters. - * @param persistent - Sets whether this collection should be - * written out to the readout LCIO file. - * @param readoutWindowBefore - Defines a custom period of time - * before the trigger time in which all objects will be output to - * the output LCIO file. - * @param readoutWindowAfter - Defines a custom period of time - * after the trigger time in which all objects will be output to - * the output LCIO file. - * @param - Specifies the class type of the data stored by - * the collection. - */ - public static final void registerCollection(LCIOCollection params, boolean persistent, double readoutWindowBefore, double readoutWindowAfter) { - // Make sure that all arguments are defined. - if(params.getCollectionName() == null) { - throw new IllegalArgumentException("Error: Collection name must be defined."); - } - if(params.getObjectType() == null) { - throw new IllegalArgumentException("Error: Collection object class must be defined."); - } - if(params.getProductionDriver() == null) { - throw new IllegalArgumentException("Error: Production driver must be defined."); - } - - // There should only be one collection for a given name. - if(collectionMap.containsKey(params.getCollectionName())) { - throw new IllegalArgumentException("Collection \"" + params.getCollectionName() + "\" of object type " - + params.getObjectType().getSimpleName() + " already exists."); - } - - // Create a collection data object. 
- double timeDisplacement = getTotalTimeDisplacement(params.getCollectionName(), params.getProductionDriver()); - LCIOCollectionFactory.setParams(params); - LCIOCollectionFactory.setGlobalTimeDisplacement(timeDisplacement); - LCIOCollectionFactory.setPersistent(persistent); - LCIOCollectionFactory.setWindowAfter(readoutWindowAfter); - LCIOCollectionFactory.setWindowBefore(readoutWindowBefore); - ManagedLCIOCollection managedParams = LCIOCollectionFactory.produceManagedLCIOCollection(params.getObjectType()); - ManagedLCIOData collectionData = new ManagedLCIOData(managedParams); - collectionMap.put(params.getCollectionName(), collectionData); - - // Store the readout driver in the driver set. - driverSet.add(params.getProductionDriver()); - - logger.config("Registered collection \"" + managedParams.getCollectionName() + "\" of class type " - + managedParams.getObjectType().getSimpleName() + "."); - - StringBuffer detailsBuffer = new StringBuffer(); - detailsBuffer.append("\tCollection Name :: " + params.getCollectionName()); - detailsBuffer.append("\tFlags :: " + Integer.toHexString(params.getFlags())); - detailsBuffer.append("\tObject Type :: " + params.getObjectType().getSimpleName()); - detailsBuffer.append("\tReadout Name :: " + params.getReadoutName()); - detailsBuffer.append("\tProduction Driver :: " + params.getProductionDriver().getClass().getSimpleName()); - logger.finer(nl + detailsBuffer.toString()); - } - - /** - * Registers a {@link org.hps.readout.ReadoutDriver - * ReadoutDriver} with the data manager. All readout drivers must - * be registered in order for their on-trigger special data to be - * added to the output event. - * @param productionDriver - The readout driver to register. - */ - public static final void registerReadoutDriver(ReadoutDriver productionDriver) { - // Trigger drivers are registered differently. - if(productionDriver instanceof TriggerDriver) { - logger.warning(nl + "Attempted to register TriggerDriver \"" + productionDriver.getClass().getSimpleName() + "\" as a readout driver." - + nl + " Trigger drivers are registered via the method \"registerTrigger(TriggerDriver)\"." - + nl + " Ignoring request."); - return; - } - - // Add the readout driver. - driverSet.add(productionDriver); - logger.config("Registered driver: " + productionDriver.getClass().getSimpleName()); - } - - /** - * Registers a trigger driver with the data manager. - * @param triggerDriver - The trigger driver to register. - */ - public static final void registerTrigger(TriggerDriver triggerDriver) { - // Get the total time displacement for the trigger driver. - double timeDisplacement = getTotalTimeDisplacement("", triggerDriver); - - // Store the time displacement in the trigger driver map. - triggerTimeDisplacementMap.put(triggerDriver, timeDisplacement); - logger.config("Registered trigger: " + triggerDriver.getClass().getSimpleName()); - } - - /** - * Changes the "readout name" parameter for a collection, while - * retaining all other parameters and stored data. - * @param collectionName - The name of the collection to modify. - * @param objectType - The object type of the collection. - * @param newReadoutName - The new name for the "readout name" - * parameter. - * @param - The object type of the data stored in the - * collection that is to be modified. - */ - public static final void updateCollectionReadoutName(String collectionName, Class objectType, String newReadoutName) { - // Get the collection. 
- if(!collectionMap.containsKey(collectionName)) { - throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); - } - ManagedLCIOData oldData = collectionMap.get(collectionName); - - // Make a new managed LCIO collection with the new readout. - LCIOCollectionFactory.setParams(oldData.getCollectionParameters()); - LCIOCollectionFactory.setReadoutName(newReadoutName); - ManagedLCIOCollection newParams = LCIOCollectionFactory.produceManagedLCIOCollection(objectType); - - // Create a new managed LCIO data object and transfer all the - // data from the old object to it. - ManagedLCIOData newData = new ManagedLCIOData(newParams); - for(TimedList oldList : oldData.getData()) { - newData.getData().add(oldList); - } - - // Put the new data list into the map. - collectionMap.put(collectionName, newData); - } - - /** - * Indicates that the specified driver saw a trigger and readout - * should occur. - * @param driver - The triggering driver. - * @throws IllegalArgumentException Occurs if the argument - * triggering driver is not registered as a trigger driver with - * the data manager. - */ - static final void sendTrigger(TriggerDriver driver) { - // Check that the triggering driver is registered as a - // trigger driver. - if(!triggerTimeDisplacementMap.containsKey(driver)) { - throw new IllegalArgumentException("Error: Driver \"" + driver.getClass().getSimpleName() + "\" is not a registered trigger driver."); - } - - // Calculate the trigger and readout times. - double triggerTime = getCurrentTime() - triggerTimeDisplacementMap.get(driver); - - // Add the trigger to the trigger queue. - triggerQueue.add(new TriggerTime(triggerTime, driver)); - logger.finer("Added trigger to queue with trigger time " + triggerTime + " and readout time " + (triggerTime + bufferTotal) + " from driver " - + driver.getClass().getSimpleName() + "."); - } - - /** - * Indicates that the specified driver saw a trigger and readout - * should occur. - * @param driver - The triggering driver. - * @param triggerType - trigger type. - * @throws IllegalArgumentException Occurs if the argument - * triggering driver is not registered as a trigger driver with - * the data manager. - */ - static final void sendTrigger(TriggerDriver driver, String triggerType) { - // Check that the triggering driver is registered as a - // trigger driver. - if(!triggerTimeDisplacementMap.containsKey(driver)) { - throw new IllegalArgumentException("Error: Driver \"" + driver.getClass().getSimpleName() + "\" is not a registered trigger driver."); - } - - // Calculate the trigger and readout times. - double triggerTime = getCurrentTime() - triggerTimeDisplacementMap.get(driver); - - // Add the trigger to the trigger queue. - triggerQueue.add(new TriggerTime(triggerTime, triggerType, driver)); - logger.finer("Added trigger to queue with trigger time " + triggerTime + " and readout time " + (triggerTime + bufferTotal) + " from driver " - + driver.getClass().getSimpleName() + "."); - } - - /** - * Indicates that the specified driver saw a trigger and readout - * should occur. - * @param driver - The triggering driver. - * @param triggerType - trigger type. - * @throws IllegalArgumentException Occurs if the argument - * triggering driver is not registered as a trigger driver with - * the data manager. - */ - static final void sendTrigger(TriggerDriver driver, String triggerType, String topBot) { - // Check that the triggering driver is registered as a - // trigger driver. 
- if(!triggerTimeDisplacementMap.containsKey(driver)) { - throw new IllegalArgumentException("Error: Driver \"" + driver.getClass().getSimpleName() + "\" is not a registered trigger driver."); - } - - // Calculate the trigger and readout times. - double triggerTime = getCurrentTime() - triggerTimeDisplacementMap.get(driver); - - // Add the trigger to the trigger queue. - triggerQueue.add(new TriggerTime(triggerTime, triggerType, topBot, driver)); - logger.finer("Added trigger to queue with trigger time " + triggerTime + " and readout time " + (triggerTime + bufferTotal) + " from driver " - + driver.getClass().getSimpleName() + "."); - } - - /** - * Adds a data collection corresponding to a given parameter set - * to the data map. If there is already data existing under the - * same collection, it is then merged without duplicating any - * objects. - * @param params - The collection parameters for the data. - * @param readoutData - The data to add. - * @param triggeredDataMap - The data map into which the data - * collection should be added. - */ - @SuppressWarnings("unchecked") - private static final void addDataToMap(LCIOCollection params, Collection readoutData, Map> triggeredDataMap) { - // Check and see if an output collection already exists for - // this parameter set. If so, use it; otherwise, make a new - // entry for it. - TriggeredLCIOData untypedData = triggeredDataMap.get(params.getCollectionName()); - TriggeredLCIOData typedData = null; - if(untypedData == null) { - typedData = new TriggeredLCIOData(params); - triggeredDataMap.put(params.getCollectionName(), typedData); - } else { - // Verify that the collection parameters are the same. - if(untypedData.getCollectionParameters().equals(params)) { - // Note: This cast is safe; if the parameters objects - // are the same, then the object sets are necessarily - // of the same object type. - typedData = (TriggeredLCIOData) untypedData; - } else { - throw new RuntimeException("Error: Found multiple collections of name \"" + params.getCollectionName() + "\", but of differing definitions."); - } - } - - // Add the readout data to the collection data list. - typedData.getData().addAll(readoutData); - } - - /** - * Adds data stored in the collection defined by the parameters - * object within the given time range to the data map. If there - * is already data existing under the same collection, it is then - * merged without duplicating any objects. - * @param params - The parameters for the collection to add. - * @param startTime - The start of the time range within the data - * buffer from which data should be drawn. - * @param endTime - The end of the time range within the data - * buffer from which data should be drawn. - * @param triggeredDataMap - The data map into which the data - * collection should be added. - */ - private static final void addDataToMap(LCIOCollection params, double startTime, double endTime, Map> triggeredDataMap) { - // Get the readout data objects. - List triggerData = getDataList(startTime, endTime, params.getCollectionName(), params.getObjectType()); - - // Pass the readout data to the merging method. - addDataToMap(params, triggerData, triggeredDataMap); - } - - /** - * Adds data stored in a triggered collection object to the data - * map. If there is already data existing under the same - * collection, it is then merged without duplicating any objects. - * @param dataList - The collection data to be added. - * @param objectType - the object type of the collection data. 
- * @param triggeredDataMap - The data map into which the data - * collection should be added. - */ - private static final void addDataToMap(TriggeredLCIOData dataList, Class objectType, Map> triggeredDataMap) { - // Check that the parameters object is the same object type - // as is specified. - if(dataList.getCollectionParameters().getObjectType() != objectType) { - throw new IllegalArgumentException("Error: Can not process class type " + dataList.getCollectionParameters().getObjectType().getSimpleName() - + " as class type " + objectType.getSimpleName()); - } else { - // Note: This is safe - the above check requires that the - // object type be the parameterized type. - @SuppressWarnings("unchecked") - TriggeredLCIOData typedDataList = (TriggeredLCIOData) dataList; - Set triggerData = typedDataList.getData(); - addDataToMap(typedDataList.getCollectionParameters(), triggerData, triggeredDataMap); - } - } - - /** - * Gets a list of data objects from a collection within the time - * range specified. - * @param startTime - The (inclusive) start of the time range. - * @param endTime The (exclusive) end of the time range. - * @param collectionName - The name of the collection. - * @param objectType - The class type of the data stored in the - * collection. - * @return Returns the data in the specified time range in the - * data collection in a {@link java.util.List List}. - * @param - Specifies the class type of the data stored in - * the collection. - */ - private static final List getDataList(double startTime, double endTime, String collectionName, Class objectType) { - // Get the collection data. - ManagedLCIOData collectionData = collectionMap.get(collectionName); - - // Verify that the a collection of the indicated name exists - // and that it is the appropriate object type. - if(collectionData != null) { - if(!objectType.isAssignableFrom(collectionData.getCollectionParameters().getObjectType())) { - throw new IllegalArgumentException("Error: Expected object type " + objectType.getSimpleName() + " for collection \"" + collectionName - + ",\" but found object type " + collectionData.getCollectionParameters().getObjectType().getSimpleName() + "."); - } - } else { - throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); - } - - // Throw an alert if the earliest requested time precedes the - // earliest buffered time, and similarly for the latest time. - LinkedList> dataLists = collectionData.getData(); - - // Iterate through the data and collect all entries that have - // an associated truth time within the given time range. The - // lower bound is inclusive, the upper bound is exclusive. - List outputList = new ArrayList(); - for(TimedList dataList : dataLists) { - if(dataList.getTime() >= startTime && dataList.getTime() < endTime) { - // Add the items from the list to the output list. - for(Object o : dataList) { - if(objectType.isAssignableFrom(o.getClass())) { - outputList.add(objectType.cast(o)); - } else { - throw new ClassCastException("Error: Unexpected object of type " + o.getClass().getSimpleName() + " in collection \"" - + collectionName + ".\""); - } - } - } - } - - // Return the collected items. - return outputList; - } - - /** - * Calculates the total time displacement of a collection based - * on its production driver, and the time displacements of the - * input collections from which it is produced. This is processed - * recursively, so all time displacements in the production chain - * of a collection are accounted for. 
- * @param collectionName - The name of the collection. - * @param productionDriver - The driver which produces the - * collection. - * @return Returns the total time displacement for the collection - * in units of nanoseconds. - */ - private static final double getTotalTimeDisplacement(String collectionName, ReadoutDriver productionDriver) { - // Make sure that there are no circular dependencies. - validateDependencies(collectionName, productionDriver, new HashSet()); - - // The total time displacement is the displacement of the - // dependent collection with the largest displacement plus - // the local time displacement of the production driver. - double baseDisplacement = 0.0; - for(String dependency : productionDriver.getDependencies()) { - // All dependencies must already be registered. Check - // that it is. - double dependencyDisplacement = 0.0; - if(collectionMap.containsKey(dependency)) { - dependencyDisplacement = collectionMap.get(dependency).getCollectionParameters().getGlobalTimeDisplacement(); - } else { - throw new IllegalArgumentException("Error: Collection \"" + dependency + "\" has not been registered."); - } - - // Select the largest value. - baseDisplacement = Math.max(baseDisplacement, dependencyDisplacement); - } - - // Return the sum of the largest base displacement and the - // production driver. - return baseDisplacement + productionDriver.getTimeDisplacement(); - } - - /** - * Writes an entire {@link org.hps.readout.ReadoutDriver - * ReadoutDriver} on-trigger data collection to the specified - * output event. - * @param collectionData - The on-trigger readout data. - * @param event - The output event. - * @param - Specifies the class type of the data that is to be - * written to the output event. - */ - private static final void storeCollection(TriggeredLCIOData collectionData, EventHeader event) { - storeCollection(collectionData.getCollectionParameters().getCollectionName(), collectionData.getCollectionParameters().getObjectType(), - collectionData.getCollectionParameters().getFlags(), collectionData.getCollectionParameters().getReadoutName(), - collectionData.getData(), event); - } - - /** - * Writes the specified data to the output event. - * @param collectionName - The name of the output collection. - * @param objectType - The class of the output collection data - * objects. - * @param flags - Any LCIO flags which apply to the data. - * @param readoutName - The readout name for the data, if it is - * needed. null should be used if a readout name is - * not required. - * @param collectionData - A parameterized {@link - * java.util.Collection Collection} containing the data that is - * to be written. - * @param event - The event into which the data is to be written. - * @param - Specifies the class type of the data that is to be - * written to the output event. - */ - private static final void storeCollection(String collectionName, Class objectType, int flags, String readoutName, - Collection collectionData, EventHeader event) { - // The input collection must be a list. If it already is, - // just use it directly. Otherwise, copy the contents into an - // appropriately parameterized list. - List dataList; - if(collectionData instanceof List) { - dataList = (List) collectionData; - } else { - dataList = new ArrayList(collectionData.size()); - dataList.addAll(collectionData); - } - - // Place the data into the LCIO event. 
- if(readoutName == null) { - event.put(collectionName, dataList, objectType, flags); - } else { - event.put(collectionName, dataList, objectType, flags, readoutName); - } - - logger.finer(String.format("Output %d objects of type %s to collection \"%s\".", dataList.size(), objectType.getSimpleName(), collectionName)); - } - - /** - * Checks that the dependencies of a collection are valid. This - * consists of checking that any dependencies are registered with - * the data management driver and also that there are no circular - * dependencies present. - * @param collectionName - The name of the collection to check. - * @param productionDriver - The production driver of the - * collection to check. - * @param dependents - A set containing all of the collections - * which depend on this driver in the chain. Note that for the - * first call, this should be an empty set. - */ - private static final void validateDependencies(String collectionName, ReadoutDriver productionDriver, Set dependents) { - // Add the current driver to the list of dependents. - dependents.add(collectionName); - - // Check that none of the dependencies of the current driver - // are also dependencies of a driver higher in the chain. - for(String dependency : productionDriver.getDependencies()) { - // The dependency must be registered. - if(!collectionMap.containsKey(dependency)) { - throw new IllegalArgumentException("Error: Collection \"" + dependency + "\" has not been registered."); - } - - // Get the collection data for the dependency. - ManagedLCIOData collectionData = collectionMap.get(dependency); - - // Check that this dependency does not depend on the - // higher driver. - for(String dependent : dependents) { - if(collectionData.getCollectionParameters().getProductionDriver().getDependencies().contains(dependent)) { - throw new IllegalStateException("Error: Collection \"" + dependency + "\" depends on collection \"" + dependent - + ",\" but collection \"" + dependent + "\" also depends of collection \"" + dependency + ".\""); - } - } - - // If there are no detected circular dependencies, then - // perform the same check on the dependencies of this - // dependency. - Set dependencySet = new HashSet(); - dependencySet.addAll(dependents); - validateDependencies(dependency, collectionData.getCollectionParameters().getProductionDriver(), dependencySet); - } - } - - /** - * Adds the argument particle and all of its direct parents to - * the particle set. - * @param particle - The base particle. - * @param particleSet - The set that is to contain the full tree - * of particles. - */ - public static final void addParticleParents(MCParticle particle, Set particleSet) { - // Add the particle itself to the set. - particleSet.add(particle); - - // If the particle has parents, run the same method for each - // parent. - if(!particle.getParents().isEmpty()) { - for(MCParticle parent : particle.getParents()) { - addParticleParents(parent, particleSet); - } - } - } - - /** - * Sets the output file name for the triggered data file. - * @param filepath - The file path for the output file. - */ - public static final void setOutputFile(String filepath) { - outputFileName = filepath; - } - - /** - * Sets the default size of the readout window, in units of - * nanoseconds. Note that this can be overridden by specific - * drivers. - * @param nanoseconds - The length of the default readout window. 
- */ - public static final void setReadoutWindow(int nanoseconds) { - readoutWindow = nanoseconds; - } -} +package org.hps.readout; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.BitSet; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.PriorityQueue; +import java.util.Set; +import java.util.logging.Logger; + +import org.hps.conditions.database.DatabaseConditionsManager; +import org.hps.record.evio.EvioEventConstants; +import org.hps.record.triggerbank.TSGenericObject; +import org.hps.readout.util.TimedList; +import org.hps.readout.util.TriggerTime; +import org.hps.readout.util.collection.LCIOCollection; +import org.hps.readout.util.collection.LCIOCollectionFactory; +import org.hps.readout.util.collection.ManagedLCIOCollection; +import org.hps.readout.util.collection.ManagedLCIOData; +import org.hps.readout.util.collection.TriggeredLCIOData; +import org.hps.record.triggerbank.BaseTriggerData; +import org.lcsim.event.EventHeader; +import org.lcsim.event.GenericObject; +import org.lcsim.event.MCParticle; +import org.lcsim.event.base.BaseLCSimEvent; +import org.lcsim.geometry.IDDecoder; +import org.lcsim.lcio.LCIOWriter; +import org.lcsim.util.Driver; + +/** + * Class ReadoutDataManager is the central management + * class for the HPS readout chain. It is responsible for tracking + * most LCSim collection data, for syncing readout data production + * drivers and their output, for passing managed data objects to + * drivers as input, for managing triggers, and for writing out data. + *

+ * More information on how a readout driver should interface + */ +public class ReadoutDataManager extends Driver { + /** + * Defines the default size of the readout window in units of + * nanoseconds. + */ + private static int readoutWindow = 200; + /** + * Defines the name of the output file for the run. + */ + private static String outputFileName = null; + /** + * Defines where the trigger time should occur within the default + * readout window. For instance, a value of t means + * that a period of time equal to t will be included + * before the trigger time, and a period of time equal to + * readoutWindow - t will be included after it. + */ + private static double triggerTimeDisplacement = 50; + /** + * Defines the length of an event in units of nanoseconds. + */ + private static final double BEAM_BUNCH_SIZE = 2.0; + /** + * Tracks the current simulation time in units of nanoseconds. + */ + private static double currentTime = 0.0; + /** + * Tracks all registered readout drivers. + */ + private static final Set driverSet = new HashSet(); + /** + * Tracks all data collections which are managed by the readout + * manager as well as their properties. + */ + private static final Map> collectionMap = new HashMap>(); + /** + * Tracks the time displacement for trigger drivers. + */ + private static final Map triggerTimeDisplacementMap = new HashMap(); + /** + * Stores trigger requests from trigger drivers until enough time + * has passed to fully buffer the necessary readout data. + */ + private static final PriorityQueue triggerQueue = new PriorityQueue(); + /** + * A writer for writing readout events to an output LCIO file. + */ + private static LCIOWriter outputWriter = null; + /** + * Tracks the total amount of time that must be buffered to allow + * for readout to occur. + */ + private static double bufferTotal = 0.0; + /** + * The total number of triggers seen. + */ + private static int triggers = 0; + /** + * The delay between when a trigger occurs, and when readout is + * performed. + */ + private static double triggerDelay = 0.0; + /** + * sets the time passed between LCIO events. + * Used for running MC without putting bunches + * between "signal" events. + * For running of MC-generated beam, set this to 1 + * For pulser-data overlay, set to 250 + * (250*2ns = 500ns empty time) + */ + private static int effectiveBunches = 1; + /** + * set buffer time to 0 + * used for pulser-data overlay MC readout. + * set to false for MC-generated beam + */ + private static boolean zeroBuffer = false; + /** + * Collection parameters for the dummy trigger bank object. + */ + private static LCIOCollection triggerBankParams = null; + + private static final String nl = String.format("%n"); + private static final Logger logger = Logger.getLogger(ReadoutDataManager.class.getSimpleName()); + + private static boolean debug=false; + + + @Override + public void startOfData() { + // Instantiate the readout LCIO file. + if(outputFileName == null) { + throw new IllegalArgumentException("Error: Output file name not defined!"); + } + try { outputWriter = new LCIOWriter(new File(outputFileName)); } + catch (IOException e) { + e.printStackTrace(); + throw new RuntimeException(); + } + + // Create a collection for the dummy trigger bank. 
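The fields above fix the bookkeeping constants for the simulation clock and the readout window: 2 ns beam bunches, a 200 ns default window with the trigger placed 50 ns into it, and effectiveBunches spacing successive LCIO events apart for pulser-overlay running. A minimal, self-contained sketch of that arithmetic, using only the default values quoted in the surrounding code (the class name and sample numbers are illustrative, not part of the patch):

// Standalone sketch of the event-clock and readout-window bookkeeping
// described above (constants taken from the defaults in this class).
public class ReadoutClockSketch {
    static final double BEAM_BUNCH_SIZE = 2.0;   // ns per beam bunch
    static final int READOUT_WINDOW = 200;       // ns, default readout window
    static final double TRIGGER_OFFSET = 50.0;   // ns of window kept before the trigger

    public static void main(String[] args) {
        // effectiveBunches = 1 for MC-generated beam, 250 for pulser-data
        // overlay (250 * 2 ns = 500 ns of empty time between signal events).
        int effectiveBunches = 250;

        double currentTime = 0.0;
        for (int event = 0; event < 3; event++) {
            System.out.printf("Event %d starts at t = %.1f ns%n", event, currentTime);
            currentTime += effectiveBunches * BEAM_BUNCH_SIZE;
        }

        // For a trigger at time t, the default readout window covers
        // [t - TRIGGER_OFFSET, t - TRIGGER_OFFSET + READOUT_WINDOW).
        double triggerTime = 612.0;
        double windowStart = triggerTime - TRIGGER_OFFSET;
        double windowEnd = windowStart + READOUT_WINDOW;
        System.out.printf("Trigger at %.1f ns reads out [%.1f, %.1f) ns%n",
                triggerTime, windowStart, windowEnd);
    }
}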
+ LCIOCollectionFactory.setCollectionName("TriggerBank"); + LCIOCollectionFactory.setFlags(0); + triggerBankParams = LCIOCollectionFactory.produceLCIOCollection(GenericObject.class); + + // Get the total amount of time that the readout system must + // wait to make sure that all data has been safely buffered + // and exists to read out. + double longestBufferBefore = 0.0; + double longestBufferAfter = 0.0; + double longestLocalBuffer = 0.0; + double longestTimeDisplacement = 0.0; + double longestDisplacedAfter = 0.0; + double longestTriggerDisplacement = 0.0; + + StringBuffer initializationBuffer = new StringBuffer(); + initializationBuffer.append("Getting longest trigger time displacement..." + nl); + for(Entry entry : triggerTimeDisplacementMap.entrySet()) { + initializationBuffer.append(String.format("\t%-30s :: %.0f%n", entry.getKey().getClass().getSimpleName(), entry.getValue().doubleValue())); + longestTriggerDisplacement = Math.max(longestTriggerDisplacement, entry.getValue().doubleValue()); + } + initializationBuffer.append("Longest is: " + longestTriggerDisplacement + nl + nl); + + initializationBuffer.append("Getting longest driver collection buffers..." + nl); + for(ManagedLCIOData data : collectionMap.values()) { + double before = Double.isNaN(data.getCollectionParameters().getWindowBefore()) ? 0.0 : data.getCollectionParameters().getWindowBefore(); + double after = Double.isNaN(data.getCollectionParameters().getWindowAfter()) ? 0.0 : data.getCollectionParameters().getWindowAfter(); + double displacement = data.getCollectionParameters().getProductionDriver().getTimeDisplacement(); + double local = data.getCollectionParameters().getProductionDriver().getTimeNeededForLocalOutput(); + + initializationBuffer.append("\t" + data.getCollectionParameters().getCollectionName() + nl); + initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Buffer Before", before)); + initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Buffer After", after)); + initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Local Buffer", local)); + initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Displacement", displacement)); + initializationBuffer.append(String.format("\t\t%-20s :: %.0f%n", "Displaced After", (displacement + after))); + + longestBufferBefore = Math.max(longestBufferBefore, before); + longestBufferAfter = Math.max(longestBufferAfter, after); + longestLocalBuffer = Math.max(longestLocalBuffer, local); + longestTimeDisplacement = Math.max(longestTimeDisplacement, displacement); + longestDisplacedAfter = Math.max(longestDisplacedAfter, displacement + after); + } + initializationBuffer.append("Longest (before) is: " + longestBufferBefore + nl); + initializationBuffer.append("Longest (after) is: " + longestBufferAfter + nl); + initializationBuffer.append("Longest (local) is: " + longestLocalBuffer + nl); + initializationBuffer.append("Longest (displacement) is: " + longestTimeDisplacement + nl); + initializationBuffer.append("Longest (displacemed after) is: " + longestDisplacedAfter + nl + nl); + + initializationBuffer.append("Readout Window: " + readoutWindow + nl); + initializationBuffer.append("Trigger Offset: " + triggerTimeDisplacement + nl); + initializationBuffer.append("Default Before: " + triggerTimeDisplacement + nl); + initializationBuffer.append("Default After : " + (readoutWindow - triggerTimeDisplacement) + nl + nl); + + triggerDelay = Math.max(longestTriggerDisplacement, longestDisplacedAfter); + triggerDelay = 
Math.max(triggerDelay, longestLocalBuffer); + double totalNeededDisplacement = triggerDelay + longestBufferBefore + 150; + + initializationBuffer.append("Total Time Needed: " + totalNeededDisplacement + nl); + logger.fine(nl + initializationBuffer.toString()); + + // Determine the total amount of time that must be included + // in the data buffer in order to safely write out all data. + // An extra 150 ns of data is retained as a safety, just in + // case some driver needs to look unusually far back. + bufferTotal = totalNeededDisplacement; + if(zeroBuffer) + bufferTotal = 0.0; + } + + @Override + public void endOfData() { + try { outputWriter.close(); } + catch(IOException e) { + e.printStackTrace(); + throw new RuntimeException(); + } + + System.out.println("Wrote " + triggers + " triggers."); + } + + @Override + public void process(EventHeader event) { + // Check the trigger queue. + if(!triggerQueue.isEmpty()) { + if(debug)System.out.println(this.getClass().getName()+" found a trigger @ "+triggerQueue.peek().getTriggerTime()+"; current time is "+ getCurrentTime()); + // Check the earliest possible trigger write time. + boolean isWritable = getCurrentTime() >= triggerQueue.peek().getTriggerTime() + bufferTotal; + if(debug && !isWritable) + System.out.println(this.getClass().getName()+":: can't write this trigger yet because "+getCurrentTime()+" < "+(triggerQueue.peek().getTriggerTime() + bufferTotal)); + + + // If all collections are available to be written, the + // event should be output. + if(isWritable) { + // Store the current trigger data. + TriggerTime trigger = triggerQueue.poll(); + + // 2016 MC only process one trigger, and no TS bank is stored + // 2019 MC can process multi-trigger, and TS bank is stored + List triggerList = new ArrayList(); + if(!trigger.getTriggerType().equals("noSet")) { + triggerList.add(trigger); + + // Iterate triggers in queue, remove next trigger if time of next trigger is the + // same as previous, until time of next trigger is not the same as previous or + // no next trigger + TriggerTime nextTrigger = null; + if(!triggerQueue.isEmpty()) nextTrigger = triggerQueue.peek(); + while((!triggerQueue.isEmpty()) && (nextTrigger.getTriggerTime() == trigger.getTriggerTime())) { + triggerList.add(nextTrigger); + triggerQueue.poll(); + if(!triggerQueue.isEmpty()) nextTrigger = triggerQueue.peek(); + } + } + triggers++; + double roughTimeOfEvent=getCurrentTime(); + if(effectiveBunches==1){ //we are doing spaced simulation + //342ns is the typical time to do readout/triggering...subtract this off to compare to spaced + if(debug) + System.out.println(getClass().getName()+":: subtracting 342ns to spaced event to get rough time"); + roughTimeOfEvent=roughTimeOfEvent-342.0; + } + if(debug) + System.out.println(getClass().getName()+":: found trigger number = "+triggers+" at current time = "+roughTimeOfEvent); + + // Make a new LCSim event. 
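startOfData() above folds the per-driver buffer requirements into a single bufferTotal, and process() only writes a queued trigger once the clock has passed triggerTime + bufferTotal. A standalone sketch of that arithmetic follows; the input values are made up for illustration and are not taken from any real configuration:

// Sketch of the buffering arithmetic used above: the manager must wait
// bufferTotal ns after a trigger before the readout window can be written.
public class BufferTotalSketch {
    public static void main(String[] args) {
        double longestTriggerDisplacement = 112.0; // ns, largest trigger-driver displacement
        double longestDisplacedAfter = 150.0;      // ns, largest (driver displacement + window-after)
        double longestLocalBuffer = 100.0;         // ns, largest local on-trigger buffer
        double longestBufferBefore = 32.0;         // ns, largest window-before

        double triggerDelay = Math.max(longestTriggerDisplacement,
                Math.max(longestDisplacedAfter, longestLocalBuffer));
        // An extra 150 ns is kept as a safety margin, as in startOfData().
        double bufferTotal = triggerDelay + longestBufferBefore + 150.0;
        System.out.println("bufferTotal = " + bufferTotal + " ns");

        // A queued trigger is only written once the simulation clock has
        // advanced past triggerTime + bufferTotal.
        double triggerTime = 500.0;
        double currentTime = 800.0;
        boolean isWritable = currentTime >= triggerTime + bufferTotal;
        System.out.println("writable at t = " + currentTime + " ns? " + isWritable);
    }
}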
+ int triggerEventNumber = event.getEventNumber() - ((int) Math.floor((getCurrentTime() - trigger.getTriggerTime()) / 2.0)); + EventHeader lcsimEvent = new BaseLCSimEvent(DatabaseConditionsManager.getInstance().getRun(), + triggerEventNumber, event.getDetectorName(), (long) 4 * (Math.round(trigger.getTriggerTime() / 4)), false); + + // 2016 MC only process one trigger, and no TS bank is stored + // 2019 MC can process multi-trigger, and TS bank is stored + if(!trigger.getTriggerType().equals("noSet")) { + List ts_list = new ArrayList(); + TSGenericObject tsBank = new TSGenericObject(); + int[] tsValues = new int[8]; + BitSet bits = new BitSet(32); + for(TriggerTime tri : triggerList) { + String triggerType = tri.getTriggerType(); + if(triggerType.equals(TriggerDriver.SINGLES0)) { + String topBot = tri.getTopBotStat(); + if(topBot.equals(TriggerDriver.TOP)) bits.set(0); + else if(topBot.equals(TriggerDriver.BOT)) bits.set(4); + else if(topBot.equals(TriggerDriver.TOPBOT)){ + bits.set(0); + bits.set(4); + } + } + else if(triggerType.equals(TriggerDriver.SINGLES1)) { + String topBot = tri.getTopBotStat(); + if(topBot.equals(TriggerDriver.TOP)) bits.set(1); + else if(topBot.equals(TriggerDriver.BOT)) bits.set(5); + else if(topBot.equals(TriggerDriver.TOPBOT)){ + bits.set(1); + bits.set(5); + } + } + else if(triggerType.equals(TriggerDriver.SINGLES2)) { + String topBot = tri.getTopBotStat(); + if(topBot.equals(TriggerDriver.TOP)) bits.set(2); + else if(topBot.equals(TriggerDriver.BOT)) bits.set(6); + else if(topBot.equals(TriggerDriver.TOPBOT)){ + bits.set(2); + bits.set(6); + } + } + else if(triggerType.equals(TriggerDriver.SINGLES3)) { + String topBot = tri.getTopBotStat(); + if(topBot.equals(TriggerDriver.TOP)) bits.set(3); + else if(topBot.equals(TriggerDriver.BOT)) bits.set(7); + else if(topBot.equals(TriggerDriver.TOPBOT)){ + bits.set(3); + bits.set(7); + } + } + else if(triggerType.equals(TriggerDriver.PAIR0)) bits.set(8); + else if(triggerType.equals(TriggerDriver.PAIR1)) bits.set(9); + else if(triggerType.equals(TriggerDriver.PAIR2)) bits.set(10); + else if(triggerType.equals(TriggerDriver.PAIR3)) bits.set(11); + else if(triggerType.equals(TriggerDriver.PULSER)) bits.set(15); + else if(triggerType.equals(TriggerDriver.FEE)) { + String topBot = tri.getTopBotStat(); + if(topBot.equals(TriggerDriver.TOP)) bits.set(18); + else if(topBot.equals(TriggerDriver.BOT)) bits.set(19); + else if(topBot.equals(TriggerDriver.TOPBOT)){ + bits.set(18); + bits.set(19); + } + } + } + + tsValues[0] = EvioEventConstants.TS_BANK_TAG; + + if(!bits.isEmpty()) { + tsValues[5] = (int)bits.toLongArray()[0]; + tsValues[6] = (int)bits.toLongArray()[0]; + } + else { + tsValues[5] = 0; + tsValues[6] = 0; + } + + // Filling the generic objects with the integer array + tsBank.setValues(tsValues); + + // Adding the generic object to the list + ts_list.add(tsBank); + lcsimEvent.put("TSBank", ts_list, TSGenericObject.class, 0); + } + + + // Calculate the readout window time range. This is + // used for any production driver that does not have + // a manually specified output range. + double startTime = trigger.getTriggerTime() - triggerTimeDisplacement; + double endTime = startTime + readoutWindow; + + logger.finer("Trigger Time: " + trigger.getTriggerTime()); + logger.finer("Default Time Range: " + startTime + " - " + endTime); + + // All readout output is initially stored in a single + // object. 
This allows the readout from multiple + // drivers to be merged, if needed, and also prevents + // duplicate instances of an object from being + // written. + Map> triggeredDataMap = new HashMap>(); + + // Write out the writable collections into the event. + for(ManagedLCIOData collectionData : collectionMap.values()) { + // Ignore any collections that are not set to be persisted. + if(!collectionData.getCollectionParameters().isPersistent()) { + continue; + } + + // Get the local start and end times. A driver + // may manually specify an amount of time before + // and after the trigger time which should be + // output. If this is the case, use it instead of + // the time found through use of the readout + // window/trigger time displacement calculation. + double localStartTime = startTime; + if(!Double.isNaN(collectionData.getCollectionParameters().getWindowBefore())) { + localStartTime = trigger.getTriggerTime() - collectionData.getCollectionParameters().getWindowBefore(); + } + + double localEndTime = endTime; + if(!Double.isNaN(collectionData.getCollectionParameters().getWindowAfter())) { + localEndTime = trigger.getTriggerTime() + collectionData.getCollectionParameters().getWindowAfter(); + } + + // Get the object data for the time range. + addDataToMap(collectionData.getCollectionParameters(), localStartTime, localEndTime, triggeredDataMap); + } + + // Write out any special on-trigger collections into + // the event as well. These are collated so that if + // more than one driver contributes to the same + // collection, they will be properly merged. + for(ReadoutDriver driver : driverSet) { + // Get the special collection(s) from the current + // driver, if it exists. + Collection> onTriggerData = driver.getOnTriggerData(trigger.getTriggerTime()); + + // If there are special collections, write them. + if(onTriggerData != null) { + for(TriggeredLCIOData triggerData : onTriggerData) { + addDataToMap(triggerData, triggerData.getCollectionParameters().getObjectType(), triggeredDataMap); + } + } + } + + // Create the dummy trigger bank data and store it. + TriggeredLCIOData triggerBankData = new TriggeredLCIOData(triggerBankParams); + triggerBankData.getData().add(new BaseTriggerData(new int[8])); + addDataToMap(triggerBankData, triggerBankData.getCollectionParameters().getObjectType(), triggeredDataMap); + + // Readout timestamps should be generated for both + // the "system" and the trigger. This corresponds to + // the simulation time at which the trigger occurred. + // Note that there is a "trigger delay" parameter in + // the old readout, but this does not exist in the + // new system, so both timestamps are the same. + + // Calculate the simulation trigger time. 
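The loop above derives each persisted collection's output range from the default readout window unless the collection registered its own windowBefore/windowAfter. A small self-contained sketch of that fallback logic; the helper name and window values are hypothetical:

// Minimal sketch of the per-collection window override applied above:
// a declared windowBefore/windowAfter replaces the default range derived
// from the readout window and trigger offset; NaN means "use the default".
public class WindowOverrideSketch {
    static double[] windowFor(double triggerTime, double defaultStart, double defaultEnd,
                              double windowBefore, double windowAfter) {
        double start = Double.isNaN(windowBefore) ? defaultStart : triggerTime - windowBefore;
        double end = Double.isNaN(windowAfter) ? defaultEnd : triggerTime + windowAfter;
        return new double[] { start, end };
    }

    public static void main(String[] args) {
        double triggerTime = 612.0;
        double defaultStart = triggerTime - 50.0;  // trigger offset
        double defaultEnd = defaultStart + 200.0;  // default readout window

        // Collection with no custom windows falls back to the defaults.
        double[] defaults = windowFor(triggerTime, defaultStart, defaultEnd, Double.NaN, Double.NaN);
        // Collection that asks for 32 ns before and 64 ns after the trigger.
        double[] custom = windowFor(triggerTime, defaultStart, defaultEnd, 32.0, 64.0);

        System.out.printf("default: [%.1f, %.1f) ns%n", defaults[0], defaults[1]);
        System.out.printf("custom : [%.1f, %.1f) ns%n", custom[0], custom[1]);
    }
}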
+ double simTriggerTime = trigger.getTriggerTime() + triggerTimeDisplacementMap.get(trigger.getTriggeringDriver()).doubleValue(); + ReadoutTimestamp systemTimestamp = new ReadoutTimestamp(ReadoutTimestamp.SYSTEM_TRIGGERBITS, simTriggerTime); + ReadoutTimestamp triggerTimestamp = new ReadoutTimestamp(ReadoutTimestamp.SYSTEM_TRIGGERTIME, simTriggerTime); + LCIOCollectionFactory.setCollectionName(ReadoutTimestamp.collectionName); + LCIOCollection timestampCollection = LCIOCollectionFactory.produceLCIOCollection(ReadoutTimestamp.class); + TriggeredLCIOData timestampData = new TriggeredLCIOData(timestampCollection); + timestampData.getData().add(systemTimestamp); + timestampData.getData().add(triggerTimestamp); + addDataToMap(timestampData, timestampData.getCollectionParameters().getObjectType(), triggeredDataMap); + + // Store all of the data collections. + for(TriggeredLCIOData triggerData : triggeredDataMap.values()) { + storeCollection(triggerData, lcsimEvent); + } + if(debug) + System.out.println(getClass().getName()+":: writing event!!!"); + + // Write the event to the output file. + try { outputWriter.write(lcsimEvent); } + catch(IOException e) { + e.printStackTrace(); + throw new RuntimeException(); + } + } + } + + // Remove all data from the buffer that occurs before the max + // buffer length cut-off. + for(ManagedLCIOData data : collectionMap.values()) { + while(!data.getData().isEmpty() && (data.getData().getFirst().getTime() < (getCurrentTime() - 500))) { + data.getData().removeFirst(); + } + } + if(debug){ + System.out.println("ReadoutDataManager:: end of event with current time = "+currentTime); + System.out.println("##################### END #################################"); + } + // Increment the current time. + currentTime += effectiveBunches*BEAM_BUNCH_SIZE; + } + + /** + * Adds a new set of data objects to the data manager at the time + * specified. + * @param collectionName - The collection name to which the data + * should be added. + * @param dataTime - The truth time at which the data objects + * occurred. This represents the time of the object, corrected + * for time displacement due to buffering on processing on the + * part of the production driver. + * @param data - The data to add. + * @param dataType - The class type of the data objects. + * @throws IllegalArgumentException Occurs if either the + * collection specified does not exist, or if the object type of + * the data objects does not match the object type of the data in + * the collection. + * @param - Specifies the class type of the data to be added + * to the collection. + */ + public static final void addData(String collectionName, double dataTime, Collection data, Class dataType) { + // Validate that the collection has been registered. + if(!collectionMap.containsKey(collectionName)) { + throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" has not been registered."); + } + + // Get the collection data object. + ManagedLCIOData collectionData = collectionMap.get(collectionName); + + // Validate that the data type is correct. + if(!collectionData.getCollectionParameters().getObjectType().isAssignableFrom(dataType)) { + throw new IllegalArgumentException("Error: Saw data type \"" + dataType.getSimpleName() + "\" but expected data type \"" + + collectionData.getCollectionParameters().getObjectType().getSimpleName() + "\" instead."); + } + //mg debug + /* + if(debug && Double.isNaN(dataTime)){ + System.out.println("ReadoutDataDriver:: addData no time given. 
"+ dataType.getName()+": currentTime = "+currentTime+"; global displacement = "+(collectionData.getCollectionParameters().getGlobalTimeDisplacement())); + System.out.println("ReadoutDataDriver:: addData setting time to = "+(currentTime - collectionData.getCollectionParameters().getGlobalTimeDisplacement())); + } else { + System.out.println("ReadoutDataDriver:: addData time provided; setting time to = "+dataTime); + } + */ + // + + // If the data is empty, then there is no need to add it to + // the buffer. + if(!data.isEmpty()) { + // Add the new data to the data buffer. + double time = Double.isNaN(dataTime) ? currentTime - collectionData.getCollectionParameters().getGlobalTimeDisplacement() : dataTime; + LinkedList> dataBuffer = collectionData.getData(); + dataBuffer.add(new TimedList(time, data)); + } + } + + /** + * Adds a new set of data objects to the data manager at a time + * calculated based on the current simulation time corrected by + * the total time offset of the collection. + * @param collectionName - The collection name to which the data + * should be added. + * @param data - The data to add. + * @param dataType - The class type of the data objects. + * @throws IllegalArgumentException Occurs if either the + * collection specified does not exist, or if the object type of + * the data objects does not match the object type of the data in + * the collection. + * @param - Specifies the class type of the data to be added + * to the collection. + */ + public static final void addData(String collectionName, Collection data, Class dataType) { + addData(collectionName, Double.NaN, data, dataType); + } + + /** + * Checks whether or not a collection has been populated up to + * the indicated time. + * @param collectionName - The collection to check. + * @param time - The time at which the collection should be + * filled. + * @return Returns true if the collection has data + * generated up to at least the specified time, and + * false if it does not. + */ + public static final boolean checkCollectionStatus(String collectionName, double time) { + // Verify that the requested collection exists. + if(!collectionMap.containsKey(collectionName)) { + throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" is not a registered collection."); + } + + // Otherwise, check if enough time has passed for the driver + // which controls to the collection to have produced output + // for the requested time period. + return time <= getCurrentTime() - collectionMap.get(collectionName).getCollectionParameters().getGlobalTimeDisplacement(); + } + + /** + * Gets the length in nanoseconds of a single event (beam bunch). + * @return Returns the length in ns of a single beam bunch. + */ + public static final double getBeamBunchSize() { + return BEAM_BUNCH_SIZE; + } + + /** + * Gets the LCIO collection parameters for a collection. + * @param collectionName - The name of the collection. + * @param objectType - The data type of the collection. + * @return Returns the collection parameters. + */ + @SuppressWarnings("unchecked") + public static final LCIOCollection getCollectionParameters(String collectionName, Class objectType) { + // Verify that the requested collection actually exists. + if(!collectionMap.containsKey(collectionName)) { + throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); + } + + // Get the collection and check that it is of the appropriate + // parameterized type. 
+ LCIOCollection collection = collectionMap.get(collectionName).getCollectionParameters(); + if(collection.getObjectType() != objectType) { + throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" is of type " + collection.getObjectType().getSimpleName() + + " while object type " + objectType.getSimpleName() + " was requested."); + } + + // Return the collection parameters. + // NOTE: This type case is safe, since it is verified above + // that the collection object is of the same class type + // as the parameterized type. + return (LCIOCollection) collection; + } + + /** + * Gets the current simulation time in nanoseconds. + * @return Returns the simulation time in nanoseconds. + */ + public static final double getCurrentTime() { + return currentTime; + } + + /** + * Gets a collection of data objects from a collection within the + * time range specified. + * @param startTime - The (inclusive) start of the time range. + * @param endTime The (exclusive) end of the time range. + * @param collectionName - The name of the collection. + * @param objectType - The class type of the data stored in the + * collection. + * @return Returns the data in the specified time range in the + * data collection in a {@link java.util.List List}. + * @param - Specifies the class type of the data stored in + * the collection. + */ + public static final Collection getData(double startTime, double endTime, String collectionName, Class objectType) { + return getDataList(startTime, endTime, collectionName, objectType); + } + + /** + * Gets the {@link org.lcsim.geometry.IDDecoder IDDecoder} that + * is used for the indicated managed collection, if it exists. + * @param collectionName - The collection to which the decoder + * should correspond. + * @return Returns the decoder for the collection, if it exists, + * and null otherwise. + */ + public static final IDDecoder getIDDecoder(String collectionName) { + // Verify that the requested collection actually exists. + if(!collectionMap.containsKey(collectionName)) { + throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); + } + + // Get the collection and obtain the ID decoder, if possible. + // If it does not exist, then leave it as a value of null. + LCIOCollection collection = collectionMap.get(collectionName).getCollectionParameters(); + IDDecoder decoder = null; + try { decoder = collection.getProductionDriver().getIDDecoder(collectionName); } + catch(UnsupportedOperationException e) { } + + // Return the decoder. + return decoder; + } + + /** + * Gets the default size of the readout window. + * @return Returns the default size of the readout window in + * units of nanoseconds. + */ + public static final int getReadoutWindow() { + return readoutWindow; + } + + /** + * Gets the total amount of time by which a collection is + * displaced between the actual truth data's occurrence in the + * simulation, and the time at which the object is actually + * produced. This includes both the time displacement introduced + * by the collection's production driver as well as displacement + * introduced by any preceding drivers that serve as input for + * the production driver. + * @param collectionName - The name of the collection. + * @return Returns the total time displacement in nanoseconds. 
+ */ + public static final double getTotalTimeDisplacement(String collectionName) { + if(collectionMap.containsKey(collectionName)) { + return collectionMap.get(collectionName).getCollectionParameters().getGlobalTimeDisplacement(); + } else { + throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); + } + } + + /** + * Gets the time displacement between when a trigger occurs, and + * when the triggered data is actually written out. + * @return Returns the trigger delay in units of nanoseconds. + */ + public static final double getTriggerDelay() { + return bufferTotal; + } + + /** + * Gets the time by which the trigger is offset in the readout + * window. + * @return Returns the trigger offset in units of nanoseconds. + */ + public static final double getTriggerOffset() { + return triggerTimeDisplacement; + } + + /** + * Adds a managed collection to the data manager. All collections + * which serve as either input or output from a {@link + * org.hps.readout.ReadoutDriver ReadoutDriver} are required to + * be registered and managed by the data manager. On-trigger + * special collections should not be registered. + * @param params - An object describing the collection + * parameters. + * @param persistent - Sets whether this collection should be + * written out to the readout LCIO file. + * @param - Specifies the class type of the data stored by + * the collection. + */ + public static final void registerCollection(LCIOCollection params, boolean persistent) { + registerCollection(params, persistent, Double.NaN, Double.NaN); + } + + /** + * Adds a managed collection to the data manager. All collections + * which serve as either input or output from a {@link + * org.hps.readout.ReadoutDriver ReadoutDriver} are required to + * be registered and managed by the data manager. On-trigger + * special collections should not be registered. + * @param params - An object describing the collection + * parameters. + * @param persistent - Sets whether this collection should be + * written out to the readout LCIO file. + * @param readoutWindowBefore - Defines a custom period of time + * before the trigger time in which all objects will be output to + * the output LCIO file. + * @param readoutWindowAfter - Defines a custom period of time + * after the trigger time in which all objects will be output to + * the output LCIO file. + * @param - Specifies the class type of the data stored by + * the collection. + */ + public static final void registerCollection(LCIOCollection params, boolean persistent, double readoutWindowBefore, double readoutWindowAfter) { + // Make sure that all arguments are defined. + if(params.getCollectionName() == null) { + throw new IllegalArgumentException("Error: Collection name must be defined."); + } + if(params.getObjectType() == null) { + throw new IllegalArgumentException("Error: Collection object class must be defined."); + } + if(params.getProductionDriver() == null) { + throw new IllegalArgumentException("Error: Production driver must be defined."); + } + + // There should only be one collection for a given name. + if(collectionMap.containsKey(params.getCollectionName())) { + throw new IllegalArgumentException("Collection \"" + params.getCollectionName() + "\" of object type " + + params.getObjectType().getSimpleName() + " already exists."); + } + + // Create a collection data object. 
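registerCollection(...) above requires a collection name, object type, and production driver before a collection can be managed, and optionally takes custom before/after readout windows. The hedged usage sketch below shows how a production driver might build and register such a collection; the collection name is hypothetical, the generic signatures are inferred (the type parameters were stripped in this diff), and the setProductionDriver(...) setter is assumed since it does not appear here:

// Hedged usage sketch: registering a persisted output collection with
// custom readout windows. Names marked hypothetical are not from the patch.
import org.hps.readout.ReadoutDataManager;
import org.hps.readout.ReadoutDriver;
import org.hps.readout.util.collection.LCIOCollection;
import org.hps.readout.util.collection.LCIOCollectionFactory;
import org.lcsim.event.CalorimeterHit;

public class CollectionRegistrationSketch {
    public static void registerExample(ReadoutDriver producer) {
        LCIOCollectionFactory.setCollectionName("EcalCorrectedHits"); // hypothetical name
        LCIOCollectionFactory.setFlags(0);
        LCIOCollectionFactory.setProductionDriver(producer);          // assumed setter
        LCIOCollection<CalorimeterHit> params =
                LCIOCollectionFactory.produceLCIOCollection(CalorimeterHit.class);

        // Persist the collection and keep 32 ns before / 64 ns after the
        // trigger time in the output LCIO event.
        ReadoutDataManager.registerCollection(params, true, 32.0, 64.0);
    }
}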
+ double timeDisplacement = getTotalTimeDisplacement(params.getCollectionName(), params.getProductionDriver()); + LCIOCollectionFactory.setParams(params); + LCIOCollectionFactory.setGlobalTimeDisplacement(timeDisplacement); + LCIOCollectionFactory.setPersistent(persistent); + LCIOCollectionFactory.setWindowAfter(readoutWindowAfter); + LCIOCollectionFactory.setWindowBefore(readoutWindowBefore); + ManagedLCIOCollection managedParams = LCIOCollectionFactory.produceManagedLCIOCollection(params.getObjectType()); + ManagedLCIOData collectionData = new ManagedLCIOData(managedParams); + collectionMap.put(params.getCollectionName(), collectionData); + + // Store the readout driver in the driver set. + driverSet.add(params.getProductionDriver()); + + logger.config("Registered collection \"" + managedParams.getCollectionName() + "\" of class type " + + managedParams.getObjectType().getSimpleName() + "."); + + StringBuffer detailsBuffer = new StringBuffer(); + detailsBuffer.append("\tCollection Name :: " + params.getCollectionName()); + detailsBuffer.append("\tFlags :: " + Integer.toHexString(params.getFlags())); + detailsBuffer.append("\tObject Type :: " + params.getObjectType().getSimpleName()); + detailsBuffer.append("\tReadout Name :: " + params.getReadoutName()); + detailsBuffer.append("\tProduction Driver :: " + params.getProductionDriver().getClass().getSimpleName()); + logger.finer(nl + detailsBuffer.toString()); + } + + /** + * Registers a {@link org.hps.readout.ReadoutDriver + * ReadoutDriver} with the data manager. All readout drivers must + * be registered in order for their on-trigger special data to be + * added to the output event. + * @param productionDriver - The readout driver to register. + */ + public static final void registerReadoutDriver(ReadoutDriver productionDriver) { + // Trigger drivers are registered differently. + if(productionDriver instanceof TriggerDriver) { + logger.warning(nl + "Attempted to register TriggerDriver \"" + productionDriver.getClass().getSimpleName() + "\" as a readout driver." + + nl + " Trigger drivers are registered via the method \"registerTrigger(TriggerDriver)\"." + + nl + " Ignoring request."); + return; + } + + // Add the readout driver. + driverSet.add(productionDriver); + logger.config("Registered driver: " + productionDriver.getClass().getSimpleName()); + } + + /** + * Registers a trigger driver with the data manager. + * @param triggerDriver - The trigger driver to register. + */ + public static final void registerTrigger(TriggerDriver triggerDriver) { + // Get the total time displacement for the trigger driver. + double timeDisplacement = getTotalTimeDisplacement("", triggerDriver); + + // Store the time displacement in the trigger driver map. + triggerTimeDisplacementMap.put(triggerDriver, timeDisplacement); + logger.config("Registered trigger: " + triggerDriver.getClass().getSimpleName()); + } + + /** + * Changes the "readout name" parameter for a collection, while + * retaining all other parameters and stored data. + * @param collectionName - The name of the collection to modify. + * @param objectType - The object type of the collection. + * @param newReadoutName - The new name for the "readout name" + * parameter. + * @param - The object type of the data stored in the + * collection that is to be modified. + */ + public static final void updateCollectionReadoutName(String collectionName, Class objectType, String newReadoutName) { + // Get the collection. 
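        // (Editor's illustrative sketch, not part of the patch.) Drivers that do not
        // produce a registered collection still announce themselves via
        // registerReadoutDriver, while trigger drivers must instead go through
        // registerTrigger; the driver instances named here are hypothetical:
        //
        //     ReadoutDataManager.registerReadoutDriver(svtReadoutDriver);
        //     ReadoutDataManager.registerTrigger(singlesTriggerDriver);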
+ if(!collectionMap.containsKey(collectionName)) { + throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); + } + ManagedLCIOData oldData = collectionMap.get(collectionName); + + // Make a new managed LCIO collection with the new readout. + LCIOCollectionFactory.setParams(oldData.getCollectionParameters()); + LCIOCollectionFactory.setReadoutName(newReadoutName); + ManagedLCIOCollection newParams = LCIOCollectionFactory.produceManagedLCIOCollection(objectType); + + // Create a new managed LCIO data object and transfer all the + // data from the old object to it. + ManagedLCIOData newData = new ManagedLCIOData(newParams); + for(TimedList oldList : oldData.getData()) { + newData.getData().add(oldList); + } + + // Put the new data list into the map. + collectionMap.put(collectionName, newData); + } + + /** + * Indicates that the specified driver saw a trigger and readout + * should occur. + * @param driver - The triggering driver. + * @throws IllegalArgumentException Occurs if the argument + * triggering driver is not registered as a trigger driver with + * the data manager. + */ + static final void sendTrigger(TriggerDriver driver) { + // Check that the triggering driver is registered as a + // trigger driver. + if(!triggerTimeDisplacementMap.containsKey(driver)) { + throw new IllegalArgumentException("Error: Driver \"" + driver.getClass().getSimpleName() + "\" is not a registered trigger driver."); + } + + // Calculate the trigger and readout times. + double triggerTime = getCurrentTime() - triggerTimeDisplacementMap.get(driver); + + // Add the trigger to the trigger queue. + triggerQueue.add(new TriggerTime(triggerTime, driver)); + logger.finer("Added trigger to queue with trigger time " + triggerTime + " and readout time " + (triggerTime + bufferTotal) + " from driver " + + driver.getClass().getSimpleName() + "."); + } + + /** + * Indicates that the specified driver saw a trigger and readout + * should occur. + * @param driver - The triggering driver. + * @param triggerType - trigger type. + * @throws IllegalArgumentException Occurs if the argument + * triggering driver is not registered as a trigger driver with + * the data manager. + */ + static final void sendTrigger(TriggerDriver driver, String triggerType) { + // Check that the triggering driver is registered as a + // trigger driver. + if(!triggerTimeDisplacementMap.containsKey(driver)) { + throw new IllegalArgumentException("Error: Driver \"" + driver.getClass().getSimpleName() + "\" is not a registered trigger driver."); + } + + // Calculate the trigger and readout times. + double triggerTime = getCurrentTime() - triggerTimeDisplacementMap.get(driver); + + // Add the trigger to the trigger queue. + triggerQueue.add(new TriggerTime(triggerTime, triggerType, driver)); + logger.finer("Added trigger to queue with trigger time " + triggerTime + " and readout time " + (triggerTime + bufferTotal) + " from driver " + + driver.getClass().getSimpleName() + "."); + } + + /** + * Indicates that the specified driver saw a trigger and readout + * should occur. + * @param driver - The triggering driver. + * @param triggerType - trigger type. + * @throws IllegalArgumentException Occurs if the argument + * triggering driver is not registered as a trigger driver with + * the data manager. + */ + static final void sendTrigger(TriggerDriver driver, String triggerType, String topBot) { + // Check that the triggering driver is registered as a + // trigger driver. 
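        // (Editor's illustrative sketch, not part of the patch.) sendTrigger is
        // package-private, so it is reached from trigger code in this package once a
        // trigger condition fires; the "singles2" tag is taken from the steering files
        // below and is only an example:
        //
        //     ReadoutDataManager.sendTrigger(triggerDriver);              // untagged trigger
        //     ReadoutDataManager.sendTrigger(triggerDriver, "singles2");  // tagged with a trigger type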
+ if(!triggerTimeDisplacementMap.containsKey(driver)) { + throw new IllegalArgumentException("Error: Driver \"" + driver.getClass().getSimpleName() + "\" is not a registered trigger driver."); + } + + // Calculate the trigger and readout times. + double triggerTime = getCurrentTime() - triggerTimeDisplacementMap.get(driver); + + // Add the trigger to the trigger queue. + triggerQueue.add(new TriggerTime(triggerTime, triggerType, topBot, driver)); + logger.finer("Added trigger to queue with trigger time " + triggerTime + " and readout time " + (triggerTime + bufferTotal) + " from driver " + + driver.getClass().getSimpleName() + "."); + } + + /** + * Adds a data collection corresponding to a given parameter set + * to the data map. If there is already data existing under the + * same collection, it is then merged without duplicating any + * objects. + * @param params - The collection parameters for the data. + * @param readoutData - The data to add. + * @param triggeredDataMap - The data map into which the data + * collection should be added. + */ + @SuppressWarnings("unchecked") + private static final void addDataToMap(LCIOCollection params, Collection readoutData, Map> triggeredDataMap) { + // Check and see if an output collection already exists for + // this parameter set. If so, use it; otherwise, make a new + // entry for it. + TriggeredLCIOData untypedData = triggeredDataMap.get(params.getCollectionName()); + TriggeredLCIOData typedData = null; + if(untypedData == null) { + typedData = new TriggeredLCIOData(params); + triggeredDataMap.put(params.getCollectionName(), typedData); + } else { + // Verify that the collection parameters are the same. + if(untypedData.getCollectionParameters().equals(params)) { + // Note: This cast is safe; if the parameters objects + // are the same, then the object sets are necessarily + // of the same object type. + typedData = (TriggeredLCIOData) untypedData; + } else { + throw new RuntimeException("Error: Found multiple collections of name \"" + params.getCollectionName() + "\", but of differing definitions."); + } + } + + // Add the readout data to the collection data list. + typedData.getData().addAll(readoutData); + } + + /** + * Adds data stored in the collection defined by the parameters + * object within the given time range to the data map. If there + * is already data existing under the same collection, it is then + * merged without duplicating any objects. + * @param params - The parameters for the collection to add. + * @param startTime - The start of the time range within the data + * buffer from which data should be drawn. + * @param endTime - The end of the time range within the data + * buffer from which data should be drawn. + * @param triggeredDataMap - The data map into which the data + * collection should be added. + */ + private static final void addDataToMap(LCIOCollection params, double startTime, double endTime, Map> triggeredDataMap) { + // Get the readout data objects. + List triggerData = getDataList(startTime, endTime, params.getCollectionName(), params.getObjectType()); + + // Pass the readout data to the merging method. + addDataToMap(params, triggerData, triggeredDataMap); + } + + /** + * Adds data stored in a triggered collection object to the data + * map. If there is already data existing under the same + * collection, it is then merged without duplicating any objects. + * @param dataList - The collection data to be added. + * @param objectType - the object type of the collection data. 
+ * @param triggeredDataMap - The data map into which the data + * collection should be added. + */ + private static final void addDataToMap(TriggeredLCIOData dataList, Class objectType, Map> triggeredDataMap) { + // Check that the parameters object is the same object type + // as is specified. + if(dataList.getCollectionParameters().getObjectType() != objectType) { + throw new IllegalArgumentException("Error: Can not process class type " + dataList.getCollectionParameters().getObjectType().getSimpleName() + + " as class type " + objectType.getSimpleName()); + } else { + // Note: This is safe - the above check requires that the + // object type be the parameterized type. + @SuppressWarnings("unchecked") + TriggeredLCIOData typedDataList = (TriggeredLCIOData) dataList; + Set triggerData = typedDataList.getData(); + addDataToMap(typedDataList.getCollectionParameters(), triggerData, triggeredDataMap); + } + } + + /** + * Gets a list of data objects from a collection within the time + * range specified. + * @param startTime - The (inclusive) start of the time range. + * @param endTime The (exclusive) end of the time range. + * @param collectionName - The name of the collection. + * @param objectType - The class type of the data stored in the + * collection. + * @return Returns the data in the specified time range in the + * data collection in a {@link java.util.List List}. + * @param - Specifies the class type of the data stored in + * the collection. + */ + private static final List getDataList(double startTime, double endTime, String collectionName, Class objectType) { + // Get the collection data. + ManagedLCIOData collectionData = collectionMap.get(collectionName); + + // Verify that the a collection of the indicated name exists + // and that it is the appropriate object type. + if(collectionData != null) { + if(!objectType.isAssignableFrom(collectionData.getCollectionParameters().getObjectType())) { + throw new IllegalArgumentException("Error: Expected object type " + objectType.getSimpleName() + " for collection \"" + collectionName + + ",\" but found object type " + collectionData.getCollectionParameters().getObjectType().getSimpleName() + "."); + } + } else { + throw new IllegalArgumentException("Error: Collection \"" + collectionName + "\" does not exist."); + } + + // Throw an alert if the earliest requested time precedes the + // earliest buffered time, and similarly for the latest time. + LinkedList> dataLists = collectionData.getData(); + //System.out.println("ReadoutDataManager::getDataList number in dataLists of objectType: "+objectType.getName()+" = "+dataLists.size()); + // Iterate through the data and collect all entries that have + // an associated truth time within the given time range. The + // lower bound is inclusive, the upper bound is exclusive. + List outputList = new ArrayList(); + for(TimedList dataList : dataLists) { + // if(debug) + // System.out.println("ReadoutDataManager::getDataList dataList found at time = "+dataList.getTime()+" looking in time window ["+startTime+"--"+endTime+"]"); + if(dataList.getTime() >= startTime && dataList.getTime() < endTime) { + // Add the items from the list to the output list. 
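                // (Editor's illustrative sketch, not part of the patch.) Callers reach
                // this method through the public getData wrapper; the window is inclusive
                // of startTime and exclusive of endTime, e.g. for a single 2 ns beam bunch
                // (the collection name is taken from the steering files below):
                //
                //     Collection<RawTrackerHit> bunchHits = ReadoutDataManager.getData(
                //             ReadoutDataManager.getCurrentTime(),
                //             ReadoutDataManager.getCurrentTime() + 2.0,
                //             "PulserDataEcalReadoutHits", RawTrackerHit.class);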
+ for(Object o : dataList) { + //if(debug) + // System.out.println("ReadoutDataManager:: dataList of type "+o.getClass().getName()+" found in the time window ["+startTime+"--"+endTime+"]"); + if(objectType.isAssignableFrom(o.getClass())) { + outputList.add(objectType.cast(o)); + } else { + throw new ClassCastException("Error: Unexpected object of type " + o.getClass().getSimpleName() + " in collection \"" + + collectionName + ".\""); + } + } + } + } + + // Return the collected items. + return outputList; + } + + /** + * Calculates the total time displacement of a collection based + * on its production driver, and the time displacements of the + * input collections from which it is produced. This is processed + * recursively, so all time displacements in the production chain + * of a collection are accounted for. + * @param collectionName - The name of the collection. + * @param productionDriver - The driver which produces the + * collection. + * @return Returns the total time displacement for the collection + * in units of nanoseconds. + */ + private static final double getTotalTimeDisplacement(String collectionName, ReadoutDriver productionDriver) { + // Make sure that there are no circular dependencies. + validateDependencies(collectionName, productionDriver, new HashSet()); + + // The total time displacement is the displacement of the + // dependent collection with the largest displacement plus + // the local time displacement of the production driver. + double baseDisplacement = 0.0; + for(String dependency : productionDriver.getDependencies()) { + // All dependencies must already be registered. Check + // that it is. + double dependencyDisplacement = 0.0; + if(collectionMap.containsKey(dependency)) { + dependencyDisplacement = collectionMap.get(dependency).getCollectionParameters().getGlobalTimeDisplacement(); + } else { + throw new IllegalArgumentException("Error: Collection \"" + dependency + "\" has not been registered."); + } + + // Select the largest value. + baseDisplacement = Math.max(baseDisplacement, dependencyDisplacement); + } + + // Return the sum of the largest base displacement and the + // production driver. + return baseDisplacement + productionDriver.getTimeDisplacement(); + } + + /** + * Writes an entire {@link org.hps.readout.ReadoutDriver + * ReadoutDriver} on-trigger data collection to the specified + * output event. + * @param collectionData - The on-trigger readout data. + * @param event - The output event. + * @param - Specifies the class type of the data that is to be + * written to the output event. + */ + private static final void storeCollection(TriggeredLCIOData collectionData, EventHeader event) { + storeCollection(collectionData.getCollectionParameters().getCollectionName(), collectionData.getCollectionParameters().getObjectType(), + collectionData.getCollectionParameters().getFlags(), collectionData.getCollectionParameters().getReadoutName(), + collectionData.getData(), event); + } + + /** + * Writes the specified data to the output event. + * @param collectionName - The name of the output collection. + * @param objectType - The class of the output collection data + * objects. + * @param flags - Any LCIO flags which apply to the data. + * @param readoutName - The readout name for the data, if it is + * needed. null should be used if a readout name is + * not required. + * @param collectionData - A parameterized {@link + * java.util.Collection Collection} containing the data that is + * to be written. 
+ * @param event - The event into which the data is to be written. + * @param - Specifies the class type of the data that is to be + * written to the output event. + */ + private static final void storeCollection(String collectionName, Class objectType, int flags, String readoutName, + Collection collectionData, EventHeader event) { + // The input collection must be a list. If it already is, + // just use it directly. Otherwise, copy the contents into an + // appropriately parameterized list. + List dataList; + if(collectionData instanceof List) { + dataList = (List) collectionData; + } else { + dataList = new ArrayList(collectionData.size()); + dataList.addAll(collectionData); + } + + // Place the data into the LCIO event. + if(readoutName == null) { + event.put(collectionName, dataList, objectType, flags); + } else { + event.put(collectionName, dataList, objectType, flags, readoutName); + } + + logger.finer(String.format("Output %d objects of type %s to collection \"%s\".", dataList.size(), objectType.getSimpleName(), collectionName)); + } + + /** + * Checks that the dependencies of a collection are valid. This + * consists of checking that any dependencies are registered with + * the data management driver and also that there are no circular + * dependencies present. + * @param collectionName - The name of the collection to check. + * @param productionDriver - The production driver of the + * collection to check. + * @param dependents - A set containing all of the collections + * which depend on this driver in the chain. Note that for the + * first call, this should be an empty set. + */ + private static final void validateDependencies(String collectionName, ReadoutDriver productionDriver, Set dependents) { + // Add the current driver to the list of dependents. + dependents.add(collectionName); + + // Check that none of the dependencies of the current driver + // are also dependencies of a driver higher in the chain. + for(String dependency : productionDriver.getDependencies()) { + // The dependency must be registered. + if(!collectionMap.containsKey(dependency)) { + throw new IllegalArgumentException("Error: Collection \"" + dependency + "\" has not been registered."); + } + + // Get the collection data for the dependency. + ManagedLCIOData collectionData = collectionMap.get(dependency); + + // Check that this dependency does not depend on the + // higher driver. + for(String dependent : dependents) { + if(collectionData.getCollectionParameters().getProductionDriver().getDependencies().contains(dependent)) { + throw new IllegalStateException("Error: Collection \"" + dependency + "\" depends on collection \"" + dependent + + ",\" but collection \"" + dependent + "\" also depends of collection \"" + dependency + ".\""); + } + } + + // If there are no detected circular dependencies, then + // perform the same check on the dependencies of this + // dependency. + Set dependencySet = new HashSet(); + dependencySet.addAll(dependents); + validateDependencies(dependency, collectionData.getCollectionParameters().getProductionDriver(), dependencySet); + } + } + + /** + * Adds the argument particle and all of its direct parents to + * the particle set. + * @param particle - The base particle. + * @param particleSet - The set that is to contain the full tree + * of particles. + */ + public static final void addParticleParents(MCParticle particle, Set particleSet) { + // Add the particle itself to the set. 
+        particleSet.add(particle);
+        
+        // If the particle has parents, run the same method for each
+        // parent.
+        if(!particle.getParents().isEmpty()) {
+            for(MCParticle parent : particle.getParents()) {
+                addParticleParents(parent, particleSet);
+            }
+        }
+    }
+    
+    /**
+     * Sets the output file name for the triggered data file.
+     * @param filepath - The file path for the output file.
+     */
+    public static final void setOutputFile(String filepath) {
+        outputFileName = filepath;
+    }
+    
+    /**
+     * Sets the default size of the readout window, in units of
+     * nanoseconds. Note that this can be overridden by specific
+     * drivers.
+     * @param nanoseconds - The length of the default readout window.
+     */
+    public static final void setReadoutWindow(int nanoseconds) {
+        readoutWindow = nanoseconds;
+    }
+    /**
+     * Sets the number of effective beam bunches between LCIO events,
+     * which fixes the empty time inserted between them. Used for
+     * running MC without putting filler bunches between "signal" events.
+     * For MC-generated beam, set this to 1.
+     * For pulser-data overlay, set this to 250
+     * (250 * 2 ns = 500 ns of empty time).
+     */
+    public static final void setEffectiveBunches(int value){
+        effectiveBunches=value;
+    }
+    /**
+     * If true, the buffer time is set to 0.
+     * Used for pulser-data overlay MC readout;
+     * set to false for MC-generated beam.
+     */
+    public static final void setZeroBuffer(boolean zero){
+        zeroBuffer=zero;
+    }
+    
+    public static final void setDebug(boolean value){
+        debug=value;
+    }
+    
+}
diff --git a/record-util/src/main/java/org/hps/readout/ReadoutDriver.java b/record-util/src/main/java/org/hps/readout/ReadoutDriver.java
index e781fc0df7..ff7fa6cba0 100755
--- a/record-util/src/main/java/org/hps/readout/ReadoutDriver.java
+++ b/record-util/src/main/java/org/hps/readout/ReadoutDriver.java
@@ -120,7 +120,13 @@ public abstract class ReadoutDriver extends Driver {
      * this collection data should be written.
      */
     private double readoutWindowBefore = Double.NaN;
-    
+    /**
+     * Boolean to choose the no-spacing readout mode.
+     * Should be false for MC-generated beam background.
+     */
+    public boolean doNoSpacing = false;
+    
+    public boolean debug=false;
     /**
      * Instantiates the readout driver.
 */
@@ -272,4 +278,16 @@ public void setReadoutWindowAfter(double value) throws UnsupportedOperationExcep
     public void setReadoutWindowBefore(double value) throws UnsupportedOperationException {
         readoutWindowBefore = value;
     }
+    /**
+     * Sets do-no-spacing readout mode
+     * used for unspaced signal events
+     * @param value - true/false do no spacing
+     */
+    public void setDoNoSpacing(boolean value) {
+        doNoSpacing = value;
+    }
+    
+    public void setDebug(boolean value){
+        debug=value;
+    }
 }
diff --git a/steering-files/src/main/resources/org/hps/steering/readout/PhysicsRun2019TrigSinglesWithPulserDataMergingNoSpacing.lcsim b/steering-files/src/main/resources/org/hps/steering/readout/PhysicsRun2019TrigSinglesWithPulserDataMergingNoSpacing.lcsim
new file mode 100644
index 0000000000..af841a8ba0
--- /dev/null
+++ b/steering-files/src/main/resources/org/hps/steering/readout/PhysicsRun2019TrigSinglesWithPulserDataMergingNoSpacing.lcsim
@@ -0,0 +1,472 @@
[472 lines of XML steering for the 2019 singles-trigger readout with pulser-data merging in no-spacing mode; the element tags were lost in extraction. The surviving text names the simulation collections EcalHits, MCParticle, HodoscopeHits and TrackerHits, the pulser-data collections PulserDataEcalReadoutHits, PulserDataHodoReadoutHits and PulserDataSVTRawTrackerHits, the digitization and trigger-path collections EcalRawHits, EcalCorrectedHits, EcalReadoutHits, EcalTruthRelations, TriggerPathTruthRelations, HodoscopePreprocessedHits, HodoscopeRawHits, HodoscopeReadoutHits, HodoscopeCorrectedHits and HodoscopePatterns, GTP clustering to EcalClustersGTP, a singles2 trigger, and the output file ${outputFile}.slcio; numeric driver parameters survive without their element names.]
diff --git a/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacing.lcsim b/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacing.lcsim
new file mode 100644
index 0000000000..32c0ceda4b
--- /dev/null
+++ b/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacing.lcsim
@@ -0,0 +1,449 @@
[449 lines of XML steering for a no-spacing readout test; the element tags were lost in extraction. It references the same simulation, pulser-data, digitization, clustering and singles-trigger collections as the file above, with different driver parameter values, and writes ${outputFile}.slcio.]
diff --git a/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacingModifyCurrent.lcsim b/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacingModifyCurrent.lcsim
new file mode 100644
index 0000000000..5b4a7ebbbb
--- /dev/null
+++ b/steering-files/src/main/resources/org/hps/steering/readout/TestNoSpacingModifyCurrent.lcsim
@@ -0,0 +1,472 @@
[472 lines of XML steering for a no-spacing readout test variant; the element tags were lost in extraction. It references the same simulation, pulser-data, digitization, clustering and singles-trigger collections as the files above, with different driver parameter values, and writes ${outputFile}.slcio.]
diff --git a/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF_NoSpacing.lcsim b/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF_NoSpacing.lcsim
new file mode 100644
index 0000000000..a2b37866d3
--- /dev/null
+++ b/steering-files/src/main/resources/org/hps/steering/recon/PhysicsRun2019MCRecon_KF_NoSpacing.lcsim
@@ -0,0 +1,208 @@
[208 lines of XML steering for the 2019 MC reconstruction with Kalman-filter tracking of no-spacing readout; the element tags were lost in extraction. The surviving text names EcalClusters and EcalClustersCorr, HodoscopeReadoutHits, SVTRawTrackerHits, KalmanFullTracks, TrackClusterMatcherMinDistance, the unconstrained, beamspot-constrained and target-constrained V0 and Moller candidate and vertex collections, FinalStateParticles_KF and OtherElectrons_KF, numeric track-quality and vertexing cuts, and the outputs ${outputFile}.slcio and ${outputFile}.root.]
diff --git a/tracking/src/main/java/org/hps/recon/tracking/kalman/KFOutputDriver.java b/tracking/src/main/java/org/hps/recon/tracking/kalman/KFOutputDriver.java
index 083c7186c0..9b454ac29a 100644
--- a/tracking/src/main/java/org/hps/recon/tracking/kalman/KFOutputDriver.java
+++ b/tracking/src/main/java/org/hps/recon/tracking/kalman/KFOutputDriver.java
@@ -81,6 +81,7 @@ public class KFOutputDriver extends Driver {
     private boolean b_doKFresiduals = true;
     private boolean b_doDetailPlots = false;
     private boolean b_doRawHitPlots = true;
+    private boolean b_doAllRawHitPlots = true;
 
     //The field map for extrapolation
     private FieldMap bFieldMap;
@@ -272,6 +273,20 @@ public void process(EventHeader event) {
             }
         }
 
+ //plot all raw hits + if(b_doAllRawHitPlots){ + for (LCRelation fittedHit : _fittedHits) { + RawTrackerHit rth=FittedRawTrackerHit.getRawTrackerHit(fittedHit); + HpsSiSensor sensor = (HpsSiSensor) rth.getDetectorElement(); + double t0 = FittedRawTrackerHit.getT0(fittedHit); + double amplitude = FittedRawTrackerHit.getAmp(fittedHit); + double chi2Prob = ShapeFitParameters.getChiProb(FittedRawTrackerHit.getShapeFitParameters(fittedHit)); + aidaKF.histogram1D(hitFolder+"all_raw_hit_t0_"+sensor.getName()).fill(t0); + aidaKF.histogram1D(hitFolder+"all_raw_hit_amplitude_"+sensor.getName()).fill(amplitude); + aidaKF.histogram1D(hitFolder+"all_raw_hit_chisq_"+sensor.getName()).fill(chi2Prob); + } + } + int nTracks=tracks.size(); if(debug) System.out.println(this.getClass()+":: found "+nTracks + " tracks"); @@ -324,7 +339,7 @@ public void process(EventHeader event) { HpsSiSensor sensor = ((HpsSiSensor) ((RawTrackerHit) hit.getRawHits().get(0)).getDetectorElement()); if (sensor != null) { if(debug){ - System.out.println(this.getClass().getName()+":: inserting hit on sensor = "+sensor.getName()); + System.out.println(this.getClass().getName()+":: inserting hit on sensor = "+sensor.getName()); } sensorHits.put(sensor, hit); } @@ -697,6 +712,7 @@ private void doBasicKFtrack(Track trk, Map sensorHits) } } if (b_doDetailPlots) { + int ibins = 15; double start= -12; double end = -5; @@ -834,6 +850,7 @@ private void doKFresiduals(Track trk, Map sensorHits, E } int nres = (trackRes.getNInt()-1); + if(debug){ System.out.println(this.getClass().getName()+":: number entries in trackRes = "+nres); } @@ -843,6 +860,7 @@ private void doKFresiduals(Track trk, Map sensorHits, E // get the unbias for (int i_hit =0; i_hit < nres ; i_hit+=1) { if (trackRes.getIntVal(i_hit)!=-999) { + if(debug){ System.out.println(this.getClass().getName()+":: getting residual for ihit = "+i_hit+" trackResValue = "+ trackRes.getIntVal(i_hit)); } @@ -893,6 +911,7 @@ private void doKFresiduals(Track trk, Map sensorHits, E aidaKF.histogram2D(resFolder+"uresidual_KF_vs_u_hit_" + sensorName).fill(hitPosSensorG.x(),trackRes.getDoubleVal(i_hit)); aidaKF.histogram2D(resFolder+"uresidual_KF_vs_v_pred_" + sensorName).fill(extrapPosSensor.y(),trackRes.getDoubleVal(i_hit)); aidaKF.histogram1D(epullFolder+"ureserror_KF_" + sensorName).fill(trackRes.getFloatVal(i_hit)); + aidaKF.histogram1D(epullFolder+"ures_pull_KF_" + sensorName).fill(trackRes.getDoubleVal(i_hit) / Math.sqrt(trackRes.getFloatVal(i_hit))); //Get the hit time @@ -1002,7 +1021,9 @@ private void setupEoPPlots() { for (String charge : charges) { //put the trk-cluster time in trkpFolder - aidaKF.histogram1D(trkpFolder+"trk-cluTime"+charge+vol,100,-20,20); + aidaKF.histogram1D(trkpFolder+"trk-cluTime"+charge+vol,100,-75,75); + + aidaKF.histogram2D(eopFolder+"EoP_vs_trackP"+charge+vol+"_fid",200,0,6,200,0,2); aidaKF.histogram2D(eopFolder+"EoP_vs_tanLambda"+charge+vol+"_fid",200,0.01,0.08,200,0,2); @@ -1056,11 +1077,8 @@ private void setupPlots() { int mod_2dplot_bins = sensors.size()+mod*2; for (String vol : volumes) { - // aidaKF.histogram1D(resFolder+"bresidual_KF"+vol,nbins, -xmax, xmax); aidaKF.histogram1D(resFolder+"uresidual_KF"+vol,nbins, -xmax, xmax); - // aidaKF.histogram1D(resFolder+"bresidual_KF"+vol+"_L1L4",nbins,-xmax,xmax); aidaKF.histogram1D(resFolder+"uresidual_KF"+vol+"_L1L4",nbins,-xmax,xmax); - // aidaKF.histogram1D(resFolder+"bresidual_KF"+vol+"_L5L7",nbins,-xmax,xmax); aidaKF.histogram1D(resFolder+"uresidual_KF"+vol+"_L5L7",nbins,-xmax,xmax); } @@ -1068,14 +1086,12 
@@ private void setupPlots() { //res/kinks TH2D //5 empty bins to distinguish between top and bottom - // aidaKF.histogram2D(resFolder+"bresidual_KF_mod",mod_2dplot_bins,-0.5,mod_2dplot_bins-0.5, nbins, -xmax,xmax); - // aidaKF.profile1D(resFolder+"bresidual_KF_mod_p",mod_2dplot_bins,-0.5,mod_2dplot_bins-0.5); aidaKF.histogram2D(resFolder+"uresidual_KF_mod",mod_2dplot_bins,-0.5,mod_2dplot_bins-0.5, 400, -0.4,0.4); aidaKF.profile1D(resFolder+"uresidual_KF_mod_p",mod_2dplot_bins,-0.5,mod_2dplot_bins-0.5); //Hits vs channel - /* + int nch = 400; aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL1b",nch,0,nch,nch,0,nch); aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL2b",nch,0,nch,nch,0,nch); @@ -1092,7 +1108,6 @@ private void setupPlots() { aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL5t",nch,0,nch,nch,0,nch); aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL6t",nch,0,nch,nch,0,nch); aidaKF.histogram2D(resFolder+"Axial_vs_Stereo_channel_moduleL7t",nch,0,nch,nch,0,nch); - */ for (SiSensor sensor : sensors) { @@ -1102,24 +1117,19 @@ private void setupPlots() { nbins = 250; int l = (sens.getLayerNumber() + 1) / 2; if (l > 1) xmax = 0.05 + (l - 1) * 0.08; - // aidaKF.histogram1D(resFolder+"residual_before_KF_" + sensor.getName(), nbins, -xmax, xmax); xmax = 0.250; if (l >= 6) xmax = 0.250; aidaKF.histogram1D(resFolder+"residual_after_KF_" + sensor.getName(), nbins, -xmax, xmax); - // aidaKF.histogram1D(resFolder+"bresidual_KF_" + sensor.getName(), nbins, -xmax, xmax); + aidaKF.histogram1D(resFolder+"uresidual_KF_" + sensor.getName(), nbins, -xmax, xmax); aidaKF.histogram2D(resFolder+"uresidual_KF_vs_u_hit_" + sensor.getName(),100,-20.0,20.0,100,-0.1,0.1); aidaKF.histogram2D(resFolder+"uresidual_KF_vs_v_pred_" + sensor.getName(),300,-60.0,60.0,100,-0.1,0.1); aidaKF.histogram2D(resFolder+"uresidual_KF_vs_dT_hit_" + sensor.getName(),100,-10.0,10.0,100,-0.1,0.1); - aidaKF.histogram2D(resFolder+"uresidual_KF_vs_dTs_hit_" + sensor.getName(),100,-5.0,5.0,100,-0.1,0.1); - - - // aidaKF.histogram1D(epullFolder+"breserror_KF_" + sensor.getName(), nbins, 0.0, 0.1); + aidaKF.histogram2D(resFolder+"uresidual_KF_vs_dTs_hit_" + sensor.getName(),100,-5.0,5.0,100,-0.1,0.1); aidaKF.histogram1D(epullFolder+"ureserror_KF_" + sensor.getName(), nbins, 0.0, 0.2); - // aidaKF.histogram1D(epullFolder+"bres_pull_KF_" + sensor.getName(), nbins, -5, 5); aidaKF.histogram1D(epullFolder+"ures_pull_KF_" + sensor.getName(), nbins, -5, 5); aidaKF.histogram2D(resFolder+"residual_after_KF_vs_u_hit_" + sensor.getName(), 100, -20.0, 20.0, 100, -0.04, 0.04); @@ -1133,6 +1143,9 @@ private void setupPlots() { aidaKF.histogram1D(hitFolder+"raw_hit_amplitude_"+sensor.getName(),200, 0.0, 4000.0); aidaKF.histogram1D(hitFolder+"raw_hit_chisq_"+sensor.getName(),200, 0.0, 2.0); + aidaKF.histogram1D(hitFolder+"all_raw_hit_t0_"+sensor.getName(),200, -100, 100.0); + aidaKF.histogram1D(hitFolder+"all_raw_hit_amplitude_"+sensor.getName(),200, 0.0, 4000.0); + aidaKF.histogram1D(hitFolder+"all_raw_hit_chisq_"+sensor.getName(),200, 0.0, 2.0); xmax = 0.0006; @@ -1182,7 +1195,7 @@ private void setupPlots() { aidaKF.histogram1D(trkpFolder+"z0"+vol+charge,nbins_t,-1.3,1.3); aidaKF.histogram1D(trkpFolder+"phi"+vol+charge,nbins_t,-0.06,0.06); aidaKF.histogram1D(trkpFolder+"tanLambda"+vol+charge,nbins_t,-0.2,0.2); - aidaKF.histogram1D(trkpFolder+"trkTime"+vol+charge,nbins_t,-20,20); + aidaKF.histogram1D(trkpFolder+"trkTime"+vol+charge,nbins_t,-75,75); 
aidaKF.histogram1D(trkpFolder+"trkTimeSD"+vol+charge,nbins_t,0,10); aidaKF.histogram1D(trkpFolder+"p"+vol+charge,nbins_p,0.,pmax); @@ -1194,7 +1207,7 @@ private void setupPlots() { aidaKF.histogram1D(trkpFolder+"p_slot"+vol+charge,nbins_p,0.,pmax); aidaKF.histogram1D(trkpFolder+"Chi2"+vol+charge,nbins_t*2,0,200); - aidaKF.histogram1D(trkpFolder+"Chi2oNDF"+vol+charge,nbins_t*2,0,50); + aidaKF.histogram1D(trkpFolder+"Chi2oNDF"+vol+charge,nbins_t*2,0,50); aidaKF.histogram1D(trkpFolder+"nHits"+vol+charge,15,0,15); aidaKF.histogram1D(trkpFolder+"trk_extr_or_x"+vol+charge,nbins_t,-3,3); aidaKF.histogram1D(trkpFolder+"trk_extr_or_y"+vol+charge,nbins_t,-3,3); @@ -1309,3 +1322,4 @@ private LCRelation getFittedHit(RawTrackerHit rawHit) { return fittedRawTrackerHitMap.get(rawHit); } } +