Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
37 commits
Select commit Hold shift + click to select a range
9b0c279
add convenience method
baltzell Apr 22, 2026
4b1b28a
remove unused stuff
baltzell Apr 22, 2026
2c68f8b
add decoding engine
baltzell Apr 22, 2026
8d3d469
add it to the clara yaml
baltzell Apr 22, 2026
f87b747
use a pool
baltzell Apr 23, 2026
90581ae
hmm
baltzell Apr 23, 2026
a33c1bb
allow decoder instances to share ConstantsManagers
baltzell Apr 23, 2026
64de21e
share ConstantsManagers
baltzell Apr 23, 2026
1c6c280
kludge test
baltzell Apr 23, 2026
b94fbe1
cleanup
baltzell Apr 23, 2026
67d4b55
try this
baltzell Apr 23, 2026
588b65d
inherit ConstantsManagers
baltzell Apr 23, 2026
9b79dad
cleanup
baltzell Apr 23, 2026
283e1cf
only check tables if not shared
baltzell Apr 23, 2026
83095a4
higher ports on macos, cleanup process dpe process
baltzell Apr 23, 2026
6dc2216
remove ineffective pid trap
baltzell Apr 23, 2026
3ee7e84
Revert "higher ports on macos, cleanup process dpe process"
baltzell Apr 24, 2026
71b4055
Revert "fix job name"
baltzell Apr 24, 2026
8be927f
Revert "decouple ubuntu/macos builds to reduce wait"
baltzell Apr 24, 2026
11e6b6f
Revert "remove example engine"
baltzell Apr 24, 2026
412040a
restore reported data type
baltzell Apr 25, 2026
9f22d1f
rename class
baltzell Apr 27, 2026
eabd5f0
undo
baltzell Apr 27, 2026
4c54cd6
fix rebase oops
baltzell Apr 27, 2026
1fab062
rename class
baltzell Apr 27, 2026
8b1dc56
fix rebase oops
baltzell Apr 27, 2026
842bf16
stf
baltzell Apr 27, 2026
90a186f
just events, not per thread
baltzell Apr 28, 2026
fcdd018
reduce constants sharing
baltzell Apr 28, 2026
c87118a
bugfix
baltzell Apr 28, 2026
a566963
share one RCDBManager
baltzell Apr 28, 2026
855a4da
bugfix
baltzell Apr 28, 2026
f9bbfda
restore
baltzell May 9, 2026
6373bbc
DecoderEngine: just pass along HIPO events
baltzell May 11, 2026
b4fdd59
cleanup, avoid class variable
baltzell May 12, 2026
e613971
remove debugging leftover
baltzell May 12, 2026
396cf92
revert to hard-coded byte order
baltzell May 13, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ public int readEventCount() throws EventReaderException {

@Override
public ByteOrder readByteOrder() throws EventReaderException {
// NOTE(review): byte order is intentionally hard-coded to little-endian,
// overriding whatever the reader reports for the file (original call kept
// commented out on purpose) -- confirm this remains the desired behavior
// for all input files.
return ByteOrder.LITTLE_ENDIAN; //reader.getFileByteOrder();
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,6 @@

import org.jlab.io.base.DataEvent;
import org.jlab.io.evio.EvioDataEvent;
import org.jlab.io.hipo.HipoDataEvent;
import org.jlab.io.hipo.HipoDataSync;

import org.jlab.jnp.hipo4.data.Bank;
import org.jlab.jnp.hipo4.data.Event;
Expand All @@ -38,27 +36,28 @@ public class CLASDecoder {
protected SchemaFactory schemaFactory = new SchemaFactory();
private CodaEventDecoder codaDecoder = null;
private List<DetectorDataDgtz> dataList = new ArrayList<>();
private HipoDataSync writer = null;
private HipoDataEvent hipoEvent = null;
private boolean isRunNumberFixed = false;
private int decoderDebugMode = 0;
private ModeAHDC ahdcExtractor = new ModeAHDC();
private RCDBManager rcdbManager = new RCDBManager();
private static RCDBManager rcdbManager = new RCDBManager();

public CLASDecoder(boolean development){
codaDecoder = new CodaEventDecoder();
detectorDecoder = new DetectorEventDecoder(development);
writer = new HipoDataSync();
hipoEvent = (HipoDataEvent) writer.createEvent();
String dir = ClasUtilsFile.getResourceDir("CLAS12DIR", "etc/bankdefs/hipo4");
schemaFactory.initFromDirectory(dir);
}

public CLASDecoder(){
codaDecoder = new CodaEventDecoder();
detectorDecoder = new DetectorEventDecoder();
writer = new HipoDataSync();
hipoEvent = (HipoDataEvent) writer.createEvent();
String dir = ClasUtilsFile.getResourceDir("CLAS12DIR", "etc/bankdefs/hipo4");
schemaFactory.initFromDirectory(dir);
}

public CLASDecoder(CLASDecoder d) {
codaDecoder = new CodaEventDecoder();
detectorDecoder = new DetectorEventDecoder(d.detectorDecoder);
String dir = ClasUtilsFile.getResourceDir("CLAS12DIR", "etc/bankdefs/hipo4");
schemaFactory.initFromDirectory(dir);
}
Expand Down Expand Up @@ -792,4 +791,8 @@ public Event getDecodedEvent(EvioDataEvent rawEvent, int run, int counter, Doubl

return decodedEvent;
}

/**
 * Convenience overload: decode a raw EVIO event with default settings,
 * delegating to the full overload with run=-1, counter=-1, and null
 * torus/solenoid scales.
 *
 * @param rawEvent the raw EVIO event to decode
 * @return the decoded HIPO event
 */
public Event getDecodedEvent(EvioDataEvent rawEvent) {
return getDecodedEvent(rawEvent, -1, -1, null, null);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,17 @@ public DetectorEventDecoder(boolean development){
}
}

/**
 * Default constructor: builds the table/key lists and fully initializes
 * this instance's own constants managers (equivalent to initDecoder(true)).
 */
public DetectorEventDecoder(){
this.initDecoder();
}

/**
 * Copy constructor: inherits (shares) the already-initialized constants
 * managers of an existing decoder instead of creating and initializing
 * new ones.  initDecoder(false) rebuilds only the table/key lists and
 * skips manager initialization and table checks, since the shared
 * managers are assumed to have been initialized by {@code d}.
 *
 * @param d the decoder whose constants managers are shared
 */
public DetectorEventDecoder(DetectorEventDecoder d) {
translationManager = d.translationManager;
fitterManager = d.fitterManager;
scalerManager = d.scalerManager;
initDecoder(false);
}

public void setTimestamp(String timestamp) {
translationManager.setTimeStamp(timestamp);
fitterManager.setTimeStamp(timestamp);
Expand Down Expand Up @@ -80,10 +91,6 @@ public float getRcdbSolenoidScale() {
getValue()).floatValue();
}

public DetectorEventDecoder(){
this.initDecoder();
}

public final void initDecoderDev(){
keysTrans = Arrays.asList(new DetectorType[]{ DetectorType.HTCC,DetectorType.BST,DetectorType.RTPC} );
tablesTrans = Arrays.asList(new String[]{ "/daq/tt/clasdev/htcc","/daq/tt/clasdev/svt","/daq/tt/clasdev/rtpc" });
Expand All @@ -95,7 +102,11 @@ public final void initDecoderDev(){
"/runcontrol/helicity","/daq/config/scalers/dsc1"}));
}

public final void initDecoder(){
/**
 * Full initialization: builds the table/key lists AND initializes this
 * instance's constants managers (delegates to initDecoder(true)).
 */
public final void initDecoder() {
initDecoder(true);
}

public final void initDecoder(boolean initializeManagers){

// Detector translation table
keysTrans = Arrays.asList(new DetectorType[]{DetectorType.FTCAL,DetectorType.FTHODO,DetectorType.FTTRK,DetectorType.LTCC,DetectorType.ECAL,DetectorType.FTOF,
Expand All @@ -109,7 +120,6 @@ public final void initDecoder(){
"/daq/tt/rf","/daq/tt/bmt","/daq/tt/fmt","/daq/tt/rich2","/daq/tt/hel","/daq/tt/band","/daq/tt/rtpc",
"/daq/tt/raster","/daq/tt/atof","/daq/tt/ahdc"
});
translationManager.init(tablesTrans);

// ADC waveform fitter translation table
keysFitter = Arrays.asList(new DetectorType[]{DetectorType.FTCAL,DetectorType.FTHODO,DetectorType.FTTRK,DetectorType.FTOF,DetectorType.LTCC,
Expand All @@ -122,17 +132,20 @@ public final void initDecoder(){
"/daq/config/fmt","/daq/fadc/hel","/daq/fadc/rf","/daq/fadc/band","/daq/fadc/raster",
"/daq/config/ahdc"
});
fitterManager.init(tablesFitter);

// Data filter list
keysFilter = Arrays.asList(new DetectorType[]{DetectorType.DC});

scalerManager.init(Arrays.asList(new String[]{"/runcontrol/fcup","/runcontrol/slm","/runcontrol/hwp",
"/runcontrol/helicity","/daq/config/scalers/dsc1"}));


keysMicromega = Arrays.asList(new DetectorType[]{DetectorType.BMT,DetectorType.FMT,DetectorType.FTTRK});

checkTables();
if (initializeManagers) {
translationManager.init(tablesTrans);
fitterManager.init(tablesFitter);
scalerManager.init(Arrays.asList(new String[]{"/runcontrol/fcup","/runcontrol/slm","/runcontrol/hwp",
"/runcontrol/helicity","/daq/config/scalers/dsc1"}));
checkTables();
}

}

public void checkTables() {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
package org.jlab.clas.reco;

import java.util.Set;
import java.util.HashSet;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import org.jlab.clara.base.ClaraUtil;
import org.jlab.clara.engine.Engine;
import org.jlab.clara.engine.EngineData;
import org.jlab.clara.engine.EngineDataType;
import org.jlab.clara.engine.EngineStatus;
import org.jlab.detector.decode.CLASDecoder;
import org.jlab.io.evio.EvioDataEvent;
import org.jlab.io.hipo.HipoDataEvent;
import org.jlab.jnp.hipo4.data.SchemaFactory;
import org.json.JSONObject;

/**
*
* @author baltzell
*/
public class DecoderEngine implements Engine {

static final int POOL_SIZE = 64;
static final Set<EngineDataType> ED_TYPES = ClaraUtil.buildDataTypes(
Clas12Types.EVIO,Clas12Types.HIPO,EngineDataType.JSON,EngineDataType.STRING);

SchemaFactory schema;
BlockingQueue<CLASDecoder> pool;
int constantsShared = 16;

public DecoderEngine() {
schema = new SchemaFactory();
schema.initFromDirectory(System.getenv("CLAS12DIR") + "/etc/bankdefs/hipo4");
Comment thread
baltzell marked this conversation as resolved.
}

@Override
public Set<EngineDataType> getInputDataTypes() { return ED_TYPES; }
@Override
public Set<EngineDataType> getOutputDataTypes() { return ED_TYPES; }
@Override
public EngineData executeGroup(Set<EngineData> set) { return null; }
@Override
public Set<String> getStates() { return new HashSet<>(); }
@Override
public String getDescription() { return "decoder engine"; }
@Override
public String getVersion() { return "1.0"; }
@Override
public String getAuthor() { return "baltzell"; }
@Override
public void reset() {}
@Override
public void destroy() {}

@Override
public EngineData configure(EngineData ed) {
JSONObject json = new JSONObject(ed.getData());
pool = new ArrayBlockingQueue<>(POOL_SIZE);
CLASDecoder d0 = null;
for (int i=0; i<POOL_SIZE; i++) {
CLASDecoder d;
if (i % constantsShared == 0) {
d0 = new CLASDecoder();
if (json.has("variation")) d0.setVariation(json.getString("variation"));
if (json.has("timestamp")) d0.setVariation(json.getString("timestamp"));
d = d0;
}
else {
d = new CLASDecoder(d0);
}
pool.add(d);
}
return ed;
}

@Override
public EngineData execute(EngineData input) {

EngineData output = input;

// if it's EVIO, decode it, otherwise just pass it along
if (input.getMimeType().equals("binary/data-evio")) {
EvioDataEvent evio;
try {
ByteBuffer bb = (ByteBuffer) input.getData();
//evio = new EvioDataEvent(bb.array(), bb.order());
evio = new EvioDataEvent(bb.array(), ByteOrder.LITTLE_ENDIAN);
} catch (Exception e) {
String msg = String.format("Error reading input event%n%n%s", ClaraUtil.reportException(e));
output.setStatus(EngineStatus.ERROR);
output.setDescription(msg);
return output;
}
HipoDataEvent hipo;
try {
CLASDecoder d = pool.take();
hipo = new HipoDataEvent(d.getDecodedEvent(evio),schema);
pool.put(d);
output.setData("binary/data-hipo", hipo.getHipoEvent());
} catch (Exception e) {
String msg = String.format("Error processing input event%n%n%s", ClaraUtil.reportException(e));
output.setStatus(EngineStatus.ERROR);
output.setDescription(msg);
return output;
}
}

return output;
}
}
6 changes: 4 additions & 2 deletions etc/services/rgd-clarode.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,14 @@ configuration:
outputBankPrefix: "HB"
io-services:
reader:
class: org.jlab.io.clara.DecoderReader
name: DecoderReader
class: org.jlab.io.clara.EvioToEvioReader
name: EvioReader
writer:
class: org.jlab.io.clara.DecoderWriter
name: DecoderWriter
services:
- class: org.jlab.clas.reco.DecoderEngine
name: DECO
- class: org.jlab.service.ai.DCDenoiseEngine
name: DCDN
- class: org.jlab.clas.swimtools.MagFieldsEngine
Expand Down