
Commit a73d8c2

optimise kafka log

1 parent ef9e313

File tree: 1 file changed (+64, -56)

src/net/preibisch/fusiontask/task/MainJob.java

Lines changed: 64 additions & 56 deletions
@@ -19,6 +19,7 @@
 import net.imglib2.util.Util;
 import net.preibisch.distribution.algorithm.blockmanager.block.BasicBlockInfo;
 import net.preibisch.distribution.algorithm.clustering.kafka.KafkaManager;
+import net.preibisch.distribution.algorithm.clustering.kafka.KafkaProperties;
 import net.preibisch.distribution.algorithm.clustering.scripting.TaskType;
 import net.preibisch.distribution.algorithm.controllers.items.BlocksMetaData;
 import net.preibisch.distribution.io.img.XMLFile;
@@ -51,61 +52,64 @@ public MainJob() {
     @Override
     public Void call() throws Exception {
         try {
-
-            try{id = id -1;
-            }catch(Exception e) {
-                KafkaManager.error(-1,e.toString());
+
+            try {
+                id = id - 1;
+            } catch (Exception e) {
+                KafkaManager.error(-1, e.toString());
                 System.out.println("Error id");
                 throw new Exception("Specify id!");
             }
-            TaskType type = TaskType.of(task);
-            switch (type) {
-            case PREPARE:
-                generateN5(input, metadataPath, output, id);
-                return null;
-            case PROCESS:
-                blockTask(input, metadataPath, output, id);
-                return null;
-
-            default:
-                KafkaManager.error(id,"Specify task");
-                System.out.println("Error");
-                throw new Exception("Specify task!");
-            }
-        } catch(Exception e) {
-            KafkaManager.error(id,e.toString());
+            TaskType type = TaskType.of(task);
+            switch (type) {
+            case PREPARE:
+                generateN5(input, metadataPath, output, id);
+                return null;
+            case PROCESS:
+                blockTask(input, metadataPath, output, id);
+                return null;
+
+            default:
+                KafkaManager.error(id, "Specify task");
+                System.out.println("Error");
+                throw new Exception("Specify task!");
+            }
+        } catch (Exception e) {
+            KafkaManager.error(id, e.toString());
             System.out.println("Error");
             throw new Exception("Specify task!");
         }
-        // MyLogger.log.info("Block " + id + " saved !");
+        // MyLogger.log.info("Block " + id + " saved !");
     }
-
+
     public static void blockTask(String inputPath, String metadataPath, String outputPath, int id) {
         try {
             KafkaManager.log(id, "Start process");
-            // XMLFile inputFile = XMLFile.XMLFile(inputPath);
+            // XMLFile inputFile = XMLFile.XMLFile(inputPath);
             BlocksMetaData md = BlocksMetaData.fromJson(metadataPath);
-            KafkaManager.log(id,"Got metadata !");
+            String jobId = md.getJobId();
+            KafkaProperties.setJobId(jobId);
+            KafkaManager.log(id, "Got metadata !");
             BasicBlockInfo binfo = md.getBlocksInfo().get(id);
-            KafkaManager.log(id,"Got block info !");
+            KafkaManager.log(id, "Got block info !");
             BoundingBox bb = new BoundingBox(Util.long2int(binfo.getMin()), Util.long2int(binfo.getMax()));
-            KafkaManager.log(id,"Bounding box created: "+bb.toString());
-            List<ViewId> viewIds = md.getViewIds() ;
-            KafkaManager.log(id,"Got view ids ");
+            KafkaManager.log(id, "Bounding box created: " + bb.toString());
+            List<ViewId> viewIds = md.getViewIds();
+            KafkaManager.log(id, "Got view ids ");
 
-            XMLFile inputFile = XMLFile.XMLFile(inputPath, bb, md.getDownsample() , viewIds);
+            XMLFile inputFile = XMLFile.XMLFile(inputPath, bb, md.getDownsample(), viewIds);
 
-            KafkaManager.log(id,"Input loaded. ");
-            // XMLFile inputFile = XMLFile.XMLFile(inputPath);
+            KafkaManager.log(id, "Input loaded. ");
+            // XMLFile inputFile = XMLFile.XMLFile(inputPath);
             RandomAccessibleInterval<FloatType> block = inputFile.fuse(bb);
 
-            KafkaManager.log(id,"Got block. ");
+            KafkaManager.log(id, "Got block. ");
             N5File outputFile = N5File.open(outputPath);
             outputFile.saveBlock(block, binfo.getGridOffset());
-            KafkaManager.log(id,"Task finished "+id);
-            KafkaManager.done(id,"Task finished "+id);
+            KafkaManager.log(id, "Task finished " + id);
+            KafkaManager.done(id, "Task finished " + id);
         } catch (SpimDataException | IOException e) {
-            KafkaManager.error(id,e.toString());
+            KafkaManager.error(id, e.toString());
             e.printStackTrace();
         }
     }
@@ -116,7 +120,7 @@ public static void generateN5(String inputPath, String metadataPath, String outp
             BlocksMetaData md = BlocksMetaData.fromJson(metadataPath);
             long[] dims = md.getDimensions();
             int blockUnit = md.getBlockUnit();
-            N5File outputFile = new N5File(outputPath, dims,blockUnit );
+            N5File outputFile = new N5File(outputPath, dims, blockUnit);
             outputFile.create();
             KafkaManager.log(id, "N5 Generated");
             KafkaManager.done(id, "N5 Generated");
@@ -126,30 +130,34 @@ public static void generateN5(String inputPath, String metadataPath, String outp
             e.printStackTrace();
         }
     }
+
     public static void generateN5fromXML(String inputPath, String metadataPath, String outputPath, int id) {
-        try {
-            System.out.println("Start generating output");
-            XMLFile inputFile = XMLFile.XMLFile(inputPath);
-            RandomAccessibleInterval<FloatType> virtual = inputFile.fuse();
-            String dataset = "/volumes/raw";
-            N5Writer writer = new N5FSWriter(outputPath);
-            BlocksMetaData md = BlocksMetaData.fromJson(metadataPath);
-            // long[] dims = md.getDimensions();
-            int blockUnit = md.getBlockUnit();
-            int[] blocks = Tools.array(blockUnit, virtual.numDimensions());
-
-            N5Utils.save(virtual, writer, dataset, blocks, new RawCompression());
-            System.out.println("Ouptut generated");
-        } catch (SpimDataException | IOException e1) {
-            // TODO Auto-generated catch block
-            e1.printStackTrace();
-        }
+        try {
+            System.out.println("Start generating output");
+            XMLFile inputFile = XMLFile.XMLFile(inputPath);
+            RandomAccessibleInterval<FloatType> virtual = inputFile.fuse();
+            String dataset = "/volumes/raw";
+            N5Writer writer = new N5FSWriter(outputPath);
+            BlocksMetaData md = BlocksMetaData.fromJson(metadataPath);
+            // long[] dims = md.getDimensions();
+            int blockUnit = md.getBlockUnit();
+            int[] blocks = Tools.array(blockUnit, virtual.numDimensions());
+
+            N5Utils.save(virtual, writer, dataset, blocks, new RawCompression());
+            System.out.println("Ouptut generated");
+        } catch (SpimDataException | IOException e1) {
+            // TODO Auto-generated catch block
+            e1.printStackTrace();
+        }
     }
 
     public static void main(String[] args) {
-        // new ImageJ();
-        // String str = "-t proc -i /Users/Marwan/Desktop/Task/grid-3d-stitched-h5/dataset.xml -o /Users/Marwan/Desktop/Task/output.n5 -m /Users/Marwan/Desktop/Task/metadata.json -id 1";
-        // System.out.println(String.join(" ", args));
+        // new ImageJ();
+        // String str = "-t proc -i
+        // /Users/Marwan/Desktop/Task/grid-3d-stitched-h5/dataset.xml -o
+        // /Users/Marwan/Desktop/Task/output.n5 -m
+        // /Users/Marwan/Desktop/Task/metadata.json -id 1";
+        // System.out.println(String.join(" ", args));
         CommandLine.call(new MainJob(), args);
     }
 }
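Aside from whitespace and comment re-wrapping, the functional change in this commit is in blockTask: the worker now reads the job id from the block metadata and registers it with KafkaProperties before emitting further Kafka log messages, presumably so that log, done, and error events are attributed to the job that produced them. The sketch below isolates that flow using only calls visible in the diff (BlocksMetaData.fromJson, getJobId, KafkaProperties.setJobId, KafkaManager.log/done); the class KafkaLogFlowSketch and its method name are hypothetical, invented here for illustration and not part of the repository.

// Hypothetical sketch of the job-id registration added in this commit.
import net.preibisch.distribution.algorithm.clustering.kafka.KafkaManager;
import net.preibisch.distribution.algorithm.clustering.kafka.KafkaProperties;
import net.preibisch.distribution.algorithm.controllers.items.BlocksMetaData;

public class KafkaLogFlowSketch {

    public static void processBlock(String metadataPath, int id) throws Exception {
        // Block metadata is loaded from the JSON file passed to the job (the -m argument).
        BlocksMetaData md = BlocksMetaData.fromJson(metadataPath);

        // New in this commit: register the job id before any further logging,
        // so KafkaManager messages can be associated with the correct job.
        String jobId = md.getJobId();
        KafkaProperties.setJobId(jobId);

        // Subsequent log/done/error calls are emitted under that job id.
        KafkaManager.log(id, "Got metadata !");
        // ... fuse and save the block, as in MainJob.blockTask(...) ...
        KafkaManager.done(id, "Task finished " + id);
    }
}

For reference, the commented-out string in main documents the expected command-line flags (-t for the task, -i for the input dataset XML, -o for the output N5, -m for the block metadata JSON, -id for the block id); CommandLine.call(new MainJob(), args) parses these and dispatches to generateN5 or blockTask through the switch in call().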
