import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.awt.image.Raster;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import javax.imageio.ImageIO;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
public class blur {

    public static class BlurMapper extends MapReduceBase
            implements Mapper<Text, BytesWritable, LongWritable, BytesWritable> {

        OutputCollector<LongWritable, BytesWritable> goutput;

        int IMAGE_HEIGHT = 240;
        int IMAGE_WIDTH = 320;
        public BytesWritable Gmiu;   // running per-pixel mean
        public BytesWritable Gsigma; // running per-pixel variance
        public BytesWritable w;      // running per-pixel weight
        byte[] bytes = new byte[IMAGE_HEIGHT * IMAGE_WIDTH * 3];
        public BytesWritable emit = new BytesWritable(bytes);
        int count = 0;
        int initVar = 125;
        public LongWritable l = new LongWritable(1);

        public void map(Text key, BytesWritable file,
                        OutputCollector<LongWritable, BytesWritable> output,
                        Reporter reporter) throws IOException {
            // Keep the collector around so close() can emit the final model.
            goutput = output;

            // Decode the current frame from the record value.
            BufferedImage img = ImageIO.read(new ByteArrayInputStream(file.getBytes()));
            // Blur filter operation (external JAR), currently commented out:
            // BoxBlurFilter BlurOp = new BoxBlurFilter(10, 10, 2);
            Raster ras = img.getData();
            DataBufferByte db = (DataBufferByte) ras.getDataBuffer();
            // Assumes a single-band (grayscale) raster, i.e. data.length == IMAGE_HEIGHT * IMAGE_WIDTH.
            byte[] data = db.getData();

            byte[] byte1 = new byte[IMAGE_HEIGHT * IMAGE_WIDTH];
            byte[] byte2 = new byte[IMAGE_HEIGHT * IMAGE_WIDTH];
            for (int i = 0; i < IMAGE_HEIGHT * IMAGE_WIDTH; i++) {
                byte1[i] = 20;   // initial per-pixel sigma
                byte2[i] = 125;  // initial per-pixel weight
            }

            if (count == 0) {
                // First frame: seed the model with the frame itself plus the
                // constant initial sigma and weight arrays.
                Gmiu = new BytesWritable(data);
                Gsigma = new BytesWritable(byte1);
                w = new BytesWritable(byte2);
                count++;
            } else {
                // Subsequent frames: exponential running update of mean,
                // variance and weight for every pixel.
                byte[] oldmiu = Gmiu.getBytes();
                byte[] oldsigma = Gsigma.getBytes();
                byte[] oldw = w.getBytes();
                byte[] neww = new byte[IMAGE_HEIGHT * IMAGE_WIDTH];
                double alpha = 0.05;
                for (int i = 0; i < IMAGE_HEIGHT * IMAGE_WIDTH; i++) {
                    byte pixel = data[i];  // note: Java bytes are signed (-128..127)
                    double tempmiu = (1 - alpha) * oldmiu[i] + alpha * pixel;
                    double temp1 = (pixel - oldmiu[i]) * (pixel - oldmiu[i]);
                    double tempsig = (1 - alpha) * oldsigma[i] + alpha * temp1;
                    double w1 = (1 - alpha) * oldw[i] + alpha * 100;
                    byte1[i] = (byte) tempmiu;
                    byte2[i] = (byte) tempsig;
                    neww[i] = (byte) w1;
                }
                // BytesWritable.set(buf, off, len) replaces the whole value with a copy
                // of that range, so update the writables once per frame, not per pixel.
                Gmiu.set(byte1, 0, IMAGE_HEIGHT * IMAGE_WIDTH);
                Gsigma.set(byte2, 0, IMAGE_HEIGHT * IMAGE_WIDTH);
                w.set(neww, 0, IMAGE_HEIGHT * IMAGE_WIDTH);
            }

            // Pack mean | variance | weight into the single value emitted from close().
            System.arraycopy(Gmiu.getBytes(), 0, bytes, 0, IMAGE_HEIGHT * IMAGE_WIDTH);
            System.arraycopy(Gsigma.getBytes(), 0, bytes, IMAGE_HEIGHT * IMAGE_WIDTH, IMAGE_HEIGHT * IMAGE_WIDTH);
            System.arraycopy(w.getBytes(), 0, bytes, 2 * IMAGE_HEIGHT * IMAGE_WIDTH, IMAGE_HEIGHT * IMAGE_WIDTH);
            emit.set(bytes, 0, 3 * IMAGE_HEIGHT * IMAGE_WIDTH);
        }

        @Override
        public void close() {
            try {
                goutput.collect(l, emit);
            } catch (Exception e) {
                e.printStackTrace();
                System.exit(-1);
            }
        }
    }

    public static void main(String[] args) {
        if (args.length != 2) {
            System.err.println("Usage: blurvideo input output");
            System.exit(-1);
        }
        JobClient client = new JobClient();
        JobConf conf = new JobConf(blur.class);

        conf.setOutputValueClass(BytesWritable.class);
        conf.setInputFormat(SequenceFileInputFormat.class);
        // conf.setNumMapTasks(n);
        SequenceFileInputFormat.addInputPath(conf, new Path(args[0]));
        SequenceFileOutputFormat.setOutputPath(conf, new Path(args[1]));
        conf.setMapperClass(BlurMapper.class);
        conf.setNumReduceTasks(0);
        // conf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);

        client.setConf(conf);
        try {
            JobClient.runJob(conf);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
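For context, the mapper expects a SequenceFile of <Text, BytesWritable> records, one encoded frame per record, which it decodes with ImageIO. Something along these lines would produce such a file (a minimal, hypothetical FrameWriter sketch; the per-frame key and the JPEG encoding are assumptions about the packing, not my exact extraction code):

import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.File;
import javax.imageio.ImageIO;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class FrameWriter {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        // args[0] = local directory of frame images, args[1] = output SequenceFile
        SequenceFile.Writer writer = SequenceFile.createWriter(
                fs, conf, new Path(args[1]), Text.class, BytesWritable.class);
        try {
            for (File f : new File(args[0]).listFiles()) {
                BufferedImage img = ImageIO.read(f);
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                ImageIO.write(img, "jpg", baos);  // assumed encoding
                byte[] encoded = baos.toByteArray();
                // One record per frame: key = file name, value = encoded bytes.
                writer.append(new Text(f.getName()), new BytesWritable(encoded));
            }
        } finally {
            writer.close();
        }
    }
}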
Error:
java.lang.NullPointerException
        at blur$BlurMapper.close(blur.java:99)
        at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:57)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:342)
        at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:138)
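My only guess so far (not verified): the trace shows close() being invoked from MapRunner.run(), so if map() never gets as far as assigning goutput (for example an empty split, or an exception thrown before the method body runs), goutput is still null when close() fires and the collect() call throws. A guard along these lines would at least surface the real failure instead of masking it with the NullPointerException:

@Override
public void close() throws IOException {
    // Hypothetical guard, not what I am currently running:
    // skip the final emit if map() was never entered.
    if (goutput == null) {
        System.err.println("close(): map() was never called, nothing to emit");
        return;
    }
    goutput.collect(l, emit);
}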
Any idea what is going wrong here? Please reply!