Java Code Examples for org.apache.hadoop.io.DataOutputByteBuffer

The following code examples demonstrate how to use org.apache.hadoop.io.DataOutputByteBuffer from Hadoop. The examples are extracted from various highly rated open source projects; you can use the snippets directly or view their full linked source code. They provide contextual information about how this class is used in the real world, and also illustrate good practices for working with org.apache.hadoop.io.DataOutputByteBuffer in different implementations.
Example 1
Project : hadoop Source File : SimulatorSettingsWritableTest.java View Source Code on GitHub

	/**
	 * Round-trips 100 randomly generated simulator settings through the Hadoop
	 * Writable serialization path and verifies each restored copy hashes
	 * identically to its source.
	 */
	public void testHadoopSimulatorSettings() throws IOException, BadAlgorithmException {
		final SimulatorSettingsGeneticListImpl geneticList = TestGeneticSimulatorSettings.getGeneticList();

		for (int attempt = 0; attempt < 100; attempt++) {
			final Execution original = geneticList.generateRandom();

			// Serialize into an in-memory buffer.
			final DataOutputByteBuffer serialized = new DataOutputByteBuffer();
			new SimulatorSettingsWritable(original).write(serialized);

			// Feed the raw bytes back through the Writable to reconstruct.
			final DataInputByteBuffer deserialized = new DataInputByteBuffer();
			deserialized.reset(serialized.getData());
			final SimulatorSettingsWritable restored = new SimulatorSettingsWritable();
			restored.readFields(deserialized);

			final Execution copy = restored.getSimulatorSettings(geneticList.getStockStorage());
			Assert.assertEquals(original.stringHashCode(), copy.stringHashCode());
		}
	}
			
Example 2
Project : hadoop Source File : TestBytesBitmap.java View Source Code on GitHub

  /**
   * Demonstrates serializing a RoaringBitmap into a DataOutputByteBuffer and
   * reading it back through a DataInputByteBuffer, printing ranks before and
   * after the round trip.
   */
  public static void main(String[] args) throws Exception {
    final Random random = new Random();
    final RoaringBitmap bitmap = new RoaringBitmap();
    final DataOutputByteBuffer out = new DataOutputByteBuffer(128);

    // Insert 100 random values in [0, 100) and print the rank of each.
    for (int i = 0; i < 100; i++) {
      final int value = Math.abs(random.nextInt()) % 100;
      bitmap.add(value);
      System.out.println(value + ": " + bitmap.rank(value));
    }

    System.out.println();

    // Serialize the bitmap, then rebuild it from the raw bytes.
    bitmap.serialize(out);
    final byte[] raw = Bytes.toBytes(out);
    final DataInputByteBuffer in = new DataInputByteBuffer();
    in.reset(ByteBuffer.wrap(raw, 0, raw.length));
    bitmap.deserialize(in);

    System.out.println("-------");
    for (int i = 0; i < bitmap.getCardinality(); i++) {
      final int selected = bitmap.select(i);
      System.out.println(selected + ": " + bitmap.rank(selected));
    }
    System.out.println("-------");

    final Iterator<Integer> values = bitmap.iterator();
    while (values.hasNext()) {
      System.out.println(values.next());
    }
  }
			
Example 3
Project : hadoop Source File : NNMapReduceTest.java View Source Code on GitHub

  /**
   * Checks PartitionDataWritable ordering (comparison driven by the second
   * ByteArray, not the boolean flag) and verifies a serialization round trip
   * yields an equal-comparing copy.
   */
  public void testWritable() throws IOException {

    final PartitionDataWritable writable1 = new PartitionDataWritable();
    final PartitionDataWritable writable2 = new PartitionDataWritable();

    // Same sort key "abc": compares equal even though the flags differ.
    writable1.setPartitionData(
        new PartitionData(new ByteArray(new byte[] {}), new ByteArray("abc"), true));
    writable2.setPartitionData(
        new PartitionData(new ByteArray(new byte[] {}), new ByteArray("abc"), false));
    assertTrue(writable1.compareTo(writable2) == 0);

    // "abc" < "abd" regardless of the flag value.
    writable2.setPartitionData(
        new PartitionData(new ByteArray(new byte[] {}), new ByteArray("abd"), false));
    assertTrue(writable1.compareTo(writable2) < 0);
    writable2.setPartitionData(
        new PartitionData(new ByteArray(new byte[] {}), new ByteArray("abd"), true));
    assertTrue(writable1.compareTo(writable2) < 0);

    // Serialization round trip: writable2 becomes a copy of writable1.
    final DataOutputByteBuffer out = new DataOutputByteBuffer();
    writable1.write(out);
    out.flush();
    final DataInputByteBuffer in = new DataInputByteBuffer();
    in.reset(out.getData());
    writable2.readFields(in);
    assertTrue(writable1.compareTo(writable2) == 0);
  }
			
Example 4
Project : hadoop Source File : SimulatorSettingsGridListWritableTest.java View Source Code on GitHub

	/**
	 * Serializes a grid list through its Writable wrapper, restores it, and
	 * verifies the period and initializer counts survive the round trip.
	 */
	public void testSimulatorSettingsGridListWritable() throws IOException, BadParameterException {
		final SimulatorSettingsGridList original = TestGridSimulatorSettings.getGridList();

		// Write the list into an in-memory buffer.
		final DataOutputByteBuffer out = new DataOutputByteBuffer();
		new SimulatorSettingsGridListWritable(original).write(out);

		// Read it back from the buffered bytes.
		final DataInputByteBuffer in = new DataInputByteBuffer();
		in.reset(out.getData());
		final SimulatorSettingsGridListWritable restoredWritable = new SimulatorSettingsGridListWritable();
		restoredWritable.readFields(in);

		final SimulatorSettingsGridList restored = restoredWritable.getGridList(original.getStockStorage());
		Assert.assertEquals(original.getPeriod().toString(), restored.getPeriod().toString());
		Assert.assertEquals(original.getStockInitializers().size(), restored.getStockInitializers().size());
		Assert.assertEquals(original.getEodInitializers().size(), restored.getEodInitializers().size());
	}
			
Example 5
Project : hadoop Source File : SimulatorSettingsGeneticListWritableTest.java View Source Code on GitHub

	/**
	 * Serializes a genetic list through its Writable wrapper, restores it, and
	 * verifies the initializer counts and the first two stock algorithm names
	 * survive the round trip.
	 */
	public void testSimulatorSettingsGeneticListWritable() throws BadAlgorithmException, IOException, BadParameterException {
		final SimulatorSettingsGeneticListImpl original = TestGeneticSimulatorSettings.getGeneticList();

		// Write the list into an in-memory buffer.
		final DataOutputByteBuffer out = new DataOutputByteBuffer();
		new SimulatorSettingsGeneticListWritable(original).write(out);

		// Read it back from the buffered bytes.
		final DataInputByteBuffer in = new DataInputByteBuffer();
		in.reset(out.getData());
		final SimulatorSettingsGeneticListWritable restoredWritable = new SimulatorSettingsGeneticListWritable();
		restoredWritable.readFields(in);

		final SimulatorSettingsGeneticListImpl restored = restoredWritable.getGeneticList(original.getStockStorage());

		final List<GeneticExecutionInitializer> stocks = original.getStockInitializers();
		final List<GeneticExecutionInitializer> stocksCopy = restored.getStockInitializers();
		final List<GeneticExecutionInitializer> eods = original.getEodInitializers();
		final List<GeneticExecutionInitializer> eodsCopy = restored.getEodInitializers();

		Assert.assertEquals(stocks.size(), stocksCopy.size());
		Assert.assertEquals(eods.size(), eodsCopy.size());

		// Spot-check that algorithm names are preserved in order.
		Assert.assertEquals(stocks.get(0).algorithmName, stocksCopy.get(0).algorithmName);
		Assert.assertEquals(stocks.get(1).algorithmName, stocksCopy.get(1).algorithmName);
	}
			
Example 6
Project : hadoop Source File : TestTaskID.java View Source Code on GitHub

  /**
   * Verifies the on-the-wire layout produced by TaskID.write(): job id/app id
   * ints, a vint-prefixed job identifier string, then the task type enum.
   */
  public void testWrite() throws Exception {
    final JobID jobId = new JobID("1234", 1);
    final TaskID taskId = new TaskID(jobId, TaskType.JOB_SETUP, 0);

    // Serialize the task ID into an in-memory buffer.
    final DataOutputByteBuffer out = new DataOutputByteBuffer();
    taskId.write(out);

    // Re-read the raw fields in the order write() is expected to emit them.
    final DataInputByteBuffer in = new DataInputByteBuffer();
    in.reset(out.getData());

    assertEquals("The write() method did not write the expected task ID",
        0, in.readInt());
    assertEquals("The write() method did not write the expected job ID",
        1, in.readInt());
    assertEquals("The write() method did not write the expected job "
        + "identifier length", 4, WritableUtils.readVInt(in));
    final byte[] identifier = new byte[4];
    in.readFully(identifier, 0, 4);
    assertEquals("The write() method did not write the expected job "
        + "identifier length", "1234", new String(identifier));
    assertEquals("The write() method did not write the expected task type",
        TaskType.JOB_SETUP, WritableUtils.readEnum(in, TaskType.class));
  }
			
Example 7
Project : hadoop Source File : TestTaskID.java View Source Code on GitHub

  /**
   * Hand-crafts the wire format of a reduce task ID (job ints, vint-prefixed
   * identifier bytes "1234", task type enum) and verifies readFields() parses
   * it into the expected string form.
   */
  public void testReadFields() throws Exception {
    final DataOutputByteBuffer out = new DataOutputByteBuffer();
    out.writeInt(0);
    out.writeInt(1);
    WritableUtils.writeVInt(out, 4);
    out.write(new byte[] {0x31, 0x32, 0x33, 0x34}); // ASCII "1234"
    WritableUtils.writeEnum(out, TaskType.REDUCE);

    final DataInputByteBuffer in = new DataInputByteBuffer();
    in.reset(out.getData());

    final TaskID parsed = new TaskID();
    parsed.readFields(in);

    assertEquals("The readFields() method did not produce the expected task ID",
        "task_1234_0001_r_000000", parsed.toString());
  }
			
Example 8
Project : hadoop Source File : ExtCharDictTest.java View Source Code on GitHub

  /**
   * Writes 1025 batches of 1024 dictionary terms to /tmp/ChineseTest on HDFS,
   * appends the serialized dictionary followed by its file offset, and prints
   * the elapsed time in milliseconds.
   *
   * Fix: the output stream is now managed by try-with-resources — the original
   * leaked the HDFS stream if any write threw before the explicit close().
   */
  public static void main(String[] args) throws Exception {
    CortaraConfiguration conf = new CortaraConfiguration();

    FileSystemSupport fsSupport = FileSystemFactory.createFileSystemSupport(conf);

    FileSystem fs = ((HdfsSupport) fsSupport).getFs();
    long start = System.nanoTime();
    Random r = new Random();
    ExtCharDictionary dict = new ExtCharDictionary();
    DataOutputByteBuffer bos = new DataOutputByteBuffer(128);
    int xi = 1024;
    int ii = 1024;

    try (FSDataOutputStream fsdos = fs.create(new Path("/tmp/ChineseTest"))) {
      for (int x = 0; x <= xi; x++) {
        // Accumulate one batch of encoded terms in the byte buffer.
        for (int i = 0; i < ii; i++) {
          byte[] byt = dict.addTerm(TestUtils.genChinese(r, 2));
          bos.write(byt);
        }
        // Flush the batch to HDFS and recycle the buffer.
        fsdos.write(Bytes.toBytes(bos));
        bos.reset();
        fsdos.flush();
      }
      // Append the dictionary itself, then its start offset as a trailing long
      // so a reader can locate the dictionary from the end of the file.
      long dicPos = fsdos.getPos();
      System.out.println("pos : " + dicPos + " Dict length : " + dict.getDictionaryBytes().length);
      fsdos.write(dict.getDictionaryBytes());
      fsdos.writeLong(dicPos);
    }

    System.out.println("cost : " + ((double) (System.nanoTime() - start) / 1000000));
  }
			
Example 9
Project : hadoop Source File : StatisticsWritableTest.java View Source Code on GitHub

	/**
	 * Builds a Metrics instance from double and integer metric maps, checks the
	 * accessors, then round-trips it through MetricsWritable and re-checks the
	 * same values on the restored copy.
	 */
	public void testStatisticsWritable() throws IOException {
		final Map<MetricType, Double> doubleMetrics = new HashMap<>();
		doubleMetrics.put(MetricType.avGain, 10.45);
		doubleMetrics.put(MetricType.avWinAvLoss, 62.13);
		final Map<MetricType, Integer> integerMetrics = new HashMap<>();
		integerMetrics.put(MetricType.period, 16);

		final Metrics metrics = new Metrics(doubleMetrics, integerMetrics);
		Assert.assertEquals(10.45, metrics.getMetric(MetricType.avGain), Settings.doubleEpsilon);
		Assert.assertEquals(62.13, metrics.getMetric(MetricType.avWinAvLoss), Settings.doubleEpsilon);
		Assert.assertEquals(16, metrics.getIntegerMetric(MetricType.period).intValue());

		// Serialize into an in-memory buffer.
		final DataOutputByteBuffer out = new DataOutputByteBuffer();
		new MetricsWritable(metrics).write(out);

		// Deserialize from the buffered bytes.
		final DataInputByteBuffer in = new DataInputByteBuffer();
		in.reset(out.getData());
		final MetricsWritable restoredWritable = new MetricsWritable();
		restoredWritable.readFields(in);

		final Metrics restored = restoredWritable.getMetrics();
		Assert.assertEquals(10.45, restored.getMetric(MetricType.avGain), Settings.doubleEpsilon);
		Assert.assertEquals(62.13, restored.getMetric(MetricType.avWinAvLoss), Settings.doubleEpsilon);
		Assert.assertEquals(16, restored.getIntegerMetric(MetricType.period).intValue());
	}
			
Example 10
Project : hadoop Source File : TestCharLoadDict.java View Source Code on GitHub

  /**
   * Writes dictionary terms plus the serialized dictionary (and its trailing
   * offset) to /tmp/ChineseTest on HDFS, then reopens the file, loads the
   * dictionary section back, and prints whether it matches the original.
   *
   * Fixes over the original:
   * - both HDFS streams are managed by try-with-resources (the originals
   *   leaked on any exception thrown before the explicit close());
   * - the dictionary bytes are read with readFully() — a single read() call
   *   may legally return fewer bytes than requested;
   * - a dead reassignment of {@code start} (second timing that was never
   *   printed) has been removed.
   */
  public static void main(String[] args) throws Exception {
    CortaraConfiguration conf = new CortaraConfiguration();

    FileSystemSupport fsSupport = FileSystemFactory.createFileSystemSupport(conf);

    FileSystem fs = ((HdfsSupport) fsSupport).getFs();

    long start = System.nanoTime();
    Random r = new Random();
    ExtCharDictionary dict = new ExtCharDictionary();
    DataOutputByteBuffer bos = new DataOutputByteBuffer(128);
    int xi = 1024;
    int ii = 1;

    // ---- Write phase -------------------------------------------------------
    try (FSDataOutputStream fsdos = fs.create(new Path("/tmp/ChineseTest"))) {
      for (int x = 0; x <= xi; x++) {
        for (int i = 0; i < ii; i++) {
          bos.write(dict.addTerm(TestUtils.genChinese(r, 2)));
        }
        fsdos.write(Bytes.toBytes(bos));
        bos.reset();
        fsdos.flush();
      }
      // Append the dictionary and its start offset as a trailing long so the
      // read phase can locate the dictionary from the end of the file.
      long dicPos = fsdos.getPos();
      System.out.println("pos : " + dicPos + " Dict length : " + dict.getDictionaryBytes().length);
      fsdos.write(dict.getDictionaryBytes());
      fsdos.writeLong(dicPos);
    }

    System.out.println("cost : " + ((double) (System.nanoTime() - start) / 1000000));

    // ---- Read phase --------------------------------------------------------
    try (FSDataInputStream fsdis = fs.open(new Path("/tmp/ChineseTest"))) {
      FileStatus fileStatus = fs.getFileStatus(new Path("/tmp/ChineseTest"));

      ExtCharDictionary dict1 = new ExtCharDictionary();

      // Last 8 bytes of the file hold the dictionary's start offset.
      fsdis.seek(fileStatus.getLen() - 8);
      long pos = fsdis.readLong();
      fsdis.seek(pos);

      // Dictionary section spans from its offset up to the trailing long.
      long length = fileStatus.getLen() - 8 - fsdis.getPos();
      byte[] ba = new byte[(int) length];
      fsdis.readFully(ba);

      System.out.println(length + " = " + ba.length);

      dict1.loadDictFromBytes(ba, 0, ba.length);

      System.out.println(TestUtils.equalsBytes(dict.getDictionaryBytes(), dict1.getDictionaryBytes()));
    }
  }
			
Example 11
Project : hadoop Source File : TradingStrategyWritableTest.java View Source Code on GitHub

	/**
	 * Round-trips a TradingStrategy through its Writable wrapper and verifies
	 * the average gain, settings hash, and period metric are preserved.
	 */
	public void testTradingStrategyWritable() throws BadAlgorithmException, IOException {
		final TradingStrategy original = new TradingStrategy(getSettings(), getMetrics());

		// Serialize into an in-memory buffer.
		final DataOutputByteBuffer out = new DataOutputByteBuffer();
		new TradingStrategyWritable(original).write(out);

		// Deserialize from the buffered bytes.
		final DataInputByteBuffer in = new DataInputByteBuffer();
		in.reset(out.getData());
		final TradingStrategyWritable restoredWritable = new TradingStrategyWritable();
		restoredWritable.readFields(in);

		final TradingStrategy restored = restoredWritable.getTradingStrategy(StockStorageMock.getStockStorage());
		Assert.assertEquals(original.getAvGain(), restored.getAvGain(), Settings.doubleEpsilon);
		Assert.assertEquals(original.getSettings().stringHashCode(), restored.getSettings().stringHashCode());
		Assert.assertEquals(original.getMetrics().getIntegerMetric(MetricType.period), restored.getMetrics().getIntegerMetric(MetricType.period));
	}