Java Code Examples for java.io.DataInput
The following code examples are extracted from open source projects.
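Before the project-specific examples, here is a minimal, self-contained sketch of how a java.io.DataInput is typically obtained and used: DataInputStream is the standard implementation, and the data is usually produced by a matching DataOutput. The class name DataInputSketch and the sample values are illustrative only.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class DataInputSketch {
  public static void main(String[] args) throws IOException {
    // Write a few primitives so there is something to read back.
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);
    out.writeInt(42);
    out.writeLong(123456789L);
    out.writeUTF("hello");
    out.flush();

    // Wrap the raw bytes in a DataInputStream and read them back in the same order.
    DataInput in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
    int i = in.readInt();     // 42
    long l = in.readLong();   // 123456789
    String s = in.readUTF();  // "hello"
    System.out.println(i + " " + l + " " + s);
  }
}

The readFields(DataInput) methods in the examples below follow the same pattern: they read values back in exactly the order the corresponding write method emitted them.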
Example 1
From project appdriver, under directory /android/src/com/google/android/testing/nativedriver/client/.
Source file: FrameBufferFormat.java

/**
 * Copies the frame buffer data from an {@code InputStream} to a given
 * location in a {@code BufferedImage}.
 * @param source the source to read the raw frame buffer data from. The format
 *   of the data should correspond to the format represented by this instance
 * @param destination the image to which to write the converted image data
 */
public void copyFrameBufferToImage(InputStream source, BufferedImage destination) {
  DataInput sourceDataStream = new LittleEndianDataInputStream(source);
  int[] oneLine = new int[getXResolution()];
  for (int y = 0; y < yResolution; y++) {
    convertToRgba32(sourceDataStream, oneLine);
    destination.setRGB(0, y, xResolution, 1, oneLine, 0, xResolution);
  }
}
Example 2
From project Cloud9, under directory /src/dist/edu/umd/hooka/alignment/.
Source file: HadoopAlign.java

static public ATable loadATable(Path path, Configuration job) throws IOException {
  org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration(job);
  FileSystem fileSys = FileSystem.get(conf);
  DataInput in = new DataInputStream(new BufferedInputStream(fileSys.open(path)));
  ATable at = new ATable();
  at.readFields(in);
  return at;
}
Example 3
From project dawn-common, under directory /org.dawb.hdf5/src/ncsa/hdf/object/fits/.
Source file: FitsFile.java

@Override
public void close() throws IOException {
  if (fitsFile == null) {
    return;
  }
  DataInput di = fitsFile.getStream();
  if (di instanceof InputStream) {
    ((InputStream) di).close();
  }
}
Example 4
From project Flume-Hive, under directory /src/java/com/cloudera/flume/handlers/batch/.
Source file: UnbatchingDecorator.java

/**
 * If it is not a batch event, pass it through; otherwise, unbatch and pass through the events.
 */
@Override
public void append(Event e) throws IOException {
  if (!BatchingDecorator.isBatch(e)) {
    super.append(e);
    return;
  }
  int sz = ByteBuffer.wrap(e.get(BatchingDecorator.BATCH_SIZE)).getInt();
  byte[] data = e.get(BatchingDecorator.BATCH_DATA);
  DataInput in = new DataInputStream(new ByteArrayInputStream(data));
  for (int i = 0; i < sz; i++) {
    WriteableEvent we = new WriteableEvent();
    we.readFields(in);
    super.append(we);
  }
}
Example 5
From project flume_1, under directory /flume-core/src/main/java/com/cloudera/flume/handlers/hdfs/.
Source file: WriteableEvent.java

public static WriteableEvent create(byte[] raw) throws IOException {
  WriteableEvent e = new WriteableEvent();
  DataInput in = new DataInputStream(new ByteArrayInputStream(raw));
  e.readFields(in);
  return e;
}
Example 6
From project behemoth, under directory /io/src/test/java/com/digitalpebble/behemoth/io/sequencefile/.
Source file: SequenceFileConverterMapperTest.java

@Test
public void testWritable() throws Exception {
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(conf);
  File tmpDir = new File(System.getProperty("java.io.tmpdir"));
  File tmpLoc = new File(tmpDir, "sfcmt");
  tmpLoc.mkdirs();
  Path in = new Path(tmpLoc.getAbsolutePath(), "foo");
  SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, in, Text.class, MyWritable.class);
  for (int i = 0; i < 100; i++) {
    writer.append(new Text(String.valueOf(i)), new MyWritable(i, "document " + i));
  }
  writer.close();
  SequenceFileConverterMapper mapper = new SequenceFileConverterMapper();
  SequenceFileIterator<Text, Text> iter = new SequenceFileIterator<Text, Text>(in, true, conf);
  DummyRecordWriter out = new DummyRecordWriter();
  Mapper.Context context = mapper.new Context(conf, new TaskAttemptID(), null, out, null, new DummyStatusReporter(), null);
  while (iter.hasNext()) {
    Pair<Text, Text> next = iter.next();
    mapper.map(next.getFirst(), next.getSecond(), context);
  }
  assertEquals(100, out.keys.size());
  int i = 0;
  for (BehemothDocument doc : out.vals) {
    byte[] bytes = doc.getContent();
    DataInput input = new DataInputStream(new ByteArrayInputStream(bytes));
    MyWritable writable = new MyWritable();
    writable.readFields(input);
    assertEquals(writable.i, i);
    assertEquals(writable.in, "document " + i);
    i++;
  }
}
Example 7
From project crunch, under directory /crunch/src/main/java/org/apache/crunch/types/writable/.
Source file: Writables.java

/**
 * Perform a deep copy of a writable value.
 * @param value The value to be copied
 * @param writableClass The Writable class of the value to be copied
 * @return A fully detached deep copy of the input value
 */
public static <T extends Writable> T deepCopy(T value, Class<T> writableClass) {
  ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream();
  DataOutputStream dataOut = new DataOutputStream(byteOutStream);
  T copiedValue = null;
  try {
    value.write(dataOut);
    dataOut.flush();
    ByteArrayInputStream byteInStream = new ByteArrayInputStream(byteOutStream.toByteArray());
    DataInput dataInput = new DataInputStream(byteInStream);
    copiedValue = writableClass.newInstance();
    copiedValue.readFields(dataInput);
  } catch (Exception e) {
    throw new CrunchRuntimeException("Error while deep copying " + value, e);
  }
  return copiedValue;
}
Example 8
From project action-core, under directory /src/main/java/com/ning/metrics/action/hdfs/data/.
Source file: RowSmile.java

/**
 * Replace the current row content with a specified DataInput.
 * @param in DataInput to read
 * @throws java.io.IOException generic serialization error
 */
@Override
public void readFields(DataInput in) throws IOException {
  schema.readFields(in);
  int numberOfItems = WritableUtils.readVInt(in);
  int smilePayloadSize = WritableUtils.readVInt(in);
  int itemsRead = 0;
  byte[] smilePayload = new byte[smilePayloadSize];
  in.readFully(smilePayload);
  JsonParser jp = objectMapper.getJsonFactory().createJsonParser(smilePayload);
  while (jp.nextToken() != null && itemsRead < numberOfItems) {
    objectMapper.readValue(jp, JsonNodeComparable.class);
    itemsRead++;
  }
  jp.close();
}
Example 9
From project activemq-apollo, under directory /apollo-broker/src/main/scala/org/apache/activemq/apollo/broker/.
Source file: XidImpl.java

/**
 * Read the xid from the data input and set each field.
 * @param in the data input to read from
 * @throws IOException
 */
public void readbody(DataInput in) throws IOException {
  formatId = in.readInt();
  int length = in.readInt();
  byte[] data = new byte[length];
  in.readFully(data);
  setGlobalTransactionId(data);
  length = in.readInt();
  data = new byte[length];
  in.readFully(data);
  setBranchQualifier(data);
}
Example 10
From project archive-commons, under directory /ia-tools/src/main/java/org/archive/hadoop/io/.
Source file: MergeClusterRangesInputSplit.java

public void readFields(DataInput in) throws IOException {
  length = in.readLong();
  start = in.readUTF();
  end = in.readUTF();
  int cl = in.readInt();
  clusterPaths = new String[cl];
  for (int i = 0; i < cl; i++) {
    clusterPaths[i] = in.readUTF();
  }
}
Example 11
From project ARCInputFormat, under directory /src/org/commoncrawl/hadoop/io/.
Source file: ARCSplit.java

/**
 * @inheritDoc
 */
public void readFields(DataInput in) throws IOException {
  int nResources = in.readInt();
  resources = new ARCResource[nResources];
  for (int i = 0; i < nResources; i++) {
    resources[i] = new ARCResource(Text.readString(in), in.readLong());
  }
  size = in.readLong();
  hosts = null;
}
Example 12
From project asakusafw-examples, under directory /example-seqfile/src/main/java/com/asakusafw/example/direct/seqfile/writable/.
Source file: ItemInfoWritable.java

@Override
public void readFields(DataInput in) throws IOException {
  itemName = Text.readString(in);
  departmentCode = Text.readString(in);
  departmentName = Text.readString(in);
  categoryCode = Text.readString(in);
  categoryName = Text.readString(in);
  unitSellingPrice = in.readInt();
  registeredDate = new Date(in.readLong());
  beginDate = new Date(in.readLong());
  endDate = new Date(in.readLong());
}
Example 13
public void readFields(DataInput in) throws IOException {
  m_pageURL = Text.readString(in);
  m_pageProps.readFields(in);
  int numVersions = WritableUtils.readVInt(in);
  m_versions = new ArrayList<PageVersion>(numVersions);
  PageVersion curVer;
  for (int i = 0; i < numVersions; i++) {
    curVer = new PageVersion();
    curVer.readFields(in);
    m_versions.add(curVer);
  }
}
Example 14
From project Baseform-Epanet-Java-Library, under directory /src/org/addition/epanet/hydraulic/io/.
Source file: AwareStep.java

public static HeaderInfo readHeader(DataInput in) throws IOException, ENException {
  HeaderInfo headerInfo = new HeaderInfo();
  headerInfo.version = in.readInt();
  headerInfo.nodes = in.readInt();
  headerInfo.links = in.readInt();
  headerInfo.rstart = in.readLong();
  headerInfo.rstep = in.readLong();
  headerInfo.duration = in.readLong();
  return headerInfo;
}
Example 15
From project BeeQueue, under directory /src/org/beequeue/piles/flock/.
Source file: ExternalizableUtil.java

public static String readNullableUTF(DataInput in) throws IOException {
  boolean isNull = in.readBoolean();
  String s = null;
  if (!isNull) {
    s = in.readUTF();
  }
  return s;
}
Example 16
From project big-data-plugin, under directory /shims/common/src/org/pentaho/hbase/shim/common/.
Source file: DeserializedNumericComparator.java

@Override
public void readFields(DataInput in) throws IOException {
  m_isInteger = in.readBoolean();
  m_isLongOrDouble = in.readBoolean();
  m_longValue = in.readLong();
  m_doubleValue = in.readDouble();
}
Example 17
From project blacktie, under directory /stompconnect-1.0/src/main/java/org/codehaus/stomp/.
Source file: StompMarshaller.java

protected String readLine(DataInput in, int maxLength, String errorMessage) throws IOException {
  byte b;
  ByteArrayOutputStream baos = new ByteArrayOutputStream(maxLength);
  while ((b = in.readByte()) != '\n') {
    if (baos.size() > maxLength) {
      throw new ProtocolException(errorMessage, true);
    }
    baos.write(b);
  }
  byte[] sequence = baos.toByteArray();
  return new String(sequence, "UTF-8");
}
Example 18
From project C-Cat, under directory /util/src/main/java/gov/llnl/ontology/util/.
Source file: StringCounter.java

public void readFields(DataInput in) throws IOException {
  int size = in.readInt();
  for (int i = 0; i < size; ++i) {
    String key = in.readUTF();
    int value = in.readInt();
    count(key, value);
  }
}
Example 19
From project cascading, under directory /src/hadoop/cascading/tap/hadoop/io/.
Source file: MultiInputSplit.java

public void readFields(DataInput in) throws IOException {
  String splitType = in.readUTF();
  config = new HashMap<String, String>();
  String[] keys = WritableUtils.readStringArray(in);
  String[] values = WritableUtils.readStringArray(in);
  for (int i = 0; i < keys.length; i++)
    config.put(keys[i], values[i]);
  if (LOG.isDebugEnabled()) {
    LOG.debug("current split config diff:");
    for (Map.Entry<String, String> entry : config.entrySet())
      LOG.debug("key: {}, value: {}", entry.getKey(), entry.getValue());
  }
  JobConf currentConf = HadoopUtil.mergeConf(jobConf, config, false);
  try {
    inputSplit = (InputSplit) ReflectionUtils.newInstance(currentConf.getClassByName(splitType), currentConf);
  } catch (ClassNotFoundException exp) {
    throw new IOException("split class " + splitType + " not found");
  }
  inputSplit.readFields(in);
  if (inputSplit instanceof FileSplit) {
    Path path = ((FileSplit) inputSplit).getPath();
    if (path != null) {
      jobConf.set(CASCADING_SOURCE_PATH, path.toString());
      LOG.info("current split input path: {}", path);
    }
  }
}
Example 20
@Override
public void readFields(DataInput in) throws IOException {
  initialize();
  int numFields = in.readInt();
  for (int i = 0; i < numFields; ++i) {
    byte type = in.readByte();
    if (type == BYTE) {
      fields.add(in.readByte());
    } else if (type == BOOLEAN) {
      fields.add(in.readBoolean());
    } else if (type == INT) {
      fields.add(in.readInt());
    } else if (type == LONG) {
      fields.add(in.readLong());
    } else if (type == FLOAT) {
      fields.add(in.readFloat());
    } else if (type == DOUBLE) {
      fields.add(in.readDouble());
    } else if (type == STRING) {
      fields.add(in.readUTF());
    } else if (type == BYTE_ARRAY) {
      int len = in.readShort();
      byte[] bytes = new byte[len];
      in.readFully(bytes);
      fields.add(bytes);
    } else {
      throw new IllegalArgumentException("Failed encoding, unknown element type in stream");
    }
  }
}
Example 21
From project chukwa, under directory /src/main/java/org/apache/hadoop/chukwa/.
Source file: ChunkImpl.java

/**
 * @see org.apache.hadoop.io.Writable#readFields(java.io.DataInput)
 */
public void readFields(DataInput in) throws IOException {
  setProtocolVersion(in.readInt());
  if (protocolVersion != PROTOCOL_VERSION) {
    throw new IOException("Protocol version mismatched, drop data. source version: " + protocolVersion + ", collector version:" + PROTOCOL_VERSION);
  }
  setSeqID(in.readLong());
  setSource(in.readUTF());
  tags = in.readUTF();
  setStreamName(in.readUTF());
  setDataType(in.readUTF());
  setDebugInfo(in.readUTF());
  int numRecords = in.readInt();
  recordEndOffsets = new int[numRecords];
  for (int i = 0; i < numRecords; ++i)
    recordEndOffsets[i] = in.readInt();
  data = new byte[recordEndOffsets[recordEndOffsets.length - 1] + 1];
  in.readFully(data);
}
Example 22
From project ciel-java, under directory /examples/Grep/src/skywriting/examples/grep/.
Source file: Text.java

/**
 * Reads a zero-compressed encoded long from the input stream and returns it.
 * @param stream Binary input stream
 * @throws java.io.IOException
 * @return deserialized long from stream
 */
public static long readVLong(DataInput stream) throws IOException {
  byte firstByte = stream.readByte();
  int len = decodeVIntSize(firstByte);
  if (len == 1) {
    return firstByte;
  }
  long i = 0;
  for (int idx = 0; idx < len - 1; idx++) {
    byte b = stream.readByte();
    i = i << 8;
    i = i | (b & 0xFF);
  }
  return (isNegativeVInt(firstByte) ? (i ^ -1L) : i);
}
Example 23
From project CommunityCase, under directory /src/org/community/intellij/plugins/communitycase/history/.
Source file: NewUsersComponent.java

@Override
public List<String> read(DataInput in) throws IOException {
  final int size = in.readInt();
  final ArrayList<String> result = new ArrayList<String>(size);
  for (int i = 0; i < size; i++) {
    result.add(in.readUTF());
  }
  return result;
}
Example 24
From project core_4, under directory /impl/src/main/java/org/richfaces/renderkit/html/.
Source file: BaseGradient.java

public void readState(FacesContext context, DataInput dataInput) throws IOException {
  this.width = readIntegerParameterAsShort(dataInput);
  this.height = readIntegerParameterAsShort(dataInput);
  this.gradientHeight = readIntegerParameterAsShort(dataInput);
  this.headerBackgroundColor = readIntegerParameter(dataInput);
  this.headerGradientColor = readIntegerParameter(dataInput);
  this.horizontal = dataInput.readBoolean();
  this.gradientType = GradientType.values()[dataInput.readByte()];
}
Example 25
From project craftbook, under directory /oldsrc/commonrefactor/com/sk89q/craftbook/mech/ic/plc/types/.
Source file: Perlstone32_1.java

public void readCommonData(DataInput in) throws IOException {
  Map<String, int[]> publicPersistentStorage = new HashMap<String, int[]>();
  if (in.readByte() != 0)
    throw new IOException("wrong version");
  int l = in.readInt();
  for (int j = 0; j < l; j++) {
    String name = in.readUTF();
    int[] data = new int[in.readInt()];
    for (int k = 0; k < data.length; k++)
      if (in.readBoolean())
        data[k] = in.readInt();
    publicPersistentStorage.put(name, data);
  }
  this.publicPersistentStorage = publicPersistentStorage;
}
Example 26
From project culvert, under directory /culvert-main/src/main/java/com/bah/culvert/constraints/.
Source file: Constraint.java

/**
 * Read the constraint from the stream.
 * @param in to read the constraint from
 * @return specified {@link Constraint}
 * @throws IOException if the constraint could not be created or read
 */
public static Constraint readFromStream(DataInput in) throws IOException {
  ObjectWritable ow = new ObjectWritable();
  Configuration conf = new Configuration();
  ow.setConf(conf);
  ow.readFields(in);
  return (Constraint) ow.get();
}
Example 27
From project daily-money, under directory /dailymoney/src/com/bottleworks/dailymoney/calculator2/.
Source file: History.java

History(int version, DataInput in) throws IOException {
  if (version >= VERSION_1) {
    int size = in.readInt();
    for (int i = 0; i < size; ++i) {
      mEntries.add(new HistoryEntry(version, in));
    }
    mPos = in.readInt();
  } else {
    throw new IOException("invalid version " + version);
  }
}
Example 28
From project datasalt-utils, under directory /src/contrib/java/org/apache/hadoop/mapreduce/lib/input/.
Source file: TaggedInputSplit.java

@SuppressWarnings("unchecked") public void readFields(DataInput in) throws IOException { inputSplitClass=(Class<? extends InputSplit>)readClass(in); inputFormatClass=(Class<? extends InputFormat<?,?>>)readClass(in); mapperClass=(Class<? extends Mapper<?,?,?,?>>)readClass(in); inputSplit=(InputSplit)ReflectionUtils.newInstance(inputSplitClass,conf); SerializationFactory factory=new SerializationFactory(conf); Deserializer deserializer=factory.getDeserializer(inputSplitClass); deserializer.open((DataInputStream)in); inputSplit=(InputSplit)deserializer.deserialize(inputSplit); }
Example 29
From project elephant-twin, under directory /com.twitter.elephanttwin/src/main/java/com/twitter/elephanttwin/io/.
Source file: ListLongPair.java

@Override
public void readFields(DataInput in) throws IOException {
  int numElements = in.readInt();
  list = new ArrayList<LongPairWritable>(numElements);
  for (int cnt = 0; cnt < numElements; cnt++) {
    LongPairWritable l = new LongPairWritable();
    l.readFields(in);
    list.add(l);
  }
}
Example 30
From project extramuros, under directory /java/src/extramuros/java/formats/adapters/.
Source file: TextFileTableAdapter.java

public void readFields(DataInput dataInput) throws IOException {
  setHeader(new TableHeader());
  getHeader().readFields(dataInput);
  Text tmp = new Text();
  tmp.readFields(dataInput);
  setRowsPath(tmp.toString());
  tmp = new Text();
  tmp.readFields(dataInput);
  setDefaultSeparator(tmp.toString());
  int numNullValues = dataInput.readInt();
  nullValues = new String[numNullValues];
  for (int i = 0; i < nullValues.length; i++) {
    tmp = new Text();
    tmp.readFields(dataInput);
    nullValues[i] = tmp.toString();
  }
}
Example 31
From project flume, under directory /flume-ng-channels/flume-file-channel/src/main/java/org/apache/flume/channel/file/.
Source file: FlumeEvent.java

@Override
public void readFields(DataInput in) throws IOException {
  MapWritable map = new MapWritable();
  map.readFields(in);
  setHeaders(fromMapWritable(map));
  byte[] body = null;
  int bodyLength = in.readInt();
  if (bodyLength != -1) {
    body = new byte[bodyLength];
    in.readFully(body);
  }
  setBody(body);
}
Example 32
From project g414-hash, under directory /src/main/java/com/g414/hash/file2/impl/.
Source file: FileOperations2.java

public static long read(DataInput in, ByteSize size) throws IOException {
  switch (size) {
  case EIGHT:
    return in.readLong();
  case FOUR:
    return in.readInt();
  case TWO:
    return in.readChar();
  case ONE:
    return in.readByte();
  case ZERO:
    return 0;
  default:
    throw new IllegalArgumentException("Unknown ByteSize: " + size);
  }
}
Example 33
From project galaxy, under directory /src/co/paralleluniverse/common/io/.
Source file: Streamables.java

public Object read(DataInput in) throws IOException {
  final byte qualifier = in.readByte();
  final ClassInfo ci = types.get(qualifier);
  if (ci == null)
    throw new IOException("Cannot read object. No class registered for qualifier " + qualifier);
  final Streamable obj = (Streamable) ci.construct(qualifier);
  obj.read(in);
  return obj;
}
Example 34
From project gatein-api-java, under directory /api/src/main/java/org/gatein/api/portal/.
Source file: Node.java

@Override
protected void readExternal(DataInput in) throws IOException {
  Site.Id si = new Site.Id();
  si.readExternal(in);
  this.siteId = si;
  this.path = in.readUTF().split("/");
}
Example 35
From project giraph, under directory /src/main/java/org/apache/giraph/comm/messages/.
Source file: DiskBackedMessageStore.java

@Override
public void readFields(DataInput in) throws IOException {
  int numVertices = in.readInt();
  for (int v = 0; v < numVertices; v++) {
    I vertexId = (I) config.createVertexId();
    vertexId.readFields(in);
    destinationVertices.add(vertexId);
  }
  int mapSize = in.readInt();
  for (int m = 0; m < mapSize; m++) {
    I vertexId = config.createVertexId();
    vertexId.readFields(in);
    int numMessages = in.readInt();
    numberOfMessagesInMemory.addAndGet(numMessages);
    List<M> messages = Lists.newArrayList();
    for (int i = 0; i < numMessages; i++) {
      M message = config.createMessageValue();
      message.readFields(in);
      messages.add(message);
    }
    inMemoryMessages.put(vertexId, messages);
  }
  int numFileStores = in.readInt();
  for (int s = 0; s < numFileStores; s++) {
    BasicMessageStore<I, M> fileStore = fileStoreFactory.newStore();
    fileStore.readFields(in);
    fileStores.add(fileStore);
  }
}