
Searched refs:DatanodeInfo (Results 1 – 25 of 186) sorted by relevance

/dports/devel/hadoop/hadoop-1.2.1/src/test/org/apache/hadoop/hdfs/server/datanode/
TestBlockReplacement.java
42 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
116 DatanodeInfo[] oldNodes = block.getLocations(); in testBlockReplacement()
127 DatanodeInfo newNode=null; in testBlockReplacement()
128 for(DatanodeInfo node:datanodes) { in testBlockReplacement()
130 for(DatanodeInfo oldNode:oldNodes) { in testBlockReplacement()
143 DatanodeInfo source=null; in testBlockReplacement()
144 ArrayList<DatanodeInfo> proxies = new ArrayList<DatanodeInfo>(2); in testBlockReplacement()
145 for(DatanodeInfo node:datanodes) { in testBlockReplacement()
199 DatanodeInfo[] nodes = blocks.get(0).getLocations(); in checkBlocks()
206 for (DatanodeInfo node : includeNodes) { in checkBlocks()
[all …]
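
The matches from this test outline a selection loop: read the block's current replica locations, then scan the cluster's datanodes for one that is not yet a holder. A compilable sketch of that pattern, using only the getLocations() accessor visible above; the helper class name is illustrative:

import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;

final class NewNodeChooser {
  // Return a datanode that does not already hold a replica of the block,
  // or null if every candidate is already a replica holder.
  static DatanodeInfo chooseNewNode(LocatedBlock block, DatanodeInfo[] datanodes) {
    DatanodeInfo[] oldNodes = block.getLocations();
    for (DatanodeInfo node : datanodes) {
      boolean alreadyHoldsReplica = false;
      for (DatanodeInfo oldNode : oldNodes) {
        if (node.equals(oldNode)) {
          alreadyHoldsReplica = true;
          break;
        }
      }
      if (!alreadyHoldsReplica) {
        return node;
      }
    }
    return null;
  }
}
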
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/
TestBlockReplacement.java
144 DatanodeInfo newNode=null; in testBlockReplacement()
145 for(DatanodeInfo node:datanodes) { in testBlockReplacement()
160 DatanodeInfo source=null; in testBlockReplacement()
161 ArrayList<DatanodeInfo> proxies = new ArrayList<DatanodeInfo>(2); in testBlockReplacement()
292 for (DatanodeInfo dn : nodes) in checkBlocks()
311 DatanodeInfo sourceProxy, DatanodeInfo destination) throws IOException { in replaceBlock()
321 DatanodeInfo source, in replaceBlock()
322 DatanodeInfo sourceProxy, in replaceBlock()
323 DatanodeInfo destination, in replaceBlock()
410 (DatanodeInfo)sourceDnDesc, (DatanodeInfo)sourceDnDesc, in testDeletedBlockWhenAddBlockIsInEdit()
[all …]
/dports/devel/hadoop/hadoop-1.2.1/src/hdfs/org/apache/hadoop/hdfs/protocol/
LocatedBlock.java
43 private DatanodeInfo[] locs;
53 this(new Block(), new DatanodeInfo[0], 0L, false); in LocatedBlock()
58 public LocatedBlock(Block b, DatanodeInfo[] locs) { in LocatedBlock()
64 public LocatedBlock(Block b, DatanodeInfo[] locs, long startOffset) { in LocatedBlock()
70 public LocatedBlock(Block b, DatanodeInfo[] locs, long startOffset, in LocatedBlock()
76 this.locs = new DatanodeInfo[0]; in LocatedBlock()
98 public DatanodeInfo[] getLocations() { in getLocations()
143 this.locs = new DatanodeInfo[count]; in readFields()
145 locs[i] = new DatanodeInfo(); in readFields()
DatanodeInfo.java
40 public class DatanodeInfo extends DatanodeID implements Node { class
58 public DatanodeInfo() { in DatanodeInfo() method in DatanodeInfo
63 public DatanodeInfo(DatanodeInfo from) { in DatanodeInfo() method in DatanodeInfo
75 public DatanodeInfo(DatanodeID nodeID) { in DatanodeInfo() method in DatanodeInfo
85 protected DatanodeInfo(DatanodeID nodeID, String location, String hostName) { in DatanodeInfo() method in DatanodeInfo
92 public DatanodeInfo(final String name, final String storageID, in DatanodeInfo() method in DatanodeInfo
355 (DatanodeInfo.class, in WritableFactories.setFactory()
357 public Writable newInstance() { return new DatanodeInfo(); } in WritableFactories.setFactory() argument
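
The factory registration in the last two matches lets Hadoop's Writable machinery instantiate DatanodeInfo reflectively during RPC deserialization. A sketch of that static block as it would sit inside the DatanodeInfo class body, using the WritableFactories/WritableFactory API shown in the match:

import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableFactories;
import org.apache.hadoop.io.WritableFactory;

// Static registration (lives inside DatanodeInfo): ObjectWritable asks the
// factory for a blank instance, then calls readFields() to populate it.
static {
  WritableFactories.setFactory(DatanodeInfo.class, new WritableFactory() {
    public Writable newInstance() {
      return new DatanodeInfo();  // no-arg constructor shown in the matches above
    }
  });
}
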
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/
TestDecommission.java
45 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
183 DatanodeInfo[] nodes = blk.getLocations(); in checkFile()
268 for (DatanodeInfo dn : decommissionedNodes) { in decommissionNode()
275 DatanodeInfo ret = NameNodeAdapter.getDatanode( in decommissionNode()
295 private void waitNodeState(DatanodeInfo node, in waitNodeState()
389 ArrayList<ArrayList<DatanodeInfo>> namenodeDecomList = new ArrayList<ArrayList<DatanodeInfo>>( in testDecommission2()
653 final DatanodeInfo decomNode = in testRecommission()
845 ArrayList<DatanodeInfo> dnInfos = new ArrayList<DatanodeInfo>(); in testDecommissionWithOpenfile()
849 DatanodeInfo found = datanodeInfo; in testDecommissionWithOpenfile()
850 for (DatanodeInfo dif: dnInfos4LastBlock) { in testDecommissionWithOpenfile()
[all …]
TestReplaceDatanodeOnFailure.java
31 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
62 final DatanodeInfo[] infos = new DatanodeInfo[5]; in testDefaultPolicy()
63 final DatanodeInfo[][] datanodes = new DatanodeInfo[infos.length + 1][]; in testDefaultPolicy()
64 datanodes[0] = new DatanodeInfo[0]; in testDefaultPolicy()
68 datanodes[i] = new DatanodeInfo[i]; in testDefaultPolicy()
77 final DatanodeInfo[] existings = datanodes[nExistings]; in testDefaultPolicy()
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/
LocatedBlock.java
58 private DatanodeInfo[] cachedLocs;
64 public LocatedBlock(ExtendedBlock b, DatanodeInfo[] locs) { in LocatedBlock()
68 public LocatedBlock(ExtendedBlock b, DatanodeInfo[] locs, long startOffset, in LocatedBlock()
77 public LocatedBlock(ExtendedBlock b, DatanodeInfo[] locs, in LocatedBlock()
90 public LocatedBlock(ExtendedBlock b, DatanodeInfo[] locs, String[] storageIDs, in LocatedBlock()
92 boolean corrupt, DatanodeInfo[] cachedLocs) { in LocatedBlock()
101 DatanodeInfo di = locs[i]; in LocatedBlock()
136 public DatanodeInfo[] getLocations() { in getLocations()
190 public void addCachedLoc(DatanodeInfo loc) { in addCachedLoc()
191 List<DatanodeInfo> cachedList = Lists.newArrayList(cachedLocs); in addCachedLoc()
[all …]
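
The addCachedLoc() matches hint at a copy-then-swap update of the cached-location array. A member-level sketch of that idea, using the Guava Lists helper seen in the snippet; the duplicate check is an assumption:

import java.util.List;
import com.google.common.collect.Lists;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

private DatanodeInfo[] cachedLocs = new DatanodeInfo[0];

// Rebuild the cached-location array instead of mutating it in place.
public void addCachedLoc(DatanodeInfo loc) {
  List<DatanodeInfo> cachedList = Lists.newArrayList(cachedLocs);
  if (!cachedList.contains(loc)) {       // assumed: skip nodes already cached
    cachedList.add(loc);
    cachedLocs = cachedList.toArray(new DatanodeInfo[cachedList.size()]);
  }
}
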
DatanodeInfoWithStorage.java
23 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
27 public class DatanodeInfoWithStorage extends DatanodeInfo {
31 public DatanodeInfoWithStorage(DatanodeInfo from, String storageID, in DatanodeInfoWithStorage()
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/
DFSInputStream.java
247 private final ConcurrentHashMap<DatanodeInfo, DatanodeInfo> deadNodes =
248 new ConcurrentHashMap<DatanodeInfo, DatanodeInfo>();
353 LinkedList<DatanodeInfo> nodeList = new LinkedList<DatanodeInfo>( in readBlockLength()
355 LinkedList<DatanodeInfo> retryList = new LinkedList<DatanodeInfo>(); in readBlockLength()
621 DatanodeInfo chosenNode = null; in blockSeekTo()
958 Set<DatanodeInfo> dnSet = null; in addIntoCorruptedBlockMap()
1062 DatanodeInfo nodes[], AbstractMap<DatanodeInfo, in getBestNodeDNAddrPairErrorString() argument
1063 DatanodeInfo> deadNodes, Collection<DatanodeInfo> ignoredNodes) { in getBestNodeDNAddrPairErrorString()
1238 ArrayList<DatanodeInfo> ignored = new ArrayList<DatanodeInfo>();
1495 DatanodeInfo[] locs = new DatanodeInfo[dnSet.size()];
[all …]
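
The deadNodes declaration in the first two matches uses a ConcurrentHashMap as a thread-safe set: each failed datanode is stored as both key and value. A member-level sketch of that bookkeeping; the markDead/isDead method names are illustrative, not DFSInputStream's own:

import java.util.concurrent.ConcurrentHashMap;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

private final ConcurrentHashMap<DatanodeInfo, DatanodeInfo> deadNodes =
    new ConcurrentHashMap<DatanodeInfo, DatanodeInfo>();

void markDead(DatanodeInfo dn) {
  deadNodes.put(dn, dn);            // idempotent; duplicates collapse
}

boolean isDead(DatanodeInfo dn) {
  return deadNodes.containsKey(dn);
}
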
BlockStorageLocationUtil.java
44 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
87 for (Map.Entry<DatanodeInfo, List<LocatedBlock>> entry : datanodeBlocks in createVolumeBlockLocationCallables()
90 DatanodeInfo datanode = entry.getKey(); in createVolumeBlockLocationCallables()
156 Map<DatanodeInfo, HdfsBlocksMetadata> metadatas = in queryDatanodesForHdfsBlocksMetadata()
161 DatanodeInfo datanode = callable.getDatanodeInfo(); in queryDatanodesForHdfsBlocksMetadata()
211 Map<DatanodeInfo, HdfsBlocksMetadata> metadatas) { in associateVolumeIdsWithBlocks() argument
237 DatanodeInfo datanode = entry.getKey(); in associateVolumeIdsWithBlocks()
266 DatanodeInfo[] dnInfos = locBlock.getLocations(); in associateVolumeIdsWithBlocks()
321 private final DatanodeInfo datanode;
329 DatanodeInfo datanode, String poolId, long []blockIds, in VolumeBlockLocationCallable()
[all …]
DFSOutputStream.java
237 private final LoadingCache<DatanodeInfo, DatanodeInfo> excludedNodes =
242 .removalListener(new RemovalListener<DatanodeInfo, DatanodeInfo>() {
245 RemovalNotification<DatanodeInfo, DatanodeInfo> notification) {
250 .build(new CacheLoader<DatanodeInfo, DatanodeInfo>() {
252 public DatanodeInfo load(DatanodeInfo key) throws Exception {
267 private final List<DatanodeInfo> failed = new ArrayList<DatanodeInfo>();
980 ArrayList<DatanodeInfo> exclude = new ArrayList<DatanodeInfo>(failed); in addDatanode2ExistingPipeline()
1017 private void transfer(final DatanodeInfo src, final DatanodeInfo[] targets, in transfer()
1123 DatanodeInfo[] newnodes = new DatanodeInfo[nodes.length-1]; in setupPipelineForAppendOrRecovery()
1507 DatanodeInfo[] getNodes() { in getNodes()
[all …]
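
The excludedNodes matches build a Guava LoadingCache keyed and valued by the same DatanodeInfo so exclusions age out on their own. A member-level sketch of that construction; the ten-minute expiry is an assumption, as the real interval comes from client configuration:

import java.util.concurrent.TimeUnit;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

// A timed "set" of datanodes excluded from new write pipelines.
private final LoadingCache<DatanodeInfo, DatanodeInfo> excludedNodes =
    CacheBuilder.newBuilder()
        .expireAfterWrite(10, TimeUnit.MINUTES)   // assumed interval
        .removalListener(new RemovalListener<DatanodeInfo, DatanodeInfo>() {
          public void onRemoval(
              RemovalNotification<DatanodeInfo, DatanodeInfo> notification) {
            // entry expired: the datanode becomes eligible for pipelines again
          }
        })
        .build(new CacheLoader<DatanodeInfo, DatanodeInfo>() {
          public DatanodeInfo load(DatanodeInfo key) throws Exception {
            return key;             // identity mapping; the cache acts as a set
          }
        });
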
/dports/devel/hadoop/hadoop-1.2.1/src/hdfs/org/apache/hadoop/hdfs/server/protocol/
BlockCommand.java
24 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
39 DatanodeInfo targets[][];
51 targets = new DatanodeInfo[blocks.length][]; in BlockCommand()
59 private static final DatanodeInfo[][] EMPTY_TARGET = {};
75 public DatanodeInfo[][] getTargets() { in getTargets()
113 this.targets = new DatanodeInfo[in.readInt()][]; in readFields()
115 this.targets[i] = new DatanodeInfo[in.readInt()]; in readFields()
117 targets[i][j] = new DatanodeInfo(); in readFields()
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/
InvalidateBlocks.java
32 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
49 private final Map<DatanodeInfo, LightWeightHashSet<Block>> node2blocks =
50 new TreeMap<DatanodeInfo, LightWeightHashSet<Block>>();
91 synchronized boolean contains(final DatanodeInfo dn, final Block block) { in contains()
105 synchronized void add(final Block block, final DatanodeInfo datanode, in add()
122 synchronized void remove(final DatanodeInfo dn) { in remove()
130 synchronized void remove(final DatanodeInfo dn, final Block block) { in remove()
149 for(Map.Entry<DatanodeInfo, LightWeightHashSet<Block>> entry : node2blocks.entrySet()) { in dump()
159 synchronized List<DatanodeInfo> getDatanodes() { in getDatanodes()
160 return new ArrayList<DatanodeInfo>(node2blocks.keySet()); in getDatanodes()
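
InvalidateBlocks keeps a per-datanode map of blocks scheduled for deletion, guarded by synchronized accessors, as the matches above show. A member-level sketch of that structure; a plain HashSet stands in for the HDFS-internal LightWeightHashSet:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

// Blocks pending invalidation, grouped by the datanode that holds them.
private final Map<DatanodeInfo, Set<Block>> node2blocks =
    new TreeMap<DatanodeInfo, Set<Block>>();

synchronized void add(Block block, DatanodeInfo datanode) {
  Set<Block> set = node2blocks.get(datanode);
  if (set == null) {
    set = new HashSet<Block>();
    node2blocks.put(datanode, set);
  }
  set.add(block);
}

synchronized boolean contains(DatanodeInfo dn, Block block) {
  Set<Block> set = node2blocks.get(dn);
  return set != null && set.contains(block);
}

synchronized List<DatanodeInfo> getDatanodes() {
  return new ArrayList<DatanodeInfo>(node2blocks.keySet());
}
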
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/
TestWebHdfsDataLocality.java
32 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
90 final DatanodeInfo chosen = NamenodeWebHdfsMethods.chooseDatanode( in testDataLocality()
107 final DatanodeInfo[] locations = lb.get(0).getLocations(); in testDataLocality()
109 final DatanodeInfo expected = locations[0]; in testDataLocality()
115 final DatanodeInfo chosen = NamenodeWebHdfsMethods.chooseDatanode( in testDataLocality()
121 final DatanodeInfo chosen = NamenodeWebHdfsMethods.chooseDatanode( in testDataLocality()
127 final DatanodeInfo chosen = NamenodeWebHdfsMethods.chooseDatanode( in testDataLocality()
171 final DatanodeInfo[] locations = lb.get(0).getLocations(); in testExcludeDataNodes()
182 final DatanodeInfo chosen = NamenodeWebHdfsMethods.chooseDatanode( in testExcludeDataNodes()
192 final DatanodeInfo chosen = NamenodeWebHdfsMethods.chooseDatanode( in testExcludeDataNodes()
[all …]
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/
DatanodeStorageReport.java
20 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
26 final DatanodeInfo datanodeInfo;
29 public DatanodeStorageReport(DatanodeInfo datanodeInfo, in DatanodeStorageReport()
35 public DatanodeInfo getDatanodeInfo() { in getDatanodeInfo()
BlockCommand.java
26 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
53 final DatanodeInfo[][] targets;
66 targets = new DatanodeInfo[blocks.length][]; in BlockCommand()
79 private static final DatanodeInfo[][] EMPTY_TARGET_DATANODES = {};
97 DatanodeInfo[][] targets, StorageType[][] targetStorageTypes, in BlockCommand()
115 public DatanodeInfo[][] getTargets() { in getTargets()
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/
ReplaceDatanodeOnFailure.java
25 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
60 public boolean satisfy(short replication, DatanodeInfo[] existings,
69 public boolean satisfy(short replication, DatanodeInfo[] existings,
86 final DatanodeInfo[] existings, final int n, final boolean isAppend,
101 public boolean satisfy(short replication, DatanodeInfo[] existings, in satisfy()
138 final short replication, final DatanodeInfo[] existings, in satisfy()
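
The satisfy(...) signatures above describe a pluggable condition that decides whether a write pipeline with failed datanodes should be repopulated. A sketch of what the default condition plausibly looks like; the isHflushed parameter and the exact thresholds are assumptions, not taken from these matches:

import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

final class ReplacePolicySketch {
  // n is the number of surviving datanodes (existings.length in the caller).
  static boolean satisfy(short replication, DatanodeInfo[] existings,
      int n, boolean isAppend, boolean isHflushed) {
    if (replication < 3) {
      return false;                  // short pipelines: continue with fewer nodes
    }
    if (n <= replication / 2) {
      return true;                   // at least half of the replicas are gone
    }
    return isAppend || isHflushed;   // data may already be visible to readers
  }
}
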
DataTransferProtocol.java
27 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
102 final DatanodeInfo[] targets, in writeBlock()
104 final DatanodeInfo source, in writeBlock()
129 final DatanodeInfo[] targets, in transferBlock()
180 final DatanodeInfo source) throws IOException; in replaceBlock()
/dports/devel/hadoop/hadoop-1.2.1/src/test/org/apache/hadoop/hdfs/server/namenode/web/resources/
TestWebHdfsDataLocality.java
32 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
91 final DatanodeInfo chosen = NamenodeWebHdfsMethods.chooseDatanode( in testDataLocality()
108 final DatanodeInfo[] locations = lb.get(0).getLocations(); in testDataLocality()
110 final DatanodeInfo expected = locations[0]; in testDataLocality()
116 final DatanodeInfo chosen = NamenodeWebHdfsMethods.chooseDatanode( in testDataLocality()
122 final DatanodeInfo chosen = NamenodeWebHdfsMethods.chooseDatanode( in testDataLocality()
128 final DatanodeInfo chosen = NamenodeWebHdfsMethods.chooseDatanode( in testDataLocality()
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/
TestPBHelper.java
38 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
314 private void compare(DatanodeInfo dn1, DatanodeInfo dn2) { in compare()
349 DatanodeInfo[] dnInfo = new DatanodeInfo[] { di1, di2 }; in testConvertRecoveringBlock()
354 DatanodeInfo[] dnInfo1 = b1.getLocations(); in testConvertRecoveringBlock()
365 DatanodeInfo[] dnInfo = new DatanodeInfo[] { di1, di2 }; in testConvertBlockRecoveryCommand()
445 DatanodeInfo[] dnInfos = { in createLocatedBlock()
472 DatanodeInfo[] dnInfos = { in createLocatedBlockNoStorageMedia()
561 DatanodeInfo[][] dnInfos = new DatanodeInfo[][] { new DatanodeInfo[1], in testConvertBlockCommand()
562 new DatanodeInfo[2] }; in testConvertBlockCommand()
582 DatanodeInfo[] d1 = dnInfos[i]; in testConvertBlockCommand()
[all …]
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/shortcircuit/
DfsClientShmManager.java
38 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
75 private final DatanodeInfo datanode;
109 EndpointShmManager (DatanodeInfo datanode) { in EndpointShmManager()
400 private final HashMap<DatanodeInfo, EndpointShmManager> datanodes =
401 new HashMap<DatanodeInfo, EndpointShmManager>(1);
420 public Slot allocSlot(DatanodeInfo datanode, DomainPeer peer, in allocSlot()
466 void visit(HashMap<DatanodeInfo, PerDatanodeVisitorInfo> info) in visit() argument
474 HashMap<DatanodeInfo, PerDatanodeVisitorInfo> info = in visit()
475 new HashMap<DatanodeInfo, PerDatanodeVisitorInfo>(); in visit()
476 for (Entry<DatanodeInfo, EndpointShmManager> entry : in visit()
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/
JspHelper.java
42 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
84 private static class NodeRecord extends DatanodeInfo {
87 public NodeRecord(DatanodeInfo info, int count) { in NodeRecord()
119 public static DatanodeInfo bestNode(LocatedBlocks blks, Configuration conf) in bestNode()
121 HashMap<DatanodeInfo, NodeRecord> map = in bestNode()
122 new HashMap<DatanodeInfo, NodeRecord>(); in bestNode()
124 DatanodeInfo[] nodes = block.getLocations(); in bestNode()
125 for (DatanodeInfo node : nodes) { in bestNode()
139 private static DatanodeInfo bestNode(DatanodeInfo[] nodes, boolean doRandom) in bestNode()
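
bestNode(LocatedBlocks, Configuration) tallies how often each datanode appears across a file's blocks (the HashMap<DatanodeInfo, NodeRecord> above) and serves the most frequent one. A sketch of that tally with a plain Integer count standing in for NodeRecord; the helper class name is illustrative:

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;

final class BestNodeSketch {
  // Return the datanode that holds replicas of the most blocks in the file;
  // ties keep whichever entry is encountered first.
  static DatanodeInfo mostFrequentNode(LocatedBlocks blks) {
    Map<DatanodeInfo, Integer> counts = new HashMap<DatanodeInfo, Integer>();
    for (LocatedBlock block : blks.getLocatedBlocks()) {
      for (DatanodeInfo node : block.getLocations()) {
        Integer c = counts.get(node);
        counts.put(node, c == null ? 1 : c + 1);
      }
    }
    DatanodeInfo best = null;
    int bestCount = -1;
    for (Map.Entry<DatanodeInfo, Integer> e : counts.entrySet()) {
      if (e.getValue() > bestCount) {
        best = e.getKey();
        bestCount = e.getValue();
      }
    }
    return best;
  }
}
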
/dports/devel/hadoop/hadoop-1.2.1/src/hdfs/org/apache/hadoop/hdfs/
DFSUtil.java
31 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
100 DatanodeInfo[] locations = blk.getLocations(); in locatedBlocks2Locations()
176 public static class StaleComparator implements Comparator<DatanodeInfo> {
191 public int compare(DatanodeInfo a, DatanodeInfo b) {
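
DFSUtil's StaleComparator orders replica locations so that stale datanodes sort last. A sketch of such a comparator; staleness is derived here from getLastUpdate(), whereas the real comparator uses DatanodeInfo's own staleness check and a configured interval:

import java.util.Comparator;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

public class StaleNodeComparatorSketch implements Comparator<DatanodeInfo> {
  private final long staleIntervalMs;

  public StaleNodeComparatorSketch(long staleIntervalMs) {
    this.staleIntervalMs = staleIntervalMs;
  }

  // A node is considered stale if its last heartbeat is older than the interval.
  private boolean isStale(DatanodeInfo node) {
    return System.currentTimeMillis() - node.getLastUpdate() > staleIntervalMs;
  }

  public int compare(DatanodeInfo a, DatanodeInfo b) {
    boolean aStale = isStale(a);
    boolean bStale = isStale(b);
    if (aStale == bStale) {
      return 0;              // preserve the namenode's ordering among equals
    }
    return aStale ? 1 : -1;  // stale nodes sort last
  }
}
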
/dports/devel/hadoop/hadoop-1.2.1/src/test/org/apache/hadoop/hdfs/
TestGetBlocks.java
35 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
105 DatanodeInfo[] nodes = blocks.get(0).getLocations(); in testReadSelectNonStaleDatanode()
119 DatanodeInfo[] nodesAfterStale = blocksAfterStale.get(0).getLocations(); in testReadSelectNonStaleDatanode()
161 DatanodeInfo[] dataNodes=null; in testGetBlocks()
214 getBlocksWithException(namenode, new DatanodeInfo(), 2); in testGetBlocks()
221 DatanodeInfo datanode, in getBlocksWithException()
225 namenode.getBlocks(new DatanodeInfo(), 2); in getBlocksWithException()
/dports/devel/hadoop/hadoop-1.2.1/src/hdfs/org/apache/hadoop/hdfs/web/
JsonUtil.java
36 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
37 import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates;
53 private static final DatanodeInfo[] EMPTY_DATANODE_INFO_ARRAY = {};
216 private static Map<String, Object> toJsonMap(final DatanodeInfo datanodeinfo) { in toJsonMap()
240 private static DatanodeInfo toDatanodeInfo(final Map<?, ?> m) { in toDatanodeInfo()
245 return new DatanodeInfo( in toDatanodeInfo()
262 private static Object[] toJsonArray(final DatanodeInfo[] array) { in toJsonArray()
277 private static DatanodeInfo[] toDatanodeInfoArray(final Object[] objects) { in toDatanodeInfoArray()
283 final DatanodeInfo[] array = new DatanodeInfo[objects.length]; in toDatanodeInfoArray()
315 final DatanodeInfo[] locations = toDatanodeInfoArray( in toLocatedBlock()
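
JsonUtil converts a DatanodeInfo to and from a String-keyed map for WebHDFS responses (toJsonMap/toDatanodeInfo above). A sketch of the serializing direction only; the getter set is assumed from the Hadoop 1.x API and the field list is deliberately partial:

import java.util.Map;
import java.util.TreeMap;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

final class DatanodeJsonSketch {
  // Flatten a DatanodeInfo into the String-keyed map the JSON layer renders.
  static Map<String, Object> toJsonMap(DatanodeInfo dn) {
    if (dn == null) {
      return null;
    }
    Map<String, Object> m = new TreeMap<String, Object>();
    m.put("name", dn.getName());            // host:port
    m.put("storageID", dn.getStorageID());
    m.put("capacity", dn.getCapacity());
    m.put("dfsUsed", dn.getDfsUsed());
    m.put("remaining", dn.getRemaining());
    m.put("lastUpdate", dn.getLastUpdate());
    m.put("xceiverCount", dn.getXceiverCount());
    m.put("networkLocation", dn.getNetworkLocation());
    m.put("hostName", dn.getHostName());
    return m;
  }
}
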
