/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.spark.scheduler.cluster

import org.apache.spark.annotation.DeveloperApi
20
/**
 * :: DeveloperApi ::
 * Stores information about an executor to pass from the scheduler to SparkListeners.
 *
 * Equality and hashing are structural over all three fields, with a `canEqual`
 * guard so that subclasses can preserve the symmetry of `equals`.
 */
@DeveloperApi
class ExecutorInfo(
    val executorHost: String,
    val totalCores: Int,
    val logUrlMap: Map[String, String]) {

  /** Whether `other` is an ExecutorInfo and may therefore compare equal to this one. */
  def canEqual(other: Any): Boolean = other.isInstanceOf[ExecutorInfo]

  override def equals(other: Any): Boolean = other match {
    // The canEqual guard lives in the pattern so the field comparison only
    // runs against a value that accepts comparison with this class.
    case that: ExecutorInfo if that.canEqual(this) =>
      executorHost == that.executorHost &&
        totalCores == that.totalCores &&
        logUrlMap == that.logUrlMap
    case _ => false
  }

  // Standard 31-based accumulation over the same fields equals() inspects,
  // keeping hashCode consistent with equals.
  override def hashCode(): Int =
    Seq(executorHost, totalCores, logUrlMap)
      .foldLeft(0)((acc, field) => 31 * acc + field.hashCode())
}
47