Repository: spark Updated Branches: refs/heads/branch-1.0 c23bfd7de -> 0bc83210d
Fixing AWS instance type information based upon current EC2 data Fixed a problem in the previous file in which some information regarding AWS instance types was wrong. Such information was updated based upon current AWS EC2 data. Author: Zichuan Ye <[email protected]> Closes #1156 from jerry86/master and squashes the following commits: ff36e95 [Zichuan Ye] Fixing AWS instance type information based upon current EC2 data (cherry picked from commit 62d4a0fa9947e64c1533f66ae577557bcfb271c9) Conflicts: ec2/spark_ec2.py Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/0bc83210 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/0bc83210 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/0bc83210 Branch: refs/heads/branch-1.0 Commit: 0bc83210d884eae14eaf32b296fc6460f05e139c Parents: c23bfd7 Author: Zichuan Ye <[email protected]> Authored: Thu Jun 26 15:21:29 2014 -0700 Committer: Patrick Wendell <[email protected]> Committed: Thu Jun 26 15:25:52 2014 -0700 ---------------------------------------------------------------------- ec2/spark_ec2.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/spark/blob/0bc83210/ec2/spark_ec2.py ---------------------------------------------------------------------- diff --git a/ec2/spark_ec2.py b/ec2/spark_ec2.py index 9eb1657..d89ef34 100755 --- a/ec2/spark_ec2.py +++ b/ec2/spark_ec2.py @@ -164,7 +164,7 @@ def is_active(instance): # Return correct versions of Spark and Shark, given the supplied Spark version def get_spark_shark_version(opts): spark_shark_map = {"0.7.3": "0.7.1", "0.8.0": "0.8.0", "0.8.1": "0.8.1", "0.9.0": "0.9.0", - "0.9.1": "0.9.1", "1.0.0": "1.0.0", "1.0.1", "1.0.0"} + "0.9.1": "0.9.1", "1.0.0": "1.0.0", "1.0.1": "1.0.0"} version = opts.spark_version.replace("v", "") if version not in spark_shark_map: print >> 
stderr, "Don't know about Spark version: %s" % version @@ -173,6 +173,8 @@ def get_spark_shark_version(opts): # Attempt to resolve an appropriate AMI given the architecture and # region of the request. +# Information regarding Amazon Linux AMI instance type was updated on 2014-6-20: +# http://aws.amazon.com/amazon-linux-ami/instance-type-matrix/ def get_spark_ami(opts): instance_types = { "m1.small": "pvm", @@ -190,6 +192,8 @@ def get_spark_ami(opts): "cg1.4xlarge": "hvm", "hs1.8xlarge": "hvm", "hi1.4xlarge": "hvm", + "m3.medium": "hvm", + "m3.large": "hvm", "m3.xlarge": "hvm", "m3.2xlarge": "hvm", "cr1.8xlarge": "hvm", @@ -484,7 +488,8 @@ def wait_for_cluster(conn, wait_secs, master_nodes, slave_nodes): # Get number of local disks available for a given EC2 instance type. def get_num_disks(instance_type): - # From http://docs.amazonwebservices.com/AWSEC2/latest/UserGuide/index.html?InstanceStorage.html + # From http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html + # Updated 2014-6-20 disks_by_instance = { "m1.small": 1, "m1.medium": 1, @@ -502,8 +507,10 @@ def get_num_disks(instance_type): "hs1.8xlarge": 24, "cr1.8xlarge": 2, "hi1.4xlarge": 2, - "m3.xlarge": 0, - "m3.2xlarge": 0, + "m3.medium": 1, + "m3.large": 1, + "m3.xlarge": 2, + "m3.2xlarge": 2, "i2.xlarge": 1, "i2.2xlarge": 2, "i2.4xlarge": 4, @@ -517,7 +524,9 @@ def get_num_disks(instance_type): "r3.xlarge": 1, "r3.2xlarge": 1, "r3.4xlarge": 1, - "r3.8xlarge": 2 + "r3.8xlarge": 2, + "g2.2xlarge": 1, + "t1.micro": 0 } if instance_type in disks_by_instance: return disks_by_instance[instance_type]
