#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

include Java

# Wrapper for org.apache.hadoop.hbase.client.Table

module Hbase
  class Table
    include HBaseConstants

    @@thread_pool = nil

    # Add the command 'name' to the table, such that the shell command is also
    # invoked via 'name' and forwards to an internal method of the same name.
    #
    # e.g. name = scan, adds table.scan which calls Scan.scan
    def self.add_shell_command(name)
      self.add_command(name, name, name)
    end

    # Add a named command to the table instance
    #
    # name - name of the command that should be added to the table
    #    (eg. sending 'scan' here would allow you to do table.scan)
    # shell_command - name of the command in the shell
    # internal_method_name - name of the method in the shell command to which the call is forwarded
    def self.add_command(name, shell_command, internal_method_name)
      method = name.to_sym
      self.class_eval do
        define_method method do |*args|
          @shell.internal_command(shell_command, internal_method_name, self, *args)
        end
      end
    end
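
    # Illustrative sketch (not part of the original source): add_shell_command('scan')
    # defines an instance method #scan on Table that simply forwards to the shell's
    # 'scan' command, so both forms below end up in the same code path:
    #   hbase> scan 't'
    #   hbase> t.scan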

    # General help for the table
    # Class level so we can call it from anywhere
    def self.help
      return <<-EOF
Help for table-reference commands.

You can either create a table via 'create' and then manipulate the table via commands like 'put', 'get', etc.
See the standard help information for how to use each of these commands.

However, as of 0.96, you can also get a reference to a table, on which you can invoke commands.
For instance, you can create a table and keep around a reference to it via:

   hbase> t = create 't', 'cf'

Or, if you have already created the table, you can get a reference to it:

   hbase> t = get_table 't'

You can do things like call 'put' on the table:

  hbase> t.put 'r', 'cf:q', 'v'

which puts a row 'r' with column family 'cf', qualifier 'q' and value 'v' into table t.

To read the data out, you can scan the table:

  hbase> t.scan

which will read all the rows in table 't'.

Essentially, any command that takes a table name can also be done via table reference.
Other commands include things like: get, delete, deleteall,
get_all_columns, get_counter, count, incr. These functions, along with
the standard JRuby object methods, are also available via tab completion.

For more information on how to use each of these commands, you can also just type:

   hbase> t.help 'scan'

which will output more information on how to use that command.

You can also do general admin actions directly on a table; things like enable, disable,
flush and drop just by typing:

   hbase> t.enable
   hbase> t.flush
   hbase> t.disable
   hbase> t.drop

Note that after dropping a table, your reference to it becomes useless and further usage
is undefined (and not recommended).
EOF
    end

    #---------------------------------------------------------------------------------------------

    # Let external objects read the underlying table object
    attr_reader :table
    # Let external objects read the table name
    attr_reader :name

    def initialize(table, shell)
      @table = table
      @name = @table.getName().getNameAsString()
      @shell = shell
      @converters = Hash.new()
    end

    def close()
      @table.close()
    end

    # Note that the methods below are prefixed with '_' to hide them from the average user;
    # tab completion is then much less likely to land on these 'dangerous' internal methods
    #----------------------------------------------------------------------------------------------

    # Put a cell 'value' at specified table/row/column
    def _put_internal(row, column, value, timestamp = nil, args = {})
      p = org.apache.hadoop.hbase.client.Put.new(row.to_s.to_java_bytes)
      family, qualifier = parse_column_name(column)
      if args.any?
        attributes = args[ATTRIBUTES]
        set_attributes(p, attributes) if attributes
        visibility = args[VISIBILITY]
        set_cell_visibility(p, visibility) if visibility
        ttl = args[TTL]
        set_op_ttl(p, ttl) if ttl
      end
      # Case where attributes are specified without timestamp
      if timestamp.kind_of?(Hash)
        timestamp.each do |k, v|
          if k == 'ATTRIBUTES'
            set_attributes(p, v)
          elsif k == 'VISIBILITY'
            set_cell_visibility(p, v)
          elsif k == 'TTL'
            set_op_ttl(p, v)
          end
        end
        timestamp = nil
      end
      if timestamp
        p.add(family, qualifier, timestamp, value.to_s.to_java_bytes)
      else
        p.add(family, qualifier, value.to_s.to_java_bytes)
      end
      @table.put(p)
    end
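
    # Illustrative shell usage backed by _put_internal (a sketch; the row, column and
    # values are made-up examples, and the optional args hash uses the HBaseConstants keys
    # handled above):
    #   hbase> t.put 'r1', 'cf:q1', 'v1'
    #   hbase> t.put 'r1', 'cf:q1', 'v1', 1416083300000
    #   hbase> t.put 'r1', 'cf:q1', 'v1', {ATTRIBUTES => {'mykey' => 'myvalue'}, TTL => 3600000}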

    #----------------------------------------------------------------------------------------------
    # Delete a cell
    def _delete_internal(row, column,
                         timestamp = org.apache.hadoop.hbase.HConstants::LATEST_TIMESTAMP, args = {})
      _deleteall_internal(row, column, timestamp, args)
    end

    #----------------------------------------------------------------------------------------------
    # Delete a row
    def _deleteall_internal(row, column = nil,
                            timestamp = org.apache.hadoop.hbase.HConstants::LATEST_TIMESTAMP, args = {})
      # delete operation doesn't need read permission. Retaining the read check for
      # meta table as a part of HBASE-5837.
      if is_meta_table?
        raise ArgumentError, "Row Not Found" if _get_internal(row).nil?
      end
      temptimestamp = timestamp
      if temptimestamp.kind_of?(Hash)
        timestamp = org.apache.hadoop.hbase.HConstants::LATEST_TIMESTAMP
      end
      d = org.apache.hadoop.hbase.client.Delete.new(row.to_s.to_java_bytes, timestamp)
      if temptimestamp.kind_of?(Hash)
        temptimestamp.each do |k, v|
          if v.kind_of?(String)
            set_cell_visibility(d, v) if v
          end
        end
      end
      if args.any?
        visibility = args[VISIBILITY]
        set_cell_visibility(d, visibility) if visibility
      end
      if column
        family, qualifier = parse_column_name(column)
        d.deleteColumns(family, qualifier, timestamp)
      end
      @table.delete(d)
    end
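
    # Illustrative shell usage backed by _delete_internal/_deleteall_internal (a sketch;
    # the row, column, timestamp and visibility values are made-up examples):
    #   hbase> t.delete 'r1', 'cf:q1'
    #   hbase> t.deleteall 'r1'
    #   hbase> t.deleteall 'r1', 'cf:q1', 1416083300000, {VISIBILITY => 'PRIVATE'}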

    #----------------------------------------------------------------------------------------------
    # Increment a counter atomically
    def _incr_internal(row, column, value = nil, args = {})
      if value.kind_of?(Hash)
        value = 1
      end
      value ||= 1
      incr = org.apache.hadoop.hbase.client.Increment.new(row.to_s.to_java_bytes)
      family, qualifier = parse_column_name(column)
      if qualifier.nil?
        raise ArgumentError, "Failed to provide both column family and column qualifier for incr"
      end
      if args.any?
        attributes = args[ATTRIBUTES]
        visibility = args[VISIBILITY]
        set_attributes(incr, attributes) if attributes
        set_cell_visibility(incr, visibility) if visibility
        ttl = args[TTL]
        set_op_ttl(incr, ttl) if ttl
      end
      incr.addColumn(family, qualifier, value)
      result = @table.increment(incr)
      return nil if result.isEmpty

      # Fetch cell value
      cell = result.listCells[0]
      org.apache.hadoop.hbase.util.Bytes::toLong(cell.getValue)
    end
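
    # Illustrative shell usage backed by _incr_internal (a sketch; the row, column and
    # increment amounts are made-up examples):
    #   hbase> t.incr 'r1', 'cf:counter'        # increments by the default of 1
    #   hbase> t.incr 'r1', 'cf:counter', 10    # increments by 10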

    #----------------------------------------------------------------------------------------------
    # Appends a value to a cell atomically
    def _append_internal(row, column, value, args = {})
      append = org.apache.hadoop.hbase.client.Append.new(row.to_s.to_java_bytes)
      family, qualifier = parse_column_name(column)
      if qualifier.nil?
        raise ArgumentError, "Failed to provide both column family and column qualifier for append"
      end
      if args.any?
        attributes = args[ATTRIBUTES]
        visibility = args[VISIBILITY]
        set_attributes(append, attributes) if attributes
        set_cell_visibility(append, visibility) if visibility
        ttl = args[TTL]
        set_op_ttl(append, ttl) if ttl
      end
      append.add(family, qualifier, value.to_s.to_java_bytes)
      @table.append(append)
    end
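
    # Illustrative shell usage backed by _append_internal (a sketch with made-up values):
    #   hbase> t.append 'r1', 'cf:q1', 'suffix'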

    #----------------------------------------------------------------------------------------------
    # Count rows in a table
    def _count_internal(interval = 1000, caching_rows = 10)
      # We can safely set scanner caching with the first key only filter
      scan = org.apache.hadoop.hbase.client.Scan.new
      scan.setCacheBlocks(false)
      scan.setCaching(caching_rows)
      scan.setFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter.new)

      # Run the scanner
      scanner = @table.getScanner(scan)
      count = 0
      iter = scanner.iterator

      # Iterate results
      while iter.hasNext
        row = iter.next
        count += 1
        next unless (block_given? && count % interval == 0)
        # Allow command modules to visualize counting process
        yield(count,
              org.apache.hadoop.hbase.util.Bytes::toStringBinary(row.getRow))
      end

      scanner.close()
      # Return the counter
      return count
    end
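
    # Illustrative use of _count_internal's optional progress block (a sketch; the interval
    # and caching values shown are simply the method defaults above):
    #   t._count_internal(1000, 10) do |cnt, row|
    #     puts "Current count: #{cnt}, row: #{row}"
    #   end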

    #----------------------------------------------------------------------------------------------
    # Get from table
    def _get_internal(row, *args)
      get = org.apache.hadoop.hbase.client.Get.new(row.to_s.to_java_bytes)
      maxlength = -1
      @converters.clear()

      # Normalize args
      args = args.first if args.first.kind_of?(Hash)
      if args.kind_of?(String) || args.kind_of?(Array)
        columns = [ args ].flatten.compact
        args = { COLUMNS => columns }
      end

      #
      # Parse arguments
      #
      unless args.kind_of?(Hash)
        raise ArgumentError, "Failed parse of #{args.inspect}, #{args.class}"
      end

      # Get maxlength parameter if passed
      maxlength = args.delete(MAXLENGTH) if args[MAXLENGTH]
      filter = args.delete(FILTER) if args[FILTER]
      attributes = args[ATTRIBUTES]
      authorizations = args[AUTHORIZATIONS]
      consistency = args.delete(CONSISTENCY) if args[CONSISTENCY]
      replicaId = args.delete(REGION_REPLICA_ID) if args[REGION_REPLICA_ID]
      unless args.empty?
        columns = args[COLUMN] || args[COLUMNS]
        if args[VERSIONS]
          vers = args[VERSIONS]
        else
          vers = 1
        end
        if columns
          # Normalize types, convert a string to an array of strings
          columns = [ columns ] if columns.is_a?(String)

          # At this point it is either an array or some unsupported type
          unless columns.kind_of?(Array)
            raise ArgumentError, "Failed to parse column argument type #{args.inspect}, #{args.class}"
          end

          # Get each column name and add it to the filter
          columns.each do |column|
            family, qualifier = parse_column_name(column.to_s)
            if qualifier
              get.addColumn(family, qualifier)
            else
              get.addFamily(family)
            end
          end

          # Additional params
          get.setMaxVersions(vers)
          get.setTimeStamp(args[TIMESTAMP]) if args[TIMESTAMP]
          get.setTimeRange(args[TIMERANGE][0], args[TIMERANGE][1]) if args[TIMERANGE]
        else
          if attributes
            set_attributes(get, attributes)
          elsif authorizations
            set_authorizations(get, authorizations)
          else
            # May have passed TIMESTAMP and row only; wants all columns from ts.
            unless ts = args[TIMESTAMP] || tr = args[TIMERANGE]
              raise ArgumentError, "Failed parse of #{args.inspect}, #{args.class}"
            end
          end

          get.setMaxVersions(vers)
          # Set the timestamp/timerange
          get.setTimeStamp(ts.to_i) if args[TIMESTAMP]
          get.setTimeRange(args[TIMERANGE][0], args[TIMERANGE][1]) if args[TIMERANGE]
        end
        set_attributes(get, attributes) if attributes
        set_authorizations(get, authorizations) if authorizations
      end

      if filter.class == String
        get.setFilter(
          org.apache.hadoop.hbase.filter.ParseFilter.new.parseFilterString(filter.to_java_bytes))
      else
        get.setFilter(filter)
      end

      get.setConsistency(org.apache.hadoop.hbase.client.Consistency.valueOf(consistency)) if consistency
      get.setReplicaId(replicaId) if replicaId

      # Call hbase for the results
      result = @table.get(get)
      return nil if result.isEmpty

      # Print out results.  Result can be Cell or RowResult.
      res = {}
      result.list.each do |kv|
        family = String.from_java_bytes(kv.getFamily)
        qualifier = org.apache.hadoop.hbase.util.Bytes::toStringBinary(kv.getQualifier)

        column = "#{family}:#{qualifier}"
        value = to_string(column, kv, maxlength)

        if block_given?
          yield(column, value)
        else
          res[column] = value
        end
      end

      # If block given, we've yielded all the results, otherwise just return them
      return ((block_given?) ? nil : res)
    end
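
    # Illustrative shell usage backed by _get_internal (a sketch; the row, columns and
    # timestamp values are made-up, and the keys are the HBaseConstants handled above):
    #   hbase> t.get 'r1'
    #   hbase> t.get 'r1', COLUMNS => ['cf:q1', 'cf:q2'], VERSIONS => 2
    #   hbase> t.get 'r1', TIMERANGE => [0, 1416083300000], MAXLENGTH => 80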

    #----------------------------------------------------------------------------------------------
    # Fetches and decodes a counter value from hbase
    def _get_counter_internal(row, column)
      family, qualifier = parse_column_name(column.to_s)
      # Format get request
      get = org.apache.hadoop.hbase.client.Get.new(row.to_s.to_java_bytes)
      get.addColumn(family, qualifier)
      get.setMaxVersions(1)

      # Call hbase
      result = @table.get(get)
      return nil if result.isEmpty

      # Fetch cell value
      cell = result.list[0]
      org.apache.hadoop.hbase.util.Bytes::toLong(cell.getValue)
    end

    # Builds an org.apache.hadoop.hbase.client.Scan from the given args hash
    def _hash_to_scan(args)
      if args.any?
        enablemetrics = args["ALL_METRICS"].nil? ? false : args["ALL_METRICS"]
        enablemetrics = enablemetrics || !args["METRICS"].nil?
        filter = args["FILTER"]
        startrow = args["STARTROW"] || ''
        stoprow = args["STOPROW"]
        rowprefixfilter = args["ROWPREFIXFILTER"]
        timestamp = args["TIMESTAMP"]
        columns = args["COLUMNS"] || args["COLUMN"] || []
        # If CACHE_BLOCKS is not set, default to 'true'.
        cache_blocks = args["CACHE_BLOCKS"].nil? ? true : args["CACHE_BLOCKS"]
        cache = args["CACHE"] || 0
        reversed = args["REVERSED"] || false
        versions = args["VERSIONS"] || 1
        timerange = args[TIMERANGE]
        raw = args["RAW"] || false
        attributes = args[ATTRIBUTES]
        authorizations = args[AUTHORIZATIONS]
        consistency = args[CONSISTENCY]
        # Normalize column names
        columns = [columns] if columns.class == String
        limit = args["LIMIT"] || -1
        unless columns.kind_of?(Array)
          raise ArgumentError.new("COLUMNS must be specified as a String or an Array")
        end

        scan = if stoprow
          org.apache.hadoop.hbase.client.Scan.new(startrow.to_java_bytes, stoprow.to_java_bytes)
        else
          org.apache.hadoop.hbase.client.Scan.new(startrow.to_java_bytes)
        end

        # This will overwrite any startrow/stoprow settings
        scan.setRowPrefixFilter(rowprefixfilter.to_java_bytes) if rowprefixfilter

        columns.each do |c|
          family, qualifier = parse_column_name(c.to_s)
          if qualifier
            scan.addColumn(family, qualifier)
          else
            scan.addFamily(family)
          end
        end

        if filter.class == String
          scan.setFilter(
            org.apache.hadoop.hbase.filter.ParseFilter.new.parseFilterString(filter.to_java_bytes))
        else
          scan.setFilter(filter)
        end

        scan.setScanMetricsEnabled(enablemetrics) if enablemetrics
        scan.setTimeStamp(timestamp) if timestamp
        scan.setCacheBlocks(cache_blocks)
        scan.setReversed(reversed)
        scan.setCaching(cache) if cache > 0
        scan.setMaxVersions(versions) if versions > 1
        scan.setTimeRange(timerange[0], timerange[1]) if timerange
        scan.setRaw(raw)
        scan.setCaching(limit) if limit > 0
        set_attributes(scan, attributes) if attributes
        set_authorizations(scan, authorizations) if authorizations
        scan.setConsistency(org.apache.hadoop.hbase.client.Consistency.valueOf(consistency)) if consistency
      else
        scan = org.apache.hadoop.hbase.client.Scan.new
      end

      scan
    end
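
    # Illustrative args hash accepted by _hash_to_scan (a sketch; the row keys, column
    # names and limit are made-up example values):
    #   _hash_to_scan("STARTROW" => 'r1', "STOPROW" => 'r9',
    #                 "COLUMNS" => ['cf:q1'], "LIMIT" => 10, "REVERSED" => false)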

    # Returns a ResultScanner for the Scan built from the given args hash
    def _get_scanner(args)
      @table.getScanner(_hash_to_scan(args))
    end

    #----------------------------------------------------------------------------------------------
    # Scans the whole table or a range of keys and returns rows matching specific criteria
    def _scan_internal(args = {}, scan = nil)
      raise(ArgumentError, "Args should be a Hash") unless args.kind_of?(Hash)
      raise(ArgumentError, "Scan argument should be org.apache.hadoop.hbase.client.Scan") \
        unless scan == nil || scan.kind_of?(org.apache.hadoop.hbase.client.Scan)

      limit = args["LIMIT"] || -1
      maxlength = args.delete("MAXLENGTH") || -1
      count = 0
      res = {}

      @converters.clear()

      # Start the scanner
      scan = scan == nil ? _hash_to_scan(args) : scan
      scanner = @table.getScanner(scan)
      iter = scanner.iterator

      # Iterate results
      while iter.hasNext
        row = iter.next
        key = org.apache.hadoop.hbase.util.Bytes::toStringBinary(row.getRow)

        row.list.each do |kv|
          family = String.from_java_bytes(kv.getFamily)
          qualifier = org.apache.hadoop.hbase.util.Bytes::toStringBinary(kv.getQualifier)

          column = "#{family}:#{qualifier}"
          cell = to_string(column, kv, maxlength)

          if block_given?
            yield(key, "column=#{column}, #{cell}")
          else
            res[key] ||= {}
            res[key][column] = cell
          end
        end

        # One more row processed
        count += 1
        if limit > 0 && count >= limit
          # If we reached the limit, exit before the next call to hasNext
          break
        end
      end

      scanner.close()
      return ((block_given?) ? count : res)
    end
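
    # Illustrative shell usage backed by _scan_internal (a sketch; row keys, column names
    # and the filter string are made-up example values):
    #   hbase> t.scan
    #   hbase> t.scan 'STARTROW' => 'r1', 'STOPROW' => 'r9', 'COLUMNS' => ['cf:q1'], 'LIMIT' => 10
    #   hbase> t.scan 'FILTER' => "ValueFilter(=, 'binary:v1')"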

    # Apply OperationAttributes to puts/scans/gets
    def set_attributes(oprattr, attributes)
      raise(ArgumentError, "Attributes must be a Hash type") unless attributes.kind_of?(Hash)
      for k, v in attributes
        v = v.to_s unless v.nil?
        oprattr.setAttribute(k.to_s, v.to_java_bytes)
      end
    end

    # Set per-cell ACL permissions on a mutation
    def set_cell_permissions(op, permissions)
      raise(ArgumentError, "Permissions must be a Hash type") unless permissions.kind_of?(Hash)
      map = java.util.HashMap.new
      permissions.each do |user, perms|
        map.put(user.to_s, org.apache.hadoop.hbase.security.access.Permission.new(
          perms.to_java_bytes))
      end
      op.setACL(map)
    end

    # Set the cell visibility expression on a mutation
    def set_cell_visibility(oprattr, visibility)
      oprattr.setCellVisibility(
        org.apache.hadoop.hbase.security.visibility.CellVisibility.new(
          visibility.to_s))
    end

    # Set the visibility authorizations on a get/scan
    def set_authorizations(oprattr, authorizations)
      raise(ArgumentError, "Authorizations must be an Array type") unless authorizations.kind_of?(Array)
      auths = [ authorizations ].flatten.compact
      oprattr.setAuthorizations(
        org.apache.hadoop.hbase.security.visibility.Authorizations.new(
          auths.to_java(:string)))
    end

    # Set the TTL on a mutation
    def set_op_ttl(op, ttl)
      op.setTTL(ttl.to_java(:long))
    end

    #----------------------------
    # Add general administration utilities to the shell
    # each of the names below adds a method of that name to the table
    # by calling the corresponding method in the shell
    # Add single method utilities to the current class
    # Generally used for admin functions which just have one name and take the table name
    def self.add_admin_utils(*args)
      args.each do |method|
        define_method method do |*method_args|
          @shell.command(method, @name, *method_args)
        end
      end
    end

    # Add the following admin utilities to the table
    add_admin_utils :enable, :disable, :flush, :drop, :describe, :snapshot

    #----------------------------
    # Give the general help for the table
    # or the named command
    def help(command = nil)
      # If there is a command, get the per-command help from the shell
      if command
        begin
          return @shell.help_command(command)
        rescue NoMethodError
          puts "Command \'#{command}\' does not exist. Please see general table help."
          return nil
        end
      end
      return @shell.help('table_help')
    end

    # Table to string
    def to_s
      cl = self.class
      return "#{cl} - #{@name}"
    end

    # Standard Ruby call to get the return value for an object,
    # overridden here so we get sane semantics for printing a table on return
    def inspect
      to_s
    end

    #----------------------------------------------------------------------------------------
    # Helper methods

    # Returns a list of column names in the table
    def get_all_columns
      @table.table_descriptor.getFamilies.map do |family|
        "#{family.getNameAsString}:"
      end
    end

    # Checks if the current table is one of the 'meta' tables
    def is_meta_table?
      org.apache.hadoop.hbase.TableName::META_TABLE_NAME.equals(@table.getName())
    end

    # Returns the family and (when present) the qualifier for a column name
    def parse_column_name(column)
      split = org.apache.hadoop.hbase.KeyValue.parseColumn(column.to_java_bytes)
      set_converter(split) if split.length > 1
      return split[0], (split.length > 1) ? split[1] : nil
    end

    # Make a String of the passed kv
    # Intercept cells whose format we know, such as info:regioninfo in hbase:meta
    def to_string(column, kv, maxlength = -1)
      if is_meta_table?
        if column == 'info:regioninfo' or column == 'info:splitA' or column == 'info:splitB'
          hri = org.apache.hadoop.hbase.HRegionInfo.parseFromOrNull(kv.getValue)
          return "timestamp=%d, value=%s" % [kv.getTimestamp, hri.toString]
        end
        if column == 'info:serverstartcode'
          if kv.getValue.length > 0
            str_val = org.apache.hadoop.hbase.util.Bytes.toLong(kv.getValue)
          else
            str_val = org.apache.hadoop.hbase.util.Bytes.toStringBinary(kv.getValue)
          end
          return "timestamp=%d, value=%s" % [kv.getTimestamp, str_val]
        end
      end

      if kv.isDelete
        val = "timestamp=#{kv.getTimestamp}, type=#{org.apache.hadoop.hbase.KeyValue::Type::codeToType(kv.getType)}"
      else
        val = "timestamp=#{kv.getTimestamp}, value=#{convert(column, kv)}"
      end
      (maxlength != -1) ? val[0, maxlength] : val
    end

    # Convert a cell value to a String using the converter registered for the column
    def convert(column, kv)
      # Use org.apache.hadoop.hbase.util.Bytes as the default class
      klazz_name = 'org.apache.hadoop.hbase.util.Bytes'
      # Use org.apache.hadoop.hbase.util.Bytes::toStringBinary as the default converter
      converter = 'toStringBinary'
      if @converters.has_key?(column)
        # Look up the CONVERTER for the given column - "cf:qualifier"
        matches = /c\((.+)\)\.(.+)/.match(@converters[column])
        if matches.nil?
          # Cannot match the pattern of 'c(className).functionname';
          # use the default klazz_name
          converter = @converters[column]
        else
          klazz_name = matches[1]
          converter = matches[2]
        end
      end
      method = eval(klazz_name).method(converter)
      return method.call(kv.getValue) # apply the converter
    end

    # If the column spec contains CONVERTER information, strip the :CONVERTER suffix from the column pair:
    # 1. return the normal column pair as usual, i.e., reduce "cf:qualifier[:CONVERTER]" to "cf" and "qualifier" only
    # 2. register the CONVERTER information under the column spec - "cf:qualifier"
    def set_converter(column)
      family = String.from_java_bytes(column[0])
      parts = org.apache.hadoop.hbase.KeyValue.parseColumn(column[1])
      if parts.length > 1
        @converters["#{family}:#{String.from_java_bytes(parts[0])}"] = String.from_java_bytes(parts[1])
        column[1] = parts[0]
      end
    end
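
    # Illustrative column specs carrying CONVERTER information (a sketch; the row and
    # column names are made-up examples of the 'cf:qualifier:c(className).converter' form):
    #   hbase> t.get 'r1', {COLUMNS => ['cf:counter:toLong']}
    #   hbase> t.get 'r1', {COLUMNS => ['cf:counter:c(org.apache.hadoop.hbase.util.Bytes).toLong']}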

    #----------------------------------------------------------------------------------------------
    # Get the split points for the table
    def _get_splits_internal()
      locator = @table.getRegionLocator()
      splits = locator.getAllRegionLocations().
          map{|i| Bytes.toStringBinary(i.getRegionInfo().getStartKey)}.delete_if{|k| k == ""}
      locator.close()
      puts("Total number of splits = %s" % [splits.size + 1])
      return splits
    end
  end
end