Source: vertx-kafka-client-js/kafka_consumer.js

/*
 * Copyright 2014 Red Hat, Inc.
 *
 * Red Hat licenses this file to you under the Apache License, version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License.  You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

/** @module vertx-kafka-client-js/kafka_consumer */
var utils = require('vertx-js/util/utils');
var Pipe = require('vertx-js/pipe');
var KafkaConsumerRecord = require('vertx-kafka-client-js/kafka_consumer_record');
var WriteStream = require('vertx-js/write_stream');
var Vertx = require('vertx-js/vertx');
var ReadStream = require('vertx-js/read_stream');
var KafkaConsumerRecords = require('vertx-kafka-client-js/kafka_consumer_records');
var Future = require('vertx-js/future');
var Promise = require('vertx-js/promise');

var io = Packages.io;
var JsonObject = io.vertx.core.json.JsonObject;
var JsonArray = io.vertx.core.json.JsonArray;
var JKafkaConsumer = Java.type('io.vertx.kafka.client.consumer.KafkaConsumer');
var OffsetAndMetadata = Java.type('io.vertx.kafka.client.consumer.OffsetAndMetadata');
var PartitionInfo = Java.type('io.vertx.kafka.client.common.PartitionInfo');
var OffsetAndTimestamp = Java.type('io.vertx.kafka.client.consumer.OffsetAndTimestamp');
var TopicPartition = Java.type('io.vertx.kafka.client.common.TopicPartition');

/**
 Vert.x Kafka consumer.
 @class
*/
var KafkaConsumer = function(j_val, j_arg_0, j_arg_1) {

  var j_kafkaConsumer = j_val;
  var that = this;
  var j_K = typeof j_arg_0 !== 'undefined' ? j_arg_0 : utils.unknown_jtype;
  var j_V = typeof j_arg_1 !== 'undefined' ? j_arg_1 : utils.unknown_jtype;
  ReadStream.call(this, j_val);

  var __super_pipe = this.pipe;
  var __super_pipeTo = this.pipeTo;
  var __super_create = this.create;
  var __super_exceptionHandler = this.exceptionHandler;
  var __super_handler = this.handler;
  var __super_pause = this.pause;
  var __super_resume = this.resume;
  var __super_fetch = this.fetch;
  var __super_endHandler = this.endHandler;
  var __super_subscribe = this.subscribe;
  var __super_assign = this.assign;
  var __super_assignment = this.assignment;
  var __super_unsubscribe = this.unsubscribe;
  var __super_subscription = this.subscription;
  var __super_paused = this.paused;
  var __super_partitionsRevokedHandler = this.partitionsRevokedHandler;
  var __super_partitionsAssignedHandler = this.partitionsAssignedHandler;
  var __super_seek = this.seek;
  var __super_seekToBeginning = this.seekToBeginning;
  var __super_seekToEnd = this.seekToEnd;
  var __super_commit = this.commit;
  var __super_committed = this.committed;
  var __super_partitionsFor = this.partitionsFor;
  var __super_batchHandler = this.batchHandler;
  var __super_close = this.close;
  var __super_position = this.position;
  var __super_offsetsForTimes = this.offsetsForTimes;
  var __super_beginningOffsets = this.beginningOffsets;
  var __super_endOffsets = this.endOffsets;
  var __super_pollTimeout = this.pollTimeout;
  var __super_poll = this.poll;
  /**
   Pause this stream and return a <code>Pipe</code> to transfer the elements of this stream to a destination <code>WriteStream</code>.
   <p/>
   The stream will be resumed when the pipe is wired to a <code>WriteStream</code>.

   @public

   @return {Pipe} a pipe
   */
  this.pipe =  function() {
    var __args = arguments;
    if (__args.length === 0) {
      return utils.convReturnVertxGen(Pipe, j_kafkaConsumer["pipe()"](), KafkaConsumerRecord._jtype) ;
    } else if (typeof __super_pipe != 'undefined') {
      return __super_pipe.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };

  /**
   Pipe this <code>ReadStream</code> to the <code>WriteStream</code>.
   <p>
   Elements emitted by this stream will be written to the write stream until this stream ends or fails.
   <p>
   Once this stream has ended or failed, the write stream will be ended and the <code>handler</code> will be
   called with the result.

   @public
   @param dst {WriteStream} the destination write stream 
   @param handler {function} handler called when the pipe operation completes 
   */
  this.pipeTo =  function(dst, handler) {
    var __args = arguments;
    if (__args.length === 2 && typeof __args[0] === 'object' && __args[0]._jdel && typeof __args[1] === 'function') {
      j_kafkaConsumer["pipeTo(io.vertx.core.streams.WriteStream,io.vertx.core.Handler)"](dst._jdel, function(ar) {
        if (ar.succeeded()) {
          handler(null, null);
        } else {
          handler(null, ar.cause());
        }
      });
    } else if (__args.length === 1 && typeof __args[0] === 'object' && __args[0]._jdel) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["pipeTo(io.vertx.core.streams.WriteStream,io.vertx.core.Handler)"](dst._jdel, function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(null, null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_pipeTo != 'undefined') {
      return __super_pipeTo.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };
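
  // Usage sketch, assuming "consumer" is this KafkaConsumer instance and
  // "destination" is any placeholder object implementing WriteStream:
  //
  //   consumer.pipeTo(destination, function (res, err) {
  //     if (err == null) {
  //       console.log("all records piped to the destination");
  //     }
  //   });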

  /**
   Set an exception handler on the stream.

   @public
   @param handler {function} the handler called with the failure 
   @return {KafkaConsumer} current KafkaConsumer instance
   */
  this.exceptionHandler =  function(handler) {
    var __args = arguments;
    if (__args.length === 1 && (typeof __args[0] === 'function' || __args[0] == null)) {
      j_kafkaConsumer["exceptionHandler(io.vertx.core.Handler)"](handler == null ? null : function(jVal) {
        handler(utils.convReturnThrowable(jVal));
      }) ;
      return that;
    } else if (typeof __super_exceptionHandler != 'undefined') {
      return __super_exceptionHandler.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };

  /**
   Set a handler for incoming records; as records are read, the handler is
   called with each <code>KafkaConsumerRecord</code>.

   @public
   @param handler {function} the record handler 
   @return {KafkaConsumer} current KafkaConsumer instance
   */
  this.handler =  function(handler) {
    var __args = arguments;
    if (__args.length === 1 && (typeof __args[0] === 'function' || __args[0] == null)) {
      j_kafkaConsumer["handler(io.vertx.core.Handler)"](handler == null ? null : function(jVal) {
        handler(utils.convReturnVertxGen(KafkaConsumerRecord, jVal, undefined, undefined));
      }) ;
      return that;
    } else if (typeof __super_handler != 'undefined') {
      return __super_handler.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };
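
  // Usage sketch: log each record as it arrives; the accessors shown
  // (key, value, partition, offset) come from the KafkaConsumerRecord API:
  //
  //   consumer.handler(function (record) {
  //     console.log("key=" + record.key() + ",value=" + record.value() +
  //       ",partition=" + record.partition() + ",offset=" + record.offset());
  //   });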

  /**
   Suspend fetching from the requested partitions.
   <p>
   Due to internal buffering of messages, the record handler will
   continue to observe messages from the given <code>topicPartitions</code>
   until some time <em>after</em> the given <code>completionHandler</code>
   is called. In contrast, once the given <code>completionHandler</code>
   has been called the {@link KafkaConsumer#batchHandler} will not see messages
   from the given <code>topicPartitions</code>.

   @public
   @param topicPartitions {Array.<Object>} topic partitions from which to suspend fetching 
   @param completionHandler {function} handler called when the operation completes 
   @return {KafkaConsumer} current KafkaConsumer instance
   */
  this.pause =  function() {
    var __args = arguments;
    if (__args.length === 0) {
      j_kafkaConsumer["pause()"]() ;
      return that;
    } else if (__args.length === 2 && (typeof __args[0] === 'object' && __args[0] != null) && typeof __args[1] === 'function') {
      j_kafkaConsumer["pause(io.vertx.kafka.client.common.TopicPartition,io.vertx.core.Handler)"](__args[0]  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(__args[0]))) : null, function(ar) {
        if (ar.succeeded()) {
          __args[1](null, null);
        } else {
          __args[1](null, ar.cause());
        }
      }) ;
      return that;
    } else if (__args.length === 1 && (typeof __args[0] === 'object' && __args[0] != null)) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["pause(io.vertx.kafka.client.common.TopicPartition,io.vertx.core.Handler)"](__args[0]  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(__args[0]))) : null, function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(null, null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (__args.length === 2 && typeof __args[0] === 'object' && __args[0] instanceof Array && typeof __args[1] === 'function') {
      j_kafkaConsumer["pause(java.util.Set,io.vertx.core.Handler)"](utils.convParamSetDataObjectAnnotated(__args[0], function(json) { return new TopicPartition(json); }), function(ar) {
        if (ar.succeeded()) {
          __args[1](null, null);
        } else {
          __args[1](null, ar.cause());
        }
      }) ;
      return that;
    } else if (__args.length === 1 && typeof __args[0] === 'object' && __args[0] instanceof Array) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["pause(java.util.Set,io.vertx.core.Handler)"](utils.convParamSetDataObjectAnnotated(__args[0], function(json) { return new TopicPartition(json); }), function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(null, null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_pause != 'undefined') {
      return __super_pause.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };

  /**
   Resume the specified partitions which have been paused with {@link KafkaConsumer#pause}.

   @public
   @param topicPartitions {Array.<Object>} topic partitions from which to resume fetching 
   @param completionHandler {function} handler called when the operation completes 
   @return {KafkaConsumer} current KafkaConsumer instance
   */
  this.resume =  function() {
    var __args = arguments;
    if (__args.length === 0) {
      j_kafkaConsumer["resume()"]() ;
      return that;
    } else if (__args.length === 2 && (typeof __args[0] === 'object' && __args[0] != null) && typeof __args[1] === 'function') {
      j_kafkaConsumer["resume(io.vertx.kafka.client.common.TopicPartition,io.vertx.core.Handler)"](__args[0]  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(__args[0]))) : null, function(ar) {
        if (ar.succeeded()) {
          __args[1](null, null);
        } else {
          __args[1](null, ar.cause());
        }
      }) ;
      return that;
    } else if (__args.length === 1 && (typeof __args[0] === 'object' && __args[0] != null)) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["resume(io.vertx.kafka.client.common.TopicPartition,io.vertx.core.Handler)"](__args[0]  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(__args[0]))) : null, function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(null, null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (__args.length === 2 && typeof __args[0] === 'object' && __args[0] instanceof Array && typeof __args[1] === 'function') {
      j_kafkaConsumer["resume(java.util.Set,io.vertx.core.Handler)"](utils.convParamSetDataObjectAnnotated(__args[0], function(json) { return new TopicPartition(json); }), function(ar) {
        if (ar.succeeded()) {
          __args[1](null, null);
        } else {
          __args[1](null, ar.cause());
        }
      }) ;
      return that;
    } else if (__args.length === 1 && typeof __args[0] === 'object' && __args[0] instanceof Array) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["resume(java.util.Set,io.vertx.core.Handler)"](utils.convParamSetDataObjectAnnotated(__args[0], function(json) { return new TopicPartition(json); }), function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(null, null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_resume != 'undefined') {
      return __super_resume.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };
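
  // Usage sketch: a topic partition is a plain JSON object with "topic"
  // and "partition" fields; the topic name here is a placeholder:
  //
  //   var topicPartition = { "topic": "test", "partition": 0 };
  //   consumer.pause(topicPartition, function (res, err) {
  //     if (err == null) {
  //       // fetching from this partition is suspended; resume it later
  //       consumer.resume(topicPartition);
  //     }
  //   });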

  /**
   Fetch the specified <code>amount</code> of elements. If the stream has been
   paused, reading will recommence with the specified amount of items.

   @public
   @param amount {number} the demand to add 
   @return {KafkaConsumer} current KafkaConsumer instance
   */
  this.fetch =  function(amount) {
    var __args = arguments;
    if (__args.length === 1 && typeof __args[0] ==='number') {
      j_kafkaConsumer["fetch(long)"](amount) ;
      return that;
    } else if (typeof __super_fetch != 'undefined') {
      return __super_fetch.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };

  /**
   Set an end handler. Once the stream has ended, and there is no more data
   to be read, this handler will be called.

   @public
   @param endHandler {function} the end handler 
   @return {KafkaConsumer} current KafkaConsumer instance
   */
  this.endHandler =  function(endHandler) {
    var __args = arguments;
    if (__args.length === 1 && (typeof __args[0] === 'function' || __args[0] == null)) {
      j_kafkaConsumer["endHandler(io.vertx.core.Handler)"](endHandler) ;
      return that;
    } else if (typeof __super_endHandler != 'undefined') {
      return __super_endHandler.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };

  /**
   Subscribe to the given list of topics to get dynamically assigned partitions.
   <p>
   Due to internal buffering of messages, when changing the subscribed topics
   the old set of topics may remain in effect
   (as observed by the record handler)
   until some time <em>after</em> the given <code>completionHandler</code>
   is called. In contrast, once the given <code>completionHandler</code>
   has been called the {@link KafkaConsumer#batchHandler} will only see messages
   consistent with the new set of topics.

   @public
   @param topics {Array.<string>} topics to subscribe to 
   @param completionHandler {function} handler called when the operation completes 
   @return {KafkaConsumer} current KafkaConsumer instance
   */
  this.subscribe =  function() {
    var __args = arguments;
    if (__args.length === 2 && typeof __args[0] === 'string' && typeof __args[1] === 'function') {
      j_kafkaConsumer["subscribe(java.lang.String,io.vertx.core.Handler)"](__args[0], function(ar) {
        if (ar.succeeded()) {
          __args[1](null, null);
        } else {
          __args[1](null, ar.cause());
        }
      }) ;
      return that;
    } else if (__args.length === 1 && typeof __args[0] === 'string') {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["subscribe(java.lang.String,io.vertx.core.Handler)"](__args[0], function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(null, null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (__args.length === 2 && typeof __args[0] === 'object' && __args[0] instanceof Array && typeof __args[1] === 'function') {
      j_kafkaConsumer["subscribe(java.util.Set,io.vertx.core.Handler)"](utils.convParamSetBasicOther(__args[0]), function(ar) {
        if (ar.succeeded()) {
          __args[1](null, null);
        } else {
          __args[1](null, ar.cause());
        }
      }) ;
      return that;
    } else if (__args.length === 1 && typeof __args[0] === 'object' && __args[0] instanceof Array) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["subscribe(java.util.Set,io.vertx.core.Handler)"](utils.convParamSetBasicOther(__args[0]), function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(null, null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_subscribe != 'undefined') {
      return __super_subscribe.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };
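
  // Usage sketch: subscribe to one topic with the callback form, or to
  // several with the promise-returning form; topic names are placeholders:
  //
  //   consumer.subscribe("a-single-topic", function (res, err) {
  //     if (err == null) {
  //       console.log("subscribed");
  //     }
  //   });
  //
  //   var fut = consumer.subscribe(["topic1", "topic2"]); // no handler: returns a Future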

  /**
   Manually assign a list of partitions to this consumer.
   <p>
   Due to internal buffering of messages, when reassigning
   the old set of partitions may remain in effect
   (as observed by the record handler)
   until some time <em>after</em> the given <code>completionHandler</code>
   is called. In contrast, once the given <code>completionHandler</code>
   has been called the {@link KafkaConsumer#batchHandler} will only see messages
   consistent with the new set of partitions.

   @public
   @param topicPartitions {Array.<Object>} partitions to assign 
   @param completionHandler {function} handler called when the operation completes 
   @return {KafkaConsumer} current KafkaConsumer instance
   */
  this.assign =  function() {
    var __args = arguments;
    if (__args.length === 2 && (typeof __args[0] === 'object' && __args[0] != null) && typeof __args[1] === 'function') {
      j_kafkaConsumer["assign(io.vertx.kafka.client.common.TopicPartition,io.vertx.core.Handler)"](__args[0]  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(__args[0]))) : null, function(ar) {
        if (ar.succeeded()) {
          __args[1](null, null);
        } else {
          __args[1](null, ar.cause());
        }
      }) ;
      return that;
    } else if (__args.length === 1 && (typeof __args[0] === 'object' && __args[0] != null)) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["assign(io.vertx.kafka.client.common.TopicPartition,io.vertx.core.Handler)"](__args[0]  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(__args[0]))) : null, function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(null, null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (__args.length === 2 && typeof __args[0] === 'object' && __args[0] instanceof Array && typeof __args[1] === 'function') {
      j_kafkaConsumer["assign(java.util.Set,io.vertx.core.Handler)"](utils.convParamSetDataObjectAnnotated(__args[0], function(json) { return new TopicPartition(json); }), function(ar) {
        if (ar.succeeded()) {
          __args[1](null, null);
        } else {
          __args[1](null, ar.cause());
        }
      }) ;
      return that;
    } else if (__args.length === 1 && typeof __args[0] === 'object' && __args[0] instanceof Array) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["assign(java.util.Set,io.vertx.core.Handler)"](utils.convParamSetDataObjectAnnotated(__args[0], function(json) { return new TopicPartition(json); }), function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(null, null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_assign != 'undefined') {
      return __super_assign.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };
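
  // Usage sketch: manual assignment bypasses consumer-group rebalancing;
  // the topic name and partition number are placeholders:
  //
  //   consumer.assign({ "topic": "a-single-topic", "partition": 0 }, function (res, err) {
  //     if (err == null) {
  //       console.log("partition assigned");
  //     }
  //   });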

  /**
   Get the set of partitions currently assigned to this consumer.

   @public
   @param handler {function} handler called when the operation completes 
   @return {KafkaConsumer} current KafkaConsumer instance
   */
  this.assignment =  function(handler) {
    var __args = arguments;
    if (__args.length === 1 && typeof __args[0] === 'function') {
      j_kafkaConsumer["assignment(io.vertx.core.Handler)"](function(ar) {
        if (ar.succeeded()) {
          handler(utils.convReturnListSetDataObjectAnnotated(ar.result()), null);
        } else {
          handler(null, ar.cause());
        }
      }) ;
      return that;
    } else if (__args.length === 0) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["assignment(io.vertx.core.Handler)"](function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(utils.convReturnListSetDataObjectAnnotated(ar.result()), null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_assignment != 'undefined') {
      return __super_assignment.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };

  /**
   Unsubscribe from the topics currently subscribed with {@link KafkaConsumer#subscribe}.

   @public
   @param completionHandler {function} handler called when the operation completes 
   @return {KafkaConsumer} current KafkaConsumer instance
   */
  this.unsubscribe =  function(completionHandler) {
    var __args = arguments;
    if (__args.length === 1 && typeof __args[0] === 'function') {
      j_kafkaConsumer["unsubscribe(io.vertx.core.Handler)"](function(ar) {
        if (ar.succeeded()) {
          completionHandler(null, null);
        } else {
          completionHandler(null, ar.cause());
        }
      }) ;
      return that;
    } else if (__args.length === 0) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["unsubscribe(io.vertx.core.Handler)"](function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(null, null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_unsubscribe != 'undefined') {
      return __super_unsubscribe.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };

  /**
   Get the current subscription.

   @public
   @param handler {function} handler called when the operation completes 
   @return {KafkaConsumer} current KafkaConsumer instance
   */
  this.subscription =  function(handler) {
    var __args = arguments;
    if (__args.length === 1 && typeof __args[0] === 'function') {
      j_kafkaConsumer["subscription(io.vertx.core.Handler)"](function(ar) {
        if (ar.succeeded()) {
          handler(utils.convReturnSet(ar.result()), null);
        } else {
          handler(null, ar.cause());
        }
      }) ;
      return that;
    } else if (__args.length === 0) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["subscription(io.vertx.core.Handler)"](function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(utils.convReturnSet(ar.result()), null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_subscription != 'undefined') {
      return __super_subscription.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };

  /**
   Get the set of partitions that were previously paused by a call to {@link KafkaConsumer#pause}.

   @public
   @param handler {function} handler called when the operation completes 
   */
  this.paused =  function(handler) {
    var __args = arguments;
    if (__args.length === 1 && typeof __args[0] === 'function') {
      j_kafkaConsumer["paused(io.vertx.core.Handler)"](function(ar) {
        if (ar.succeeded()) {
          handler(utils.convReturnListSetDataObjectAnnotated(ar.result()), null);
        } else {
          handler(null, ar.cause());
        }
      });
    } else if (__args.length === 0) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["paused(io.vertx.core.Handler)"](function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(utils.convReturnListSetDataObjectAnnotated(ar.result()), null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_paused != 'undefined') {
      return __super_paused.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };

  /**
   Set the handler called when topic partitions are revoked from the consumer.

   @public
   @param handler {function} handler called on revoked topic partitions 
   @return {KafkaConsumer} current KafkaConsumer instance
   */
  this.partitionsRevokedHandler =  function(handler) {
    var __args = arguments;
    if (__args.length === 1 && typeof __args[0] === 'function') {
      j_kafkaConsumer["partitionsRevokedHandler(io.vertx.core.Handler)"](function(jVal) {
        handler(utils.convReturnListSetDataObjectAnnotated(jVal));
      }) ;
      return that;
    } else if (typeof __super_partitionsRevokedHandler != 'undefined') {
      return __super_partitionsRevokedHandler.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };

  /**
   Set the handler called when topic partitions are assigned to the consumer.

   @public
   @param handler {function} handler called on assigned topic partitions 
   @return {KafkaConsumer} current KafkaConsumer instance
   */
  this.partitionsAssignedHandler =  function(handler) {
    var __args = arguments;
    if (__args.length === 1 && typeof __args[0] === 'function') {
      j_kafkaConsumer["partitionsAssignedHandler(io.vertx.core.Handler)"](function(jVal) {
        handler(utils.convReturnListSetDataObjectAnnotated(jVal));
      }) ;
      return that;
    } else if (typeof __super_partitionsAssignedHandler != 'undefined') {
      return __super_partitionsAssignedHandler.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };
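
  // Usage sketch: observe rebalances; each handler receives an array of
  // topic partition objects with "topic" and "partition" fields:
  //
  //   consumer.partitionsAssignedHandler(function (topicPartitions) {
  //     topicPartitions.forEach(function (tp) {
  //       console.log("assigned " + tp.topic + "/" + tp.partition);
  //     });
  //   });
  //   consumer.partitionsRevokedHandler(function (topicPartitions) {
  //     console.log(topicPartitions.length + " partitions revoked");
  //   });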

  /**
   Overrides the fetch offsets that the consumer will use on the next poll.
   <p>
   Due to internal buffering of messages, the record handler will
   continue to observe messages fetched with respect to the old offset
   until some time <em>after</em> the given <code>completionHandler</code>
   is called. In contrast, once the given <code>completionHandler</code>
   has been called the {@link KafkaConsumer#batchHandler} will only see messages
   consistent with the new offset.

   @public
   @param topicPartition {Object} topic partition on which to seek 
   @param offset {number} offset to seek to inside the topic partition 
   @param completionHandler {function} handler called when the operation completes 
   @return {KafkaConsumer} current KafkaConsumer instance
   */
  this.seek =  function(topicPartition, offset, completionHandler) {
    var __args = arguments;
    if (__args.length === 3 && (typeof __args[0] === 'object' && __args[0] != null) && typeof __args[1] ==='number' && typeof __args[2] === 'function') {
      j_kafkaConsumer["seek(io.vertx.kafka.client.common.TopicPartition,long,io.vertx.core.Handler)"](topicPartition  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(topicPartition))) : null, offset, function(ar) {
        if (ar.succeeded()) {
          completionHandler(null, null);
        } else {
          completionHandler(null, ar.cause());
        }
      }) ;
      return that;
    } else if (__args.length === 2 && (typeof __args[0] === 'object' && __args[0] != null) && typeof __args[1] ==='number') {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["seek(io.vertx.kafka.client.common.TopicPartition,long,io.vertx.core.Handler)"](topicPartition  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(topicPartition))) : null, offset, function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(null, null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_seek != 'undefined') {
      return __super_seek.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };
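
  // Usage sketch: rewind a partition to offset 10; topic name and
  // partition number are placeholders:
  //
  //   consumer.seek({ "topic": "test", "partition": 0 }, 10, function (res, err) {
  //     if (err == null) {
  //       console.log("seek done, re-reading from offset 10");
  //     }
  //   });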

  /**
   Seek to the first offset for each of the given partitions.
   <p>
   Due to internal buffering of messages, the record handler will
   continue to observe messages fetched with respect to the old offset
   until some time <em>after</em> the given <code>completionHandler</code>
   is called. In contrast, once the given <code>completionHandler</code>
   has been called the {@link KafkaConsumer#batchHandler} will only see messages
   consistent with the new offset.

   @public
   @param topicPartitions {Array.<Object>} topic partitions on which to seek 
   @param completionHandler {function} handler called when the operation completes 
   @return {KafkaConsumer} current KafkaConsumer instance
   */
  this.seekToBeginning =  function() {
    var __args = arguments;
    if (__args.length === 2 && (typeof __args[0] === 'object' && __args[0] != null) && typeof __args[1] === 'function') {
      j_kafkaConsumer["seekToBeginning(io.vertx.kafka.client.common.TopicPartition,io.vertx.core.Handler)"](__args[0]  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(__args[0]))) : null, function(ar) {
        if (ar.succeeded()) {
          __args[1](null, null);
        } else {
          __args[1](null, ar.cause());
        }
      }) ;
      return that;
    } else if (__args.length === 1 && (typeof __args[0] === 'object' && __args[0] != null)) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["seekToBeginning(io.vertx.kafka.client.common.TopicPartition,io.vertx.core.Handler)"](__args[0]  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(__args[0]))) : null, function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(null, null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (__args.length === 2 && typeof __args[0] === 'object' && __args[0] instanceof Array && typeof __args[1] === 'function') {
      j_kafkaConsumer["seekToBeginning(java.util.Set,io.vertx.core.Handler)"](utils.convParamSetDataObjectAnnotated(__args[0], function(json) { return new TopicPartition(json); }), function(ar) {
        if (ar.succeeded()) {
          __args[1](null, null);
        } else {
          __args[1](null, ar.cause());
        }
      }) ;
      return that;
    } else if (__args.length === 1 && typeof __args[0] === 'object' && __args[0] instanceof Array) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["seekToBeginning(java.util.Set,io.vertx.core.Handler)"](utils.convParamSetDataObjectAnnotated(__args[0], function(json) { return new TopicPartition(json); }), function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(null, null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_seekToBeginning != 'undefined') {
      return __super_seekToBeginning.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };

  /**
   Seek to the last offset for each of the given partitions.
   <p>
   Due to internal buffering of messages, the record handler will
   continue to observe messages fetched with respect to the old offset
   until some time <em>after</em> the given <code>completionHandler</code>
   is called. In contrast, once the given <code>completionHandler</code>
   has been called the {@link KafkaConsumer#batchHandler} will only see messages
   consistent with the new offset.

   @public
   @param topicPartitions {Array.<Object>} topic partitions on which to seek 
   @param completionHandler {function} handler called when the operation completes 
   @return {KafkaConsumer} current KafkaConsumer instance
   */
  this.seekToEnd =  function() {
    var __args = arguments;
    if (__args.length === 2 && (typeof __args[0] === 'object' && __args[0] != null) && typeof __args[1] === 'function') {
      j_kafkaConsumer["seekToEnd(io.vertx.kafka.client.common.TopicPartition,io.vertx.core.Handler)"](__args[0]  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(__args[0]))) : null, function(ar) {
        if (ar.succeeded()) {
          __args[1](null, null);
        } else {
          __args[1](null, ar.cause());
        }
      }) ;
      return that;
    } else if (__args.length === 1 && (typeof __args[0] === 'object' && __args[0] != null)) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["seekToEnd(io.vertx.kafka.client.common.TopicPartition,io.vertx.core.Handler)"](__args[0]  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(__args[0]))) : null, function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(null, null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (__args.length === 2 && typeof __args[0] === 'object' && __args[0] instanceof Array && typeof __args[1] === 'function') {
      j_kafkaConsumer["seekToEnd(java.util.Set,io.vertx.core.Handler)"](utils.convParamSetDataObjectAnnotated(__args[0], function(json) { return new TopicPartition(json); }), function(ar) {
        if (ar.succeeded()) {
          __args[1](null, null);
        } else {
          __args[1](null, ar.cause());
        }
      }) ;
      return that;
    } else if (__args.length === 1 && typeof __args[0] === 'object' && __args[0] instanceof Array) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["seekToEnd(java.util.Set,io.vertx.core.Handler)"](utils.convParamSetDataObjectAnnotated(__args[0], function(json) { return new TopicPartition(json); }), function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(null, null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_seekToEnd != 'undefined') {
      return __super_seekToEnd.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };
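
  // Usage sketch: re-read a partition from its first offset, or skip to
  // its end; the array form seeks on several partitions at once:
  //
  //   var tp = { "topic": "test", "partition": 0 };
  //   consumer.seekToBeginning([tp], function (res, err) {
  //     if (err == null) {
  //       console.log("now at the beginning");
  //     }
  //   });
  //   consumer.seekToEnd(tp); // single-partition form, returns a Future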

  /**
   Commit current offsets for all the subscribed topics and partitions.

   @public
   @param completionHandler {function} handler called when the operation completes 
   */
  this.commit =  function(completionHandler) {
    var __args = arguments;
    if (__args.length === 1 && typeof __args[0] === 'function') {
      j_kafkaConsumer["commit(io.vertx.core.Handler)"](function(ar) {
        if (ar.succeeded()) {
          completionHandler(null, null);
        } else {
          completionHandler(null, ar.cause());
        }
      });
    } else if (__args.length === 0) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["commit(io.vertx.core.Handler)"](function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(null, null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_commit != 'undefined') {
      return __super_commit.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };
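
  // Usage sketch: manual commit makes sense with "enable.auto.commit"
  // set to "false" in the consumer configuration:
  //
  //   consumer.commit(function (res, err) {
  //     if (err == null) {
  //       console.log("last read offsets committed");
  //     }
  //   });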

  /**
   Get the last committed offset for the given partition (whether the commit happened by this process or another).

   @public
   @param topicPartition {Object} topic partition for which to get the last committed offset 
   @param handler {function} handler called when the operation completes 
   */
  this.committed =  function(topicPartition, handler) {
    var __args = arguments;
    if (__args.length === 2 && (typeof __args[0] === 'object' && __args[0] != null) && typeof __args[1] === 'function') {
      j_kafkaConsumer["committed(io.vertx.kafka.client.common.TopicPartition,io.vertx.core.Handler)"](topicPartition  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(topicPartition))) : null, function(ar) {
        if (ar.succeeded()) {
          handler(utils.convReturnDataObjectAnnotated(ar.result()), null);
        } else {
          handler(null, ar.cause());
        }
      });
    } else if (__args.length === 1 && (typeof __args[0] === 'object' && __args[0] != null)) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["committed(io.vertx.kafka.client.common.TopicPartition,io.vertx.core.Handler)"](topicPartition  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(topicPartition))) : null, function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(utils.convReturnDataObjectAnnotated(ar.result()), null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_committed != 'undefined') {
      return __super_committed.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };

  /**
   Get metadata about the partitions for a given topic.

   @public
   @param topic {string} topic for which to get partition info 
   @param handler {function} handler called when the operation completes 
   @return {KafkaConsumer} current KafkaConsumer instance
   */
  this.partitionsFor =  function(topic, handler) {
    var __args = arguments;
    if (__args.length === 2 && typeof __args[0] === 'string' && typeof __args[1] === 'function') {
      j_kafkaConsumer["partitionsFor(java.lang.String,io.vertx.core.Handler)"](topic, function(ar) {
        if (ar.succeeded()) {
          handler(utils.convReturnListSetDataObjectAnnotated(ar.result()), null);
        } else {
          handler(null, ar.cause());
        }
      }) ;
      return that;
    } else if (__args.length === 1 && typeof __args[0] === 'string') {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["partitionsFor(java.lang.String,io.vertx.core.Handler)"](topic, function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(utils.convReturnListSetDataObjectAnnotated(ar.result()), null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_partitionsFor != 'undefined') {
      return __super_partitionsFor.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };

  /**
   Set the handler to be used when batches of messages are fetched
   from the Kafka server. Batch handlers need to take care not to block
   the event loop when dealing with large batches. It is often better to process
   records individually using the record handler, see {@link KafkaConsumer#handler}.

   @public
   @param handler {function} handler called when batches of messages are fetched 
   @return {KafkaConsumer} current KafkaConsumer instance
   */
  this.batchHandler =  function(handler) {
    var __args = arguments;
    if (__args.length === 1 && typeof __args[0] === 'function') {
      j_kafkaConsumer["batchHandler(io.vertx.core.Handler)"](function(jVal) {
        handler(utils.convReturnVertxGen(KafkaConsumerRecords, jVal, undefined, undefined));
      }) ;
      return that;
    } else if (typeof __super_batchHandler != 'undefined') {
      return __super_batchHandler.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };
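
  // Usage sketch, assuming the KafkaConsumerRecords wrapper exposes
  // size() and recordAt(i) as in the Java API:
  //
  //   consumer.batchHandler(function (records) {
  //     for (var i = 0; i < records.size(); i++) {
  //       console.log("value=" + records.recordAt(i).value());
  //     }
  //   });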

  /**
   Close the consumer.

   @public
   @param completionHandler {function} handler called when the operation completes 
   */
  this.close =  function(completionHandler) {
    var __args = arguments;
    if (__args.length === 1 && typeof __args[0] === 'function') {
      j_kafkaConsumer["close(io.vertx.core.Handler)"](function(ar) {
        if (ar.succeeded()) {
          completionHandler(null, null);
        } else {
          completionHandler(null, ar.cause());
        }
      });
    } else if (__args.length === 0) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["close(io.vertx.core.Handler)"](function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(null, null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_close != 'undefined') {
      return __super_close.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };

  /**
   Get the offset of the next record that will be fetched (if a record with that offset exists).

   @public
   @param partition {Object} The partition to get the position for 
   @param handler {function} handler called when the operation completes 
   */
  this.position =  function(partition, handler) {
    var __args = arguments;
    if (__args.length === 2 && (typeof __args[0] === 'object' && __args[0] != null) && typeof __args[1] === 'function') {
      j_kafkaConsumer["position(io.vertx.kafka.client.common.TopicPartition,io.vertx.core.Handler)"](partition  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(partition))) : null, function(ar) {
        if (ar.succeeded()) {
          handler(utils.convReturnLong(ar.result()), null);
        } else {
          handler(null, ar.cause());
        }
      });
    } else if (__args.length === 1 && (typeof __args[0] === 'object' && __args[0] != null)) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["position(io.vertx.kafka.client.common.TopicPartition,io.vertx.core.Handler)"](partition  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(partition))) : null, function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(utils.convReturnLong(ar.result()), null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_position != 'undefined') {
      return __super_position.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };

  /**
   Look up the offset for the given partition by timestamp. Note: the result
   might be null if no offset can be found for the given timestamp, e.g. when
   the timestamp refers to the future.

   @public
   @param topicPartition {Object} TopicPartition to query 
   @param timestamp {number} timestamp to be used in the query 
   @param handler {function} handler called when the operation completes 
   */
  this.offsetsForTimes =  function(topicPartition, timestamp, handler) {
    var __args = arguments;
    if (__args.length === 3 && (typeof __args[0] === 'object' && __args[0] != null) && typeof __args[1] ==='number' && typeof __args[2] === 'function') {
      j_kafkaConsumer["offsetsForTimes(io.vertx.kafka.client.common.TopicPartition,java.lang.Long,io.vertx.core.Handler)"](topicPartition  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(topicPartition))) : null, utils.convParamLong(timestamp), function(ar) {
        if (ar.succeeded()) {
          handler(utils.convReturnDataObjectAnnotated(ar.result()), null);
        } else {
          handler(null, ar.cause());
        }
      });
    } else if (__args.length === 2 && (typeof __args[0] === 'object' && __args[0] != null) && typeof __args[1] ==='number') {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["offsetsForTimes(io.vertx.kafka.client.common.TopicPartition,java.lang.Long,io.vertx.core.Handler)"](topicPartition  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(topicPartition))) : null, utils.convParamLong(timestamp), function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(utils.convReturnDataObjectAnnotated(ar.result()), null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_offsetsForTimes != 'undefined') {
      return __super_offsetsForTimes.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };
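
  // Usage sketch: find the first offset at or after a point in time; the
  // result is assumed to be a JSON object with "offset" and "timestamp"
  // fields, and may be null if no such offset exists:
  //
  //   var sixtySecondsAgo = Date.now() - 60000;
  //   consumer.offsetsForTimes({ "topic": "test", "partition": 0 }, sixtySecondsAgo, function (res, err) {
  //     if (err == null && res != null) {
  //       console.log("offset=" + res.offset + ", timestamp=" + res.timestamp);
  //     }
  //   });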

  /**
   Get the first offset for the given partition.

   @public
   @param topicPartition {Object} the partition for which to get the earliest offset 
   @param handler {function} handler called when the operation completes, with the earliest available offset for the given partition 
   */
  this.beginningOffsets =  function(topicPartition, handler) {
    var __args = arguments;
    if (__args.length === 2 && (typeof __args[0] === 'object' && __args[0] != null) && typeof __args[1] === 'function') {
      j_kafkaConsumer["beginningOffsets(io.vertx.kafka.client.common.TopicPartition,io.vertx.core.Handler)"](topicPartition  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(topicPartition))) : null, function(ar) {
        if (ar.succeeded()) {
          handler(utils.convReturnLong(ar.result()), null);
        } else {
          handler(null, ar.cause());
        }
      });
    } else if (__args.length === 1 && (typeof __args[0] === 'object' && __args[0] != null)) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["beginningOffsets(io.vertx.kafka.client.common.TopicPartition,io.vertx.core.Handler)"](topicPartition  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(topicPartition))) : null, function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(utils.convReturnLong(ar.result()), null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_beginningOffsets != 'undefined') {
      return __super_beginningOffsets.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };

  /**
   Get the last offset for the given partition. The last offset of a partition is the offset
   of the upcoming message, i.e. the offset of the last available message + 1.

   @public
   @param topicPartition {Object} the partition for which to get the end offset 
   @param handler {function} handler called when the operation completes, with the end offset for the given partition 
   */
  this.endOffsets =  function(topicPartition, handler) {
    var __args = arguments;
    if (__args.length === 2 && (typeof __args[0] === 'object' && __args[0] != null) && typeof __args[1] === 'function') {
      j_kafkaConsumer["endOffsets(io.vertx.kafka.client.common.TopicPartition,io.vertx.core.Handler)"](topicPartition  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(topicPartition))) : null, function(ar) {
        if (ar.succeeded()) {
          handler(utils.convReturnLong(ar.result()), null);
        } else {
          handler(null, ar.cause());
        }
      });
    } else if (__args.length === 1 && (typeof __args[0] === 'object' && __args[0] != null)) {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["endOffsets(io.vertx.kafka.client.common.TopicPartition,io.vertx.core.Handler)"](topicPartition  != null ? new TopicPartition(new JsonObject(Java.asJSONCompatible(topicPartition))) : null, function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(utils.convReturnLong(ar.result()), null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_endOffsets != 'undefined') {
      return __super_endOffsets.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };
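
  // Usage sketch: both calls resolve to a plain number; together they
  // bound the currently readable offset range of a partition:
  //
  //   var tp = { "topic": "test", "partition": 0 };
  //   consumer.beginningOffsets(tp, function (first, err1) {
  //     consumer.endOffsets(tp, function (last, err2) {
  //       if (err1 == null && err2 == null) {
  //         console.log("readable range: [" + first + ", " + last + ")");
  //       }
  //     });
  //   });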

  /**
   Sets the poll timeout (in ms) for the underlying native Kafka Consumer. Defaults to 1000.
   Setting the timeout to a lower value results in a more 'responsive' client, because it will block for a shorter period
   if no data is available in the assigned partitions, and therefore allows subsequent actions to be executed with a shorter
   delay. At the same time, the client will poll more frequently and thus potentially create a higher load on the Kafka broker.

   @public
   @param timeout {number} The time, in milliseconds, spent waiting in poll if data is not available in the buffer. If 0, returns immediately with any records that are available currently in the native Kafka consumer's buffer, else returns empty. Must not be negative. 
   @return {KafkaConsumer}
   */
  this.pollTimeout =  function(timeout) {
    var __args = arguments;
    if (__args.length === 1 && typeof __args[0] ==='number') {
      j_kafkaConsumer["pollTimeout(long)"](timeout) ;
      return that;
    } else if (typeof __super_pollTimeout != 'undefined') {
      return __super_pollTimeout.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };

  /**
   Executes a poll to get messages from Kafka.

   @public
   @param timeout {number} The time, in milliseconds, spent waiting in poll if data is not available in the buffer. If 0, returns immediately with any records that are available currently in the native Kafka consumer's buffer, else returns empty. Must not be negative. 
   @param handler {function} handler called after the poll with the batch of records (possibly empty) 
   */
  this.poll =  function(timeout, handler) {
    var __args = arguments;
    if (__args.length === 2 && typeof __args[0] ==='number' && typeof __args[1] === 'function') {
      j_kafkaConsumer["poll(long,io.vertx.core.Handler)"](timeout, function(ar) {
        if (ar.succeeded()) {
          handler(utils.convReturnVertxGen(KafkaConsumerRecords, ar.result(), undefined, undefined), null);
        } else {
          handler(null, ar.cause());
        }
      });
    } else if (__args.length === 1 && typeof __args[0] ==='number') {
      var __prom = Promise.promise();
      var __prom_completer_handler = function (result, cause) { if (cause === null) { __prom.complete(result); } else { __prom.fail(cause); } };
      j_kafkaConsumer["poll(long,io.vertx.core.Handler)"](timeout, function(ar) {
        if (ar.succeeded()) {
          __prom_completer_handler(utils.convReturnVertxGen(KafkaConsumerRecords, ar.result(), undefined, undefined), null);
        } else {
          __prom_completer_handler(null, ar.cause());
        }
      });
      return __prom.future();
    } else if (typeof __super_poll != 'undefined') {
      return __super_poll.apply(this, __args);
    }
    else throw new TypeError('function invoked with invalid arguments');
  };
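
  // Usage sketch: poll explicitly instead of installing a record handler;
  // size()/recordAt(i) are assumed from the KafkaConsumerRecords API:
  //
  //   consumer.pollTimeout(100);
  //   consumer.poll(1000, function (records, err) {
  //     if (err == null) {
  //       for (var i = 0; i < records.size(); i++) {
  //         console.log("value=" + records.recordAt(i).value());
  //       }
  //     }
  //   });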

  // A reference to the underlying Java delegate
  // NOTE! This is an internal API and must not be used in user code.
  // If you rely on this property your code is likely to break if we change it / remove it without warning.
  this._jdel = j_kafkaConsumer;
};

KafkaConsumer._jclass = utils.getJavaClass("io.vertx.kafka.client.consumer.KafkaConsumer");
KafkaConsumer._jtype = {
  accept: function(obj) {
    return KafkaConsumer._jclass.isInstance(obj._jdel);
  },
  wrap: function(jdel) {
    var obj = Object.create(KafkaConsumer.prototype, {});
    KafkaConsumer.apply(obj, arguments);
    return obj;
  },
  unwrap: function(obj) {
    return obj._jdel;
  }
};
KafkaConsumer._create = function(jdel) {
  var obj = Object.create(KafkaConsumer.prototype, {});
  KafkaConsumer.apply(obj, arguments);
  return obj;
};
/**
 Create a new KafkaConsumer instance

 @memberof module:vertx-kafka-client-js/kafka_consumer
 @param vertx {Vertx} Vert.x instance to use 
 @param config {Object.<string, string>} Kafka consumer configuration 
 @param keyType {function} class type for the key deserialization 
 @param valueType {function} class type for the value deserialization 
 @return {KafkaConsumer} an instance of the KafkaConsumer
 */
KafkaConsumer.create =  function() {
  var __args = arguments;
  if (__args.length === 2 && typeof __args[0] === 'object' && __args[0]._jdel && (typeof __args[1] === 'object' && __args[1] != null)) {
    return utils.convReturnVertxGen(KafkaConsumer, JKafkaConsumer["create(io.vertx.core.Vertx,java.util.Map)"](__args[0]._jdel, __args[1]), undefined, undefined) ;
  } else if (__args.length === 4 && typeof __args[0] === 'object' && __args[0]._jdel && (typeof __args[1] === 'object' && __args[1] != null) && typeof __args[2] === 'function' && typeof __args[3] === 'function') {
    return utils.convReturnVertxGen(KafkaConsumer, JKafkaConsumer["create(io.vertx.core.Vertx,java.util.Map,java.lang.Class,java.lang.Class)"](__args[0]._jdel, __args[1], utils.get_jclass(__args[2]), utils.get_jclass(__args[3])), utils.get_jtype(__args[2]), utils.get_jtype(__args[3])) ;
  } else throw new TypeError('function invoked with invalid arguments');
};
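
// Usage sketch: create a consumer from a verticle, where "vertx" is the
// Vert.x instance in scope; broker address and group id are placeholders:
//
//   var KafkaConsumer = require("vertx-kafka-client-js/kafka_consumer");
//   var config = {
//     "bootstrap.servers": "localhost:9092",
//     "key.deserializer": "org.apache.kafka.common.serialization.StringDeserializer",
//     "value.deserializer": "org.apache.kafka.common.serialization.StringDeserializer",
//     "group.id": "my_group",
//     "auto.offset.reset": "earliest",
//     "enable.auto.commit": "false"
//   };
//   var consumer = KafkaConsumer.create(vertx, config);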

module.exports = KafkaConsumer;