From 6f88fc64aff545eefe74057d4aca63bc6820256f Mon Sep 17 00:00:00 2001 From: nick Date: Sun, 11 Apr 2021 01:08:20 +0200 Subject: [PATCH 1/8] save work --- .circleci/config.yml | 52 + .gitignore | 7 + LICENSE | 31 + Makefile | 65 ++ README.md | 278 +++++- UPGRADE.md | 0 composer.json | 65 ++ docker/.env | 1 + docker/dev/php/Dockerfile | 41 + docker/dev/php/files/bin/php-ext-disable | 11 + docker/dev/php/files/bin/php-ext-enable | 20 + docker/dev/php/files/php/20-pcov.ini | 1 + docker/docker-compose.yml | 13 + infection.json | 18 + phpcs.xml | 8 + phpstan.neon | 3 + phpunit.xml | 41 + .../KafkaConsumerRebalanceCallback.php | 42 + src/Callback/KafkaErrorCallback.php | 35 + .../KafkaProducerDeliveryReportCallback.php | 35 + src/Configuration/KafkaConfiguration.php | 81 ++ src/Consumer/AbstractKafkaConsumer.php | 244 +++++ src/Consumer/KafkaConsumerBuilder.php | 314 ++++++ .../KafkaConsumerBuilderInterface.php | 125 +++ src/Consumer/KafkaConsumerInterface.php | 108 +++ src/Consumer/KafkaHighLevelConsumer.php | 306 ++++++ .../KafkaHighLevelConsumerInterface.php | 58 ++ src/Consumer/TopicSubscription.php | 72 ++ src/Consumer/TopicSubscriptionInterface.php | 30 + src/Exception/AvroEncoderException.php | 11 + src/Exception/AvroSchemaRegistryException.php | 12 + src/Exception/KafkaBrokerException.php | 9 + .../KafkaConsumerAssignmentException.php | 9 + .../KafkaConsumerBuilderException.php | 12 + .../KafkaConsumerCommitException.php | 9 + .../KafkaConsumerConsumeException.php | 43 + .../KafkaConsumerEndOfPartitionException.php | 10 + .../KafkaConsumerRequestException.php | 9 + .../KafkaConsumerSubscriptionException.php | 11 + .../KafkaConsumerTimeoutException.php | 10 + src/Exception/KafkaMessageException.php | 13 + src/Exception/KafkaProducerException.php | 13 + ...KafkaProducerTransactionAbortException.php | 11 + ...KafkaProducerTransactionFatalException.php | 11 + ...KafkaProducerTransactionRetryException.php | 10 + src/Exception/KafkaRebalanceException.php | 10 + src/Message/AbstractKafkaMessage.php | 74 ++ src/Message/Decoder/AvroDecoder.php | 111 +++ src/Message/Decoder/AvroDecoderInterface.php | 15 + src/Message/Decoder/DecoderInterface.php | 16 + src/Message/Decoder/JsonDecoder.php | 31 + src/Message/Decoder/NullDecoder.php | 20 + src/Message/Encoder/AvroEncoder.php | 134 +++ src/Message/Encoder/AvroEncoderInterface.php | 15 + src/Message/Encoder/EncoderInterface.php | 16 + src/Message/Encoder/JsonEncoder.php | 22 + src/Message/Encoder/NullEncoder.php | 20 + src/Message/KafkaAvroSchema.php | 73 ++ src/Message/KafkaAvroSchemaInterface.php | 32 + src/Message/KafkaConsumerMessage.php | 63 ++ src/Message/KafkaConsumerMessageInterface.php | 19 + src/Message/KafkaMessageInterface.php | 36 + src/Message/KafkaProducerMessage.php | 82 ++ src/Message/KafkaProducerMessageInterface.php | 41 + src/Message/Registry/AvroSchemaRegistry.php | 147 +++ .../Registry/AvroSchemaRegistryInterface.php | 64 ++ src/Producer/KafkaProducer.php | 273 ++++++ src/Producer/KafkaProducerBuilder.php | 192 ++++ .../KafkaProducerBuilderInterface.php | 60 ++ src/Producer/KafkaProducerInterface.php | 113 +++ .../KafkaConsumerRebalanceCallbackTest.php | 94 ++ .../Unit/Callback/KafkaErrorCallbackTest.php | 28 + ...afkaProducerDeliveryReportCallbackTest.php | 53 + tests/Unit/Conf/KafkaConfigurationTest.php | 115 +++ .../Consumer/KafkaConsumerBuilderTest.php | 336 +++++++ .../Consumer/KafkaHighLevelConsumerTest.php | 905 ++++++++++++++++++ tests/Unit/Consumer/TopicSubscriptionTest.php | 33 + .../KafkaConsumerConsumeExceptionTest.php 
| 45 + .../Unit/Message/Decoder/AvroDecoderTest.php | 161 ++++ .../Unit/Message/Decoder/JsonDecoderTest.php | 44 + .../Unit/Message/Decoder/NullDecoderTest.php | 26 + .../Unit/Message/Encoder/AvroEncoderTest.php | 172 ++++ .../Unit/Message/Encoder/JsonEncoderTest.php | 44 + .../Unit/Message/Encoder/NullEncoderTest.php | 26 + tests/Unit/Message/KafkaAvroSchemaTest.php | 54 ++ .../Unit/Message/KafkaConsumerMessageTest.php | 44 + .../Unit/Message/KafkaProducerMessageTest.php | 64 ++ .../Registry/AvroSchemaRegistryTest.php | 183 ++++ .../Producer/KafkaProducerBuilderTest.php | 170 ++++ tests/Unit/Producer/KafkaProducerTest.php | 528 ++++++++++ tests/bootstrap.php | 5 + 91 files changed, 7116 insertions(+), 1 deletion(-) create mode 100644 .circleci/config.yml create mode 100644 .gitignore create mode 100644 LICENSE create mode 100644 Makefile create mode 100644 UPGRADE.md create mode 100644 composer.json create mode 100644 docker/.env create mode 100644 docker/dev/php/Dockerfile create mode 100755 docker/dev/php/files/bin/php-ext-disable create mode 100755 docker/dev/php/files/bin/php-ext-enable create mode 100644 docker/dev/php/files/php/20-pcov.ini create mode 100644 docker/docker-compose.yml create mode 100644 infection.json create mode 100644 phpcs.xml create mode 100644 phpstan.neon create mode 100644 phpunit.xml create mode 100644 src/Callback/KafkaConsumerRebalanceCallback.php create mode 100644 src/Callback/KafkaErrorCallback.php create mode 100644 src/Callback/KafkaProducerDeliveryReportCallback.php create mode 100644 src/Configuration/KafkaConfiguration.php create mode 100644 src/Consumer/AbstractKafkaConsumer.php create mode 100644 src/Consumer/KafkaConsumerBuilder.php create mode 100644 src/Consumer/KafkaConsumerBuilderInterface.php create mode 100644 src/Consumer/KafkaConsumerInterface.php create mode 100644 src/Consumer/KafkaHighLevelConsumer.php create mode 100644 src/Consumer/KafkaHighLevelConsumerInterface.php create mode 100644 src/Consumer/TopicSubscription.php create mode 100644 src/Consumer/TopicSubscriptionInterface.php create mode 100644 src/Exception/AvroEncoderException.php create mode 100644 src/Exception/AvroSchemaRegistryException.php create mode 100644 src/Exception/KafkaBrokerException.php create mode 100644 src/Exception/KafkaConsumerAssignmentException.php create mode 100644 src/Exception/KafkaConsumerBuilderException.php create mode 100644 src/Exception/KafkaConsumerCommitException.php create mode 100644 src/Exception/KafkaConsumerConsumeException.php create mode 100644 src/Exception/KafkaConsumerEndOfPartitionException.php create mode 100644 src/Exception/KafkaConsumerRequestException.php create mode 100644 src/Exception/KafkaConsumerSubscriptionException.php create mode 100644 src/Exception/KafkaConsumerTimeoutException.php create mode 100644 src/Exception/KafkaMessageException.php create mode 100644 src/Exception/KafkaProducerException.php create mode 100644 src/Exception/KafkaProducerTransactionAbortException.php create mode 100644 src/Exception/KafkaProducerTransactionFatalException.php create mode 100644 src/Exception/KafkaProducerTransactionRetryException.php create mode 100644 src/Exception/KafkaRebalanceException.php create mode 100644 src/Message/AbstractKafkaMessage.php create mode 100644 src/Message/Decoder/AvroDecoder.php create mode 100644 src/Message/Decoder/AvroDecoderInterface.php create mode 100644 src/Message/Decoder/DecoderInterface.php create mode 100644 src/Message/Decoder/JsonDecoder.php create mode 100644 
src/Message/Decoder/NullDecoder.php create mode 100644 src/Message/Encoder/AvroEncoder.php create mode 100644 src/Message/Encoder/AvroEncoderInterface.php create mode 100644 src/Message/Encoder/EncoderInterface.php create mode 100644 src/Message/Encoder/JsonEncoder.php create mode 100644 src/Message/Encoder/NullEncoder.php create mode 100644 src/Message/KafkaAvroSchema.php create mode 100644 src/Message/KafkaAvroSchemaInterface.php create mode 100644 src/Message/KafkaConsumerMessage.php create mode 100644 src/Message/KafkaConsumerMessageInterface.php create mode 100644 src/Message/KafkaMessageInterface.php create mode 100644 src/Message/KafkaProducerMessage.php create mode 100644 src/Message/KafkaProducerMessageInterface.php create mode 100644 src/Message/Registry/AvroSchemaRegistry.php create mode 100644 src/Message/Registry/AvroSchemaRegistryInterface.php create mode 100644 src/Producer/KafkaProducer.php create mode 100644 src/Producer/KafkaProducerBuilder.php create mode 100644 src/Producer/KafkaProducerBuilderInterface.php create mode 100644 src/Producer/KafkaProducerInterface.php create mode 100644 tests/Unit/Callback/KafkaConsumerRebalanceCallbackTest.php create mode 100644 tests/Unit/Callback/KafkaErrorCallbackTest.php create mode 100644 tests/Unit/Callback/KafkaProducerDeliveryReportCallbackTest.php create mode 100644 tests/Unit/Conf/KafkaConfigurationTest.php create mode 100644 tests/Unit/Consumer/KafkaConsumerBuilderTest.php create mode 100644 tests/Unit/Consumer/KafkaHighLevelConsumerTest.php create mode 100644 tests/Unit/Consumer/TopicSubscriptionTest.php create mode 100644 tests/Unit/Exception/KafkaConsumerConsumeExceptionTest.php create mode 100644 tests/Unit/Message/Decoder/AvroDecoderTest.php create mode 100644 tests/Unit/Message/Decoder/JsonDecoderTest.php create mode 100644 tests/Unit/Message/Decoder/NullDecoderTest.php create mode 100644 tests/Unit/Message/Encoder/AvroEncoderTest.php create mode 100644 tests/Unit/Message/Encoder/JsonEncoderTest.php create mode 100644 tests/Unit/Message/Encoder/NullEncoderTest.php create mode 100644 tests/Unit/Message/KafkaAvroSchemaTest.php create mode 100644 tests/Unit/Message/KafkaConsumerMessageTest.php create mode 100644 tests/Unit/Message/KafkaProducerMessageTest.php create mode 100644 tests/Unit/Message/Registry/AvroSchemaRegistryTest.php create mode 100644 tests/Unit/Producer/KafkaProducerBuilderTest.php create mode 100644 tests/Unit/Producer/KafkaProducerTest.php create mode 100644 tests/bootstrap.php diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 0000000..32750ef --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,52 @@ +version: 2.1 + +orbs: + ci-caching: jobcloud/ci-caching@1.0.2 + ci-php: jobcloud/ci-php@0.32 + +workflows: + test-php-simple-kafka-lib: + jobs: + - ci-caching/build-docker-images: + dockerComposeFile: "./docker/docker-compose.yml" + - ci-php/install-dependencies: + dockerComposeFile: "./docker/docker-compose.yml" + dependencyCheckSumFile: "./composer.json" + requires: + - ci-caching/build-docker-images + - coverage: + requires: + - ci-php/install-dependencies + - ci-php/code-style: + dockerComposeFile: "./docker/docker-compose.yml" + dependencyCheckSumFile: "./composer.json" + requires: + - ci-php/install-dependencies + - ci-php/static-analysis: + dockerComposeFile: "./docker/docker-compose.yml" + dependencyCheckSumFile: "./composer.json" + requires: + - ci-php/install-dependencies + - ci-php/infection-testing: + dockerComposeFile: "./docker/docker-compose.yml" + 
dependencyCheckSumFile: "./composer.json" + requires: + - ci-php/install-dependencies + +jobs: + coverage: + machine: true + steps: + - ci-php/coverage-command: + dockerComposeFile: "./docker/docker-compose.yml" + dependencyCheckSumFile: "./composer.json" + - run: + name: Download cc-test-reporter + command: | + mkdir -p tmp/ + curl -L https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64 > ./tmp/cc-test-reporter + chmod +x ./tmp/cc-test-reporter + - run: + name: Upload coverage results to Code Climate + command: | + ./tmp/cc-test-reporter after-build -p /var/www/html --coverage-input-type clover --exit-code $? diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..eb7e6e6 --- /dev/null +++ b/.gitignore @@ -0,0 +1,7 @@ +/build/ +/vendor/ +/.idea +/composer.symlink +composer.lock +.phpunit.result.cache +clover.xml diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..e5b561d --- /dev/null +++ b/LICENSE @@ -0,0 +1,31 @@ +BSD 3-Clause License + +Copyright (c) 2019 JobCloud AG +Copyright (c) 2021, Nick Chiu +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..014e83a --- /dev/null +++ b/Makefile @@ -0,0 +1,65 @@ +.PHONY: clean code-style coverage help test static-analysis install-dependencies install-dependencies-lowest update-dependencies pcov-enable pcov-disable infection-testing +.DEFAULT_GOAL := test + +PHPUNIT = ./vendor/bin/phpunit -c ./phpunit.xml +PHPSTAN = ./vendor/bin/phpstan +PHPCS = ./vendor/bin/phpcs --extensions=php +CONSOLE = ./bin/console +INFECTION = ./vendor/bin/infection + +clean: + rm -rf ./build ./vendor + +code-style: pcov-disable + mkdir -p build/logs/phpcs + ${PHPCS} --report-full --report-gitblame --standard=PSR12 ./src --exclude=Generic.Commenting.Todo --report-junit=build/logs/phpcs/junit.xml + +coverage: pcov-enable + ${PHPUNIT} && ./vendor/bin/coverage-check clover.xml 100 + +test: pcov-disable + ${PHPUNIT} + +static-analysis: pcov-disable + mkdir -p build/logs/phpstan + ${PHPSTAN} analyse --no-progress --memory-limit=64 + +update-dependencies: + composer update + +install-dependencies: + composer install + +install-dependencies-lowest: + composer install --prefer-lowest + +infection-testing: + make coverage + cp -f build/logs/phpunit/junit.xml build/logs/phpunit/coverage/junit.xml + sudo php-ext-disable pcov + ${INFECTION} --coverage=build/logs/phpunit/coverage --min-msi=93 --threads=`nproc` + sudo php-ext-enable pcov + +pcov-enable: + sudo php-ext-enable pcov + +pcov-disable: + sudo php-ext-disable pcov + +help: + # Usage: + # make [OPTION=value] + # + # Targets: + # clean Cleans the coverage and the vendor directory + # code-style Check codestyle using phpcs + # coverage Generate code coverage (html, clover) + # help You're looking at it! + # test (default) Run all the tests with phpunit + # static-analysis Run static analysis using phpstan + # infection-testing Run infection/mutation testing + # install-dependencies Run composer install + # install-dependencies-lowest Run composer install with --prefer-lowest + # update-dependencies Run composer update + # pcov-enable Enable pcov + # pcov-disable Disable pcov diff --git a/README.md b/README.md index f695ee6..33f0b90 100644 --- a/README.md +++ b/README.md @@ -1 +1,277 @@ -# php-simple-kafka-lib \ No newline at end of file +# php-simple-kafka-lib + +[![CircleCI](https://circleci.com/gh/php-kafka/php-simple-kafka-lib.svg?style=shield)](https://circleci.com/gh/php-kafka/php-simple-kafka-lib) +[![Maintainability](https://api.codeclimate.com/v1/badges/1ca1ebf269b4d40bb52c/maintainability)](https://codeclimate.com/github/php-kafka/php-simple-kafka-lib/maintainability) +[![Test Coverage](https://api.codeclimate.com/v1/badges/1ca1ebf269b4d40bb52c/test_coverage)](https://codeclimate.com/github/php-kafka/php-simple-kafka-lib/test_coverage) +[![Latest Stable Version](https://poser.pugx.org/php-kafka/php-simple-kafka-lib/v/stable)](https://packagist.org/packages/php-kafka/php-simple-kafka-lib) +[![Latest Unstable Version](https://poser.pugx.org/php-kafka/php-simple-kafka-lib/v/unstable)](https://packagist.org/packages/php-kafka/php-simple-kafka-lib) +[![Join the chat at https://gitter.im/php-kafka/php-simple-kafka-lib](https://badges.gitter.im/php-kafka/php-simple-kafka-lib.svg)](https://gitter.im/php-kafka/php-simple-kafka-lib?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) + +## Description +This is a library that makes it easier to use Kafka in your PHP project. 
+
+This library relies on [php-kafka/php-simple-kafka-client](https://github.com/php-kafka/php-simple-kafka-client).
+Avro support relies on [flix-tech/avro-serde-php](https://github.com/flix-tech/avro-serde-php).
+The [documentation](https://php-kafka.github.io/php-simple-kafka-client.github.io/) of the PHP extension
+can help you understand the internals of this library.
+
+## Requirements
+- php: ^7.3|^8.0
+- ext-simple_kafka_client: >=0.1.0
+- librdkafka: >=1.4.0
+
+## Installation
+```
+composer require php-kafka/php-simple-kafka-lib
+```
+
+### Enable Avro support
+If you need Avro support, run:
+```
+composer require flix-tech/avro-serde-php "~1.4"
+```
+
+## Credits
+This library was inspired by [jobcloud/php-kafka-lib](https://github.com/jobcloud/php-kafka-lib) :heart_eyes:
+
+## Usage
+
+### Simple Producer
+```php
+<?php
+
+use PhpKafka\Message\KafkaProducerMessage;
+use PhpKafka\Producer\KafkaProducerBuilder;
+
+$producer = KafkaProducerBuilder::create()
+    ->withAdditionalBroker('localhost:9092')
+    ->build();
+
+$message = KafkaProducerMessage::create('test-topic', 0)
+    ->withKey('asdf-asdf-asfd-asdf')
+    ->withBody('some test message payload')
+    ->withHeaders([ 'key' => 'value' ]);
+
+$producer->produce($message);
+
+// Shutdown producer, flush messages that are in the queue. Give up after 20s
+$result = $producer->flush(20000);
+```
+
+### Transactional producer
+```php
+<?php
+
+use PhpKafka\Exception\KafkaProducerTransactionAbortException;
+use PhpKafka\Exception\KafkaProducerTransactionFatalException;
+use PhpKafka\Exception\KafkaProducerTransactionRetryException;
+use PhpKafka\Message\KafkaProducerMessage;
+use PhpKafka\Producer\KafkaProducerBuilder;
+
+$producer = KafkaProducerBuilder::create()
+    ->withAdditionalBroker('localhost:9092')
+    ->build();
+
+$message = KafkaProducerMessage::create('test-topic', 0)
+    ->withKey('asdf-asdf-asfd-asdf')
+    ->withBody('some test message payload')
+    ->withHeaders([ 'key' => 'value' ]);
+
+try {
+    $producer->beginTransaction(10000);
+    $producer->produce($message);
+    $producer->commitTransaction(10000);
+} catch (KafkaProducerTransactionRetryException $e) {
+    // something went wrong, but you can retry the failed call (either beginTransaction or commitTransaction)
+} catch (KafkaProducerTransactionAbortException $e) {
+    // you need to call $producer->abortTransaction(10000); and try again
+} catch (KafkaProducerTransactionFatalException $e) {
+    // something went very wrong; re-create your producer, otherwise you could jeopardize the idempotency guarantees
+}
+
+// Shutdown producer, flush messages that are in the queue. Give up after 20s
+$result = $producer->flush(20000);
+```
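+
+For the abort case above, the important detail is that `abortTransaction()` has to be called before anything
+else is attempted. A minimal sketch of that flow, reusing `$producer` and `$message` from the example above
+(retrying exactly once is illustrative and not part of the library):
+
+```php
+try {
+    $producer->beginTransaction(10000);
+    $producer->produce($message);
+    $producer->commitTransaction(10000);
+} catch (KafkaProducerTransactionAbortException $e) {
+    // abort the failed transaction first ...
+    $producer->abortTransaction(10000);
+
+    // ... then try again in a fresh transaction
+    $producer->beginTransaction(10000);
+    $producer->produce($message);
+    $producer->commitTransaction(10000);
+}
+```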
+
+### Avro Producer
+To create an Avro producer, add the Avro encoder.
+
+```php
+<?php
+
+use FlixTech\AvroSerializer\Objects\RecordSerializer;
+use FlixTech\SchemaRegistryApi\Registry\BlockingRegistry;
+use FlixTech\SchemaRegistryApi\Registry\Cache\AvroObjectCacheAdapter;
+use FlixTech\SchemaRegistryApi\Registry\CachedRegistry;
+use FlixTech\SchemaRegistryApi\Registry\PromisingRegistry;
+use GuzzleHttp\Client;
+use PhpKafka\Message\Encoder\AvroEncoder;
+use PhpKafka\Message\KafkaAvroSchema;
+use PhpKafka\Message\KafkaProducerMessage;
+use PhpKafka\Message\Registry\AvroSchemaRegistry;
+use PhpKafka\Producer\KafkaProducerBuilder;
+
+$cachedRegistry = new CachedRegistry(
+    new BlockingRegistry(
+        new PromisingRegistry(
+            new Client(['base_uri' => 'kafka-schema-registry:9081'])
+        )
+    ),
+    new AvroObjectCacheAdapter()
+);
+
+$registry = new AvroSchemaRegistry($cachedRegistry);
+$recordSerializer = new RecordSerializer($cachedRegistry);
+
+// if no version is defined, the latest version will be used
+// if no schema definition is defined, the appropriate version will be fetched from the registry
+$registry->addBodySchemaMappingForTopic(
+    'test-topic',
+    new KafkaAvroSchema('bodySchemaName' /*, int $version, AvroSchema $definition */)
+);
+$registry->addKeySchemaMappingForTopic(
+    'test-topic',
+    new KafkaAvroSchema('keySchemaName' /*, int $version, AvroSchema $definition */)
+);
+
+// if you are only encoding key or value, you can pass that mode as an additional third argument
+// per default, both key and body will get encoded
+$encoder = new AvroEncoder($registry, $recordSerializer /*, AvroEncoderInterface::ENCODE_BODY */);
+
+$producer = KafkaProducerBuilder::create()
+    ->withAdditionalBroker('kafka:9092')
+    ->withEncoder($encoder)
+    ->build();
+
+$message = KafkaProducerMessage::create('test-topic', 0)
+    ->withKey('asdf-asdf-asfd-asdf')
+    ->withBody(['name' => 'someName'])
+    ->withHeaders([ 'key' => 'value' ]);
+
+$producer->produce($message);
+
+// Shutdown producer, flush messages that are in the queue. Give up after 20s
+$result = $producer->flush(20000);
+```
+
+**NOTE:** To improve producer latency you can install the `pcntl` extension.
+The php-simple-kafka-lib already has code in place, similar to what is described here:
+https://github.com/arnaud-lb/php-rdkafka#performance--low-latency-settings
+
+### Simple Consumer
+
+```php
+<?php
+
+use PhpKafka\Consumer\KafkaConsumerBuilder;
+use PhpKafka\Exception\KafkaConsumerConsumeException;
+use PhpKafka\Exception\KafkaConsumerEndOfPartitionException;
+use PhpKafka\Exception\KafkaConsumerTimeoutException;
+
+$consumer = KafkaConsumerBuilder::create()
+    ->withAdditionalConfig(
+        [
+            'compression.codec' => 'lz4',
+            'auto.commit.interval.ms' => 500
+        ]
+    )
+    ->withAdditionalBroker('kafka:9092')
+    ->withConsumerGroup('testGroup')
+    ->withAdditionalSubscription('test-topic')
+    ->build();
+
+$consumer->subscribe();
+
+while (true) {
+    try {
+        $message = $consumer->consume();
+        // your business logic
+        $consumer->commit($message);
+    } catch (KafkaConsumerTimeoutException $e) {
+        // no messages were read in a given time
+    } catch (KafkaConsumerEndOfPartitionException $e) {
+        // only occurs if enable.partition.eof is true (default: false)
+    } catch (KafkaConsumerConsumeException $e) {
+        // failed
+    }
+}
+```
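+
+If the blocking `commit()` call above becomes a bottleneck, the high level consumer also exposes a
+non-blocking `commitAsync()`. A minimal variation of the consume loop, assuming the same `$consumer`
+as in the simple consumer example:
+
+```php
+while (true) {
+    try {
+        $message = $consumer->consume();
+        // your business logic
+        $consumer->commitAsync($message);
+    } catch (KafkaConsumerTimeoutException $e) {
+        // no messages were read in a given time
+    } catch (KafkaConsumerEndOfPartitionException $e) {
+        // only occurs if enable.partition.eof is true (default: false)
+    } catch (KafkaConsumerConsumeException $e) {
+        // failed
+    }
+}
+```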
+
+### Avro Consumer
+To create an Avro consumer, add the Avro decoder.
+
+```php
+<?php
+
+use FlixTech\AvroSerializer\Objects\RecordSerializer;
+use FlixTech\SchemaRegistryApi\Registry\BlockingRegistry;
+use FlixTech\SchemaRegistryApi\Registry\Cache\AvroObjectCacheAdapter;
+use FlixTech\SchemaRegistryApi\Registry\CachedRegistry;
+use FlixTech\SchemaRegistryApi\Registry\PromisingRegistry;
+use GuzzleHttp\Client;
+use PhpKafka\Consumer\KafkaConsumerBuilder;
+use PhpKafka\Exception\KafkaConsumerConsumeException;
+use PhpKafka\Exception\KafkaConsumerEndOfPartitionException;
+use PhpKafka\Exception\KafkaConsumerTimeoutException;
+use PhpKafka\Message\Decoder\AvroDecoder;
+use PhpKafka\Message\KafkaAvroSchema;
+use PhpKafka\Message\Registry\AvroSchemaRegistry;
+
+$cachedRegistry = new CachedRegistry(
+    new BlockingRegistry(
+        new PromisingRegistry(
+            new Client(['base_uri' => 'kafka-schema-registry:9081'])
+        )
+    ),
+    new AvroObjectCacheAdapter()
+);
+
+$registry = new AvroSchemaRegistry($cachedRegistry);
+$recordSerializer = new RecordSerializer($cachedRegistry);
+
+// if no version is defined, the latest version will be used
+// if no schema definition is defined, the appropriate version will be fetched from the registry
+$registry->addBodySchemaMappingForTopic(
+    'test-topic',
+    new KafkaAvroSchema('bodySchema', 9 /* , AvroSchema $definition */)
+);
+$registry->addKeySchemaMappingForTopic(
+    'test-topic',
+    new KafkaAvroSchema('keySchema', 9 /* , AvroSchema $definition */)
+);
+
+// if you are only decoding key or value, you can pass that mode as an additional third argument
+// per default, both key and body will get decoded
+$decoder = new AvroDecoder($registry, $recordSerializer /*, AvroDecoderInterface::DECODE_BODY */);
+
+$consumer = KafkaConsumerBuilder::create()
+    ->withAdditionalConfig(
+        [
+            'compression.codec' => 'lz4',
+            'auto.commit.interval.ms' => 500
+        ]
+    )
+    ->withDecoder($decoder)
+    ->withAdditionalBroker('kafka:9092')
+    ->withConsumerGroup('testGroup')
+    ->withAdditionalSubscription('test-topic')
+    ->build();
+
+$consumer->subscribe();
+
+while (true) {
+    try {
+        $message = $consumer->consume();
+        // your business logic
+        $consumer->commit($message);
+    } catch (KafkaConsumerTimeoutException $e) {
+        // no messages were read in a given time
+    } catch (KafkaConsumerEndOfPartitionException $e) {
+        // only occurs if enable.partition.eof is true (default: false)
+    } catch (KafkaConsumerConsumeException $e) {
+        // failed
+    }
+}
+```
+
diff --git a/UPGRADE.md b/UPGRADE.md
new file mode 100644
index 0000000..e69de29
diff --git a/composer.json b/composer.json
new file mode 100644
index 0000000..0d5e410
--- /dev/null
+++ b/composer.json
@@ -0,0 +1,65 @@
+{
+    "name": "php-kafka/php-simple-kafka-lib",
+    "description": "PHP Simple Kafka library",
+    "license": [
+        "BSD-3-Clause"
+    ],
+    "authors": [
+        {
+            "name": "php-kafka",
+            "homepage": "https://php-kafka.com"
+        },
+        {
+            "name": "Nick",
+            "email": "coding.nikazu@gmail.com",
+            "homepage": "https://github.com/nick-zh"
+        }
+    ],
+    "keywords": [
+        "php",
+        "kafka",
+        "consumer",
+        "producer",
+        "simple-kafka-client"
+    ],
+    "require": {
+        "php": "^7.3|^8.0",
+        "ext-simple_kafka_client": "^0.1",
+        "ext-json": "*"
+    },
+    "conflict": {
+        "ext-rdkafka": "*",
+        "jobcloud/php-kafka-lib": "*"
+    },
+    "require-dev": {
+        "phpunit/phpunit": "^9.5",
+        "squizlabs/php_codesniffer": "^3.5.4",
+        "phpstan/phpstan": "^0.12",
+        "php-mock/php-mock-phpunit": "^2.6",
+        "rregeer/phpunit-coverage-check": "^0.3.1",
+        "johnkary/phpunit-speedtrap": "^3.1",
+        "flix-tech/avro-serde-php": "^1.4",
+        "infection/infection": "^0.21"
+    },
+    "config": {
+        "sort-packages": true
+    },
+    "autoload": {
+        "psr-4": {
+            "PhpKafka\\": "src/"
+        }
+    },
+    "suggest": {
+        "flix-tech/avro-serde-php": "Is needed for Avro support"
+    },
+    "extra": {
+        "branch-alias": {
+            "dev-master": "1.0-dev"
+        }
+    },
+    "support": {
+        "issues": "https://github.com/php-kafka/php-simple-kafka-lib/issues",
+        "doc": "https://php-kafka.github.io/php-simple-kafka-client.github.io",
+        "chat": ""
+    }
+}
diff --git a/docker/.env b/docker/.env
new file mode 100644
index 0000000..2cf4a59
--- /dev/null
+++ b/docker/.env
@@ -0,0 +1 @@
+COMPOSE_PROJECT_NAME=php-simple-kafka-lib
diff --git a/docker/dev/php/Dockerfile b/docker/dev/php/Dockerfile
new file mode 100644
index 0000000..ca978ac
--- /dev/null
+++ b/docker/dev/php/Dockerfile
@@ -0,0 +1,41 @@
+FROM php:7.3-cli-alpine3.13
+
+ARG
HOST_USER_ID +ARG HOST_USER + +# PHP: Copy configuration files & remove dist files +RUN mkdir /phpIni +COPY files/bin/ /usr/local/bin/ +COPY files/php/ /phpIni + +# SYS: Install required packages +RUN apk --no-cache upgrade && \ + apk --no-cache add bash git sudo openssh autoconf gcc g++ make + +RUN apk add librdkafka librdkafka-dev \ + --update-cache --repository http://dl-3.alpinelinux.org/alpine/edge/community + +# we need support for users with ID higher than 65k, so instead of using this: +#RUN adduser -u $HOST_USER_ID -D -H $HOST_USER +# we do it manually +RUN echo "$HOST_USER:x:$HOST_USER_ID:82:Linux User,,,:/home/$HOST_USER:" >> /etc/passwd && \ + echo "$HOST_USER:!:$(($(date +%s) / 60 / 60 / 24)):0:99999:7:::" >> /etc/shadow && \ + echo "$HOST_USER:x:$HOST_USER_ID:" >> /etc/group && \ + mkdir /home/$HOST_USER && \ + chown $HOST_USER:$HOST_USER /home/$HOST_USER && \ + echo "ALL ALL=NOPASSWD: ALL" >> /etc/sudoers && \ + addgroup $HOST_USER www-data + +# COMPOSER: install binary and prestissimo +RUN curl -sS https://getcomposer.org/installer | php -- --install-dir=/usr/bin --filename=composer + +# PHP: Install php extensions +RUN pecl channel-update pecl.php.net && \ + pecl install simple_kafka_client pcov && \ + docker-php-ext-install pcntl && \ + php-ext-enable simple_kafka_client pcntl pcov + +USER $HOST_USER + +WORKDIR /var/www/html + diff --git a/docker/dev/php/files/bin/php-ext-disable b/docker/dev/php/files/bin/php-ext-disable new file mode 100755 index 0000000..e23b4b8 --- /dev/null +++ b/docker/dev/php/files/bin/php-ext-disable @@ -0,0 +1,11 @@ +#!/bin/bash +if [ $# -eq 0 ] + then + echo "Please pass a php module name" + exit +fi + +for phpmod in "$@" +do + rm -f /usr/local/etc/php/conf.d/*$phpmod*.ini +done diff --git a/docker/dev/php/files/bin/php-ext-enable b/docker/dev/php/files/bin/php-ext-enable new file mode 100755 index 0000000..43e06b9 --- /dev/null +++ b/docker/dev/php/files/bin/php-ext-enable @@ -0,0 +1,20 @@ +#!/bin/bash + +if [[ ${#} -eq 0 ]] ; then + echo -e "\\nPHP module name required!\\n" + exit 1 +fi + +for phpmod in "${@}" ; do + + files=($(find /phpIni -type f -iname "*${phpmod}*.ini" -exec ls -1 '{}' +)) + + for i in "${files[@]}" ; do + ln -s "${i}" /usr/local/etc/php/conf.d + done + + if [[ ${#files[@]} -eq 0 ]] ; then + docker-php-ext-enable "${phpmod}" + fi + +done diff --git a/docker/dev/php/files/php/20-pcov.ini b/docker/dev/php/files/php/20-pcov.ini new file mode 100644 index 0000000..423d9ae --- /dev/null +++ b/docker/dev/php/files/php/20-pcov.ini @@ -0,0 +1 @@ +extension=pcov.so \ No newline at end of file diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml new file mode 100644 index 0000000..f132fa0 --- /dev/null +++ b/docker/docker-compose.yml @@ -0,0 +1,13 @@ +version: '3.2' +services: + php: + build: + context: ./dev/php + args: + HOST_USER: ${USER} + HOST_USER_ID: ${USER_ID} + container_name: php-simple-kafka-lib + hostname: php-simple-kafka-lib + tty: true + volumes: + - ../:/var/www/html \ No newline at end of file diff --git a/infection.json b/infection.json new file mode 100644 index 0000000..3205417 --- /dev/null +++ b/infection.json @@ -0,0 +1,18 @@ +{ + "timeout": 10, + "source": { + "directories": [ + "src" + ] + }, + "logs": { + "text": "build\/logs\/infection\/infection.log", + "summary": "build\/logs\/infection\/infection-summary.log" + }, + "mutators": { + "@default": true + }, + "phpUnit": { + "customPath": "vendor/bin/phpunit" + } +} diff --git a/phpcs.xml b/phpcs.xml new file mode 100644 index 0000000..72cc772 
--- /dev/null +++ b/phpcs.xml @@ -0,0 +1,8 @@ + + + src + + + + + diff --git a/phpstan.neon b/phpstan.neon new file mode 100644 index 0000000..c7b1758 --- /dev/null +++ b/phpstan.neon @@ -0,0 +1,3 @@ +parameters: + level: 8 + paths: [ src ] diff --git a/phpunit.xml b/phpunit.xml new file mode 100644 index 0000000..24f3d2c --- /dev/null +++ b/phpunit.xml @@ -0,0 +1,41 @@ + + + + + src + + + + + + + + + + + + + + + + ./tests/Unit + + + + + + + + + diff --git a/src/Callback/KafkaConsumerRebalanceCallback.php b/src/Callback/KafkaConsumerRebalanceCallback.php new file mode 100644 index 0000000..44bc307 --- /dev/null +++ b/src/Callback/KafkaConsumerRebalanceCallback.php @@ -0,0 +1,42 @@ +assign($partitions); + break; + + default: + $consumer->assign(null); + break; + } + } catch (SkcException $e) { + throw new KafkaRebalanceException($e->getMessage(), $e->getCode(), $e); + } + } +} diff --git a/src/Callback/KafkaErrorCallback.php b/src/Callback/KafkaErrorCallback.php new file mode 100644 index 0000000..07a221a --- /dev/null +++ b/src/Callback/KafkaErrorCallback.php @@ -0,0 +1,35 @@ +err) { + return; + } + + throw new KafkaProducerException( + $message->errstr(), + $message->err + ); + } +} diff --git a/src/Configuration/KafkaConfiguration.php b/src/Configuration/KafkaConfiguration.php new file mode 100644 index 0000000..8152e86 --- /dev/null +++ b/src/Configuration/KafkaConfiguration.php @@ -0,0 +1,81 @@ +brokers = $brokers; + $this->topicSubscriptions = $topicSubscriptions; + $this->initializeConfig($config); + } + + /** + * @return string[] + */ + public function getBrokers(): array + { + return $this->brokers; + } + + /** + * @return array|TopicSubscription[] + */ + public function getTopicSubscriptions(): array + { + return $this->topicSubscriptions; + } + + /** + * @return string[] + */ + public function getConfiguration(): array + { + return $this->dump(); + } + + /** + * @param mixed[] $config + * @return void + */ + private function initializeConfig(array $config = []): void + { + foreach ($config as $name => $value) { + if (false === is_scalar($value)) { + continue; + } + + if (true === is_bool($value)) { + $value = true === $value ? 
'true' : 'false'; + } + + $this->set($name, (string) $value); + } + + $this->set('metadata.broker.list', implode(',', $this->getBrokers())); + } +} diff --git a/src/Consumer/AbstractKafkaConsumer.php b/src/Consumer/AbstractKafkaConsumer.php new file mode 100644 index 0000000..ca85db1 --- /dev/null +++ b/src/Consumer/AbstractKafkaConsumer.php @@ -0,0 +1,244 @@ +consumer = $consumer; + $this->kafkaConfiguration = $kafkaConfiguration; + $this->decoder = $decoder; + } + + /** + * Returns true if the consumer has subscribed to its topics, otherwise false + * It is mandatory to call `subscribe` before `consume` + * + * @return boolean + */ + public function isSubscribed(): bool + { + return $this->subscribed; + } + + /** + * Returns the configuration settings for this consumer instance as array + * + * @return string[] + */ + public function getConfiguration(): array + { + return $this->kafkaConfiguration->dump(); + } + + /** + * Consumes a message and returns it + * In cases of errors / timeouts an exception is thrown + * + * @param integer $timeoutMs + * @param boolean $autoDecode + * @return KafkaConsumerMessageInterface + * @throws KafkaConsumerConsumeException + * @throws KafkaConsumerEndOfPartitionException + * @throws KafkaConsumerTimeoutException + */ + public function consume(int $timeoutMs = 10000, bool $autoDecode = true): KafkaConsumerMessageInterface + { + if (false === $this->isSubscribed()) { + throw new KafkaConsumerConsumeException(KafkaConsumerConsumeException::NOT_SUBSCRIBED_EXCEPTION_MESSAGE); + } + + if (null === $rdKafkaMessage = $this->kafkaConsume($timeoutMs)) { + throw new KafkaConsumerEndOfPartitionException( + kafka_err2str(RD_KAFKA_RESP_ERR__PARTITION_EOF), + RD_KAFKA_RESP_ERR__PARTITION_EOF + ); + } + + if (RD_KAFKA_RESP_ERR__PARTITION_EOF === $rdKafkaMessage->err) { + throw new KafkaConsumerEndOfPartitionException($rdKafkaMessage->errstr(), $rdKafkaMessage->err); + } elseif (RD_KAFKA_RESP_ERR__TIMED_OUT === $rdKafkaMessage->err) { + throw new KafkaConsumerTimeoutException($rdKafkaMessage->errstr(), $rdKafkaMessage->err); + } + + $message = $this->getConsumerMessage($rdKafkaMessage); + + if (RD_KAFKA_RESP_ERR_NO_ERROR !== $rdKafkaMessage->err) { + throw new KafkaConsumerConsumeException($rdKafkaMessage->errstr(), $rdKafkaMessage->err, $message); + } + + if (true === $autoDecode) { + return $this->decoder->decode($message); + } + + return $message; + } + + /** + * Decode consumer message + * + * @param KafkaConsumerMessageInterface $message + * @return KafkaConsumerMessageInterface + */ + public function decodeMessage(KafkaConsumerMessageInterface $message): KafkaConsumerMessageInterface + { + return $this->decoder->decode($message); + } + + /** + * Queries the broker for metadata on a certain topic + * + * @param string $topicName + * @param integer $timeoutMs + * @return SkcMetadataTopic + * @throws SkcException + */ + public function getMetadataForTopic(string $topicName, int $timeoutMs = 10000): SkcMetadataTopic + { + /** @var SkcConsumerTopic $topic */ + $topic = $this->consumer->getTopicHandle($topicName); + return $this->consumer + ->getMetadata( + false, + $topic, + $timeoutMs + ) + ->getTopics() + ->current(); + } + + /** + * Get the earliest offset for a certain timestamp for topic partitions + * + * @param array|SkcTopicPartition[] $topicPartitions + * @param integer $timeoutMs + * @return array|SkcTopicPartition[] + */ + public function offsetsForTimes(array $topicPartitions, int $timeoutMs): array + { + return 
$this->consumer->offsetsForTimes($topicPartitions, $timeoutMs); + } + + /** + * Queries the broker for the first offset of a given topic and partition + * + * @param string $topic + * @param integer $partition + * @param integer $timeoutMs + * @return integer + */ + public function getFirstOffsetForTopicPartition(string $topic, int $partition, int $timeoutMs): int + { + $lowOffset = 0; + $highOffset = 0; + + $this->consumer->queryWatermarkOffsets($topic, $partition, $lowOffset, $highOffset, $timeoutMs); + + return $lowOffset; + } + + /** + * Queries the broker for the last offset of a given topic and partition + * + * @param string $topic + * @param integer $partition + * @param integer $timeoutMs + * @return integer + */ + public function getLastOffsetForTopicPartition(string $topic, int $partition, int $timeoutMs): int + { + $lowOffset = 0; + $highOffset = 0; + + $this->consumer->queryWatermarkOffsets($topic, $partition, $lowOffset, $highOffset, $timeoutMs); + + return $highOffset; + } + + /** + * @param string $topic + * @return int[] + * @throws SkcException + */ + protected function getAllTopicPartitions(string $topic): array + { + + $partitions = []; + $topicMetadata = $this->getMetadataForTopic($topic); + + foreach ($topicMetadata->getPartitions() as $partition) { + $partitions[] = $partition->getId(); + } + + return $partitions; + } + + /** + * @param SkcMessage $message + * @return KafkaConsumerMessageInterface + */ + private function getConsumerMessage(SkcMessage $message): KafkaConsumerMessageInterface + { + return new KafkaConsumerMessage( + (string) $message->topic_name, + (int) $message->partition, + (int) $message->offset, + (int) $message->timestamp, + $message->key, + $message->payload, + (array) $message->headers + ); + } + + /** + * @param integer $timeoutMs + * @return null|SkcMessage + */ + abstract protected function kafkaConsume(int $timeoutMs): ?SkcMessage; +} diff --git a/src/Consumer/KafkaConsumerBuilder.php b/src/Consumer/KafkaConsumerBuilder.php new file mode 100644 index 0000000..da3c51c --- /dev/null +++ b/src/Consumer/KafkaConsumerBuilder.php @@ -0,0 +1,314 @@ + + */ + private $config = [ + 'enable.auto.offset.store' => false, + 'enable.auto.commit' => false, + 'auto.offset.reset' => 'earliest' + ]; + + /** + * @var array|TopicSubscription[] + */ + private $topics = []; + + /** + * @var string + */ + private $consumerGroup = 'default'; + + /** + * @var callable + */ + private $errorCallback; + + /** + * @var callable + */ + private $rebalanceCallback; + + /** + * @var callable + */ + private $consumeCallback; + + /** + * @var callable + */ + private $logCallback; + + /** + * @var callable + */ + private $offsetCommitCallback; + + /** + * @var DecoderInterface + */ + private $decoder; + + /** + * KafkaConsumerBuilder constructor. 
+ */ + private function __construct() + { + $this->errorCallback = new KafkaErrorCallback(); + $this->decoder = new NullDecoder(); + } + + /** + * Returns the builder + * + * @return KafkaConsumerBuilder + */ + public static function create(): self + { + return new self(); + } + + /** + * Adds a broker from which you want to consume + * + * @param string $broker + * @return KafkaConsumerBuilderInterface + */ + public function withAdditionalBroker(string $broker): KafkaConsumerBuilderInterface + { + $that = clone $this; + + $that->brokers[] = $broker; + + return $that; + } + + /** + * Add topic name(s) (and additionally partitions and offsets) to subscribe to + * + * @param string $topicName + * @param int[] $partitions + * @param integer $offset + * @return KafkaConsumerBuilderInterface + */ + public function withAdditionalSubscription( + string $topicName, + array $partitions = [], + int $offset = self::OFFSET_STORED + ): KafkaConsumerBuilderInterface { + $that = clone $this; + + $that->topics[] = new TopicSubscription($topicName, $partitions, $offset); + + return $that; + } + + /** + * Replaces all topic names previously configured with a topic and additionally partitions and an offset to + * subscribe to + * + * @param string $topicName + * @param int[] $partitions + * @param integer $offset + * @return KafkaConsumerBuilderInterface + */ + public function withSubscription( + string $topicName, + array $partitions = [], + int $offset = self::OFFSET_STORED + ): KafkaConsumerBuilderInterface { + $that = clone $this; + + $that->topics = [new TopicSubscription($topicName, $partitions, $offset)]; + + return $that; + } + + /** + * Add configuration settings, otherwise the kafka defaults apply + * + * @param string[] $config + * @return KafkaConsumerBuilderInterface + */ + public function withAdditionalConfig(array $config): KafkaConsumerBuilderInterface + { + $that = clone $this; + $that->config = $config + $this->config; + + return $that; + } + + /** + * Set the consumer group + * + * @param string $consumerGroup + * @return KafkaConsumerBuilderInterface + */ + public function withConsumerGroup(string $consumerGroup): KafkaConsumerBuilderInterface + { + $that = clone $this; + $that->consumerGroup = $consumerGroup; + + return $that; + } + + /** + * Set a callback to be called on errors. 
+ * The default callback will throw an exception for every error + * + * @param callable $errorCallback + * @return KafkaConsumerBuilderInterface + */ + public function withErrorCallback(callable $errorCallback): KafkaConsumerBuilderInterface + { + $that = clone $this; + $that->errorCallback = $errorCallback; + + return $that; + } + + /** + * Set a callback to be called on consumer rebalance + * + * @param callable $rebalanceCallback + * @return KafkaConsumerBuilderInterface + */ + public function withRebalanceCallback(callable $rebalanceCallback): KafkaConsumerBuilderInterface + { + $that = clone $this; + $that->rebalanceCallback = $rebalanceCallback; + + return $that; + } + + /** + * Only applicable for the high level consumer + * Callback that is going to be called when you call consume + * + * @param callable $consumeCallback + * @return KafkaConsumerBuilderInterface + */ + public function withConsumeCallback(callable $consumeCallback): KafkaConsumerBuilderInterface + { + $that = clone $this; + $that->consumeCallback = $consumeCallback; + + return $that; + } + + /** + * Callback for log related events + * + * @param callable $logCallback + * @return KafkaConsumerBuilderInterface + */ + public function withLogCallback(callable $logCallback): KafkaConsumerBuilderInterface + { + $that = clone $this; + $that->logCallback = $logCallback; + + return $that; + } + + /** + * Set callback that is being called on offset commits + * + * @param callable $offsetCommitCallback + * @return KafkaConsumerBuilderInterface + */ + public function withOffsetCommitCallback(callable $offsetCommitCallback): KafkaConsumerBuilderInterface + { + $that = clone $this; + $that->offsetCommitCallback = $offsetCommitCallback; + + return $that; + } + + /** + * Lets you set a custom decoder for the consumed message + * + * @param DecoderInterface $decoder + * @return KafkaConsumerBuilderInterface + */ + public function withDecoder(DecoderInterface $decoder): KafkaConsumerBuilderInterface + { + $that = clone $this; + $that->decoder = $decoder; + + return $that; + } + + /** + * Returns your consumer instance + * + * @return KafkaConsumerInterface + * @throws KafkaConsumerBuilderException + */ + public function build(): KafkaConsumerInterface + { + if ([] === $this->brokers) { + throw new KafkaConsumerBuilderException(KafkaConsumerBuilderException::NO_BROKER_EXCEPTION_MESSAGE); + } + + //set additional config + $this->config['group.id'] = $this->consumerGroup; + + //create config + $kafkaConfig = new KafkaConfiguration( + $this->brokers, + $this->topics, + $this->config + ); + + //set consumer callbacks + $this->registerCallbacks($kafkaConfig); + + //create SkcConsumer + $rdKafkaConsumer = new SkcConsumer($kafkaConfig); + + return new KafkaHighLevelConsumer($rdKafkaConsumer, $kafkaConfig, $this->decoder); + } + + /** + * @param KafkaConfiguration $conf + * @return void + */ + private function registerCallbacks(KafkaConfiguration $conf): void + { + $conf->setErrorCb($this->errorCallback); + + if (null !== $this->rebalanceCallback) { + $conf->setRebalanceCb($this->rebalanceCallback); + } + + if (null !== $this->consumeCallback) { + $conf->setConsumeCb($this->consumeCallback); + } + + if (null !== $this->logCallback) { + //$conf->setLogCb($this->logCallback); + } + + if (null !== $this->offsetCommitCallback) { + $conf->setOffsetCommitCb($this->offsetCommitCallback); + } + } +} diff --git a/src/Consumer/KafkaConsumerBuilderInterface.php b/src/Consumer/KafkaConsumerBuilderInterface.php new file mode 100644 index 0000000..0ddc43b 
--- /dev/null +++ b/src/Consumer/KafkaConsumerBuilderInterface.php @@ -0,0 +1,125 @@ + $topicSubscriptions + * @throws KafkaConsumerSubscriptionException + * @return void + */ + public function subscribe(array $topicSubscriptions = []): void + { + $subscriptions = $this->getTopicSubscriptions($topicSubscriptions); + $assignments = $this->getTopicAssignments($topicSubscriptions); + + if ([] !== $subscriptions && [] !== $assignments) { + throw new KafkaConsumerSubscriptionException( + KafkaConsumerSubscriptionException::MIXED_SUBSCRIPTION_EXCEPTION_MESSAGE + ); + } + + try { + if ([] !== $subscriptions) { + $this->consumer->subscribe($subscriptions); + } else { + $this->consumer->assign($assignments); + } + $this->subscribed = true; + } catch (SkcException $e) { + throw new KafkaConsumerSubscriptionException($e->getMessage(), $e->getCode(), $e); + } + } + + /** + * Unsubscribes from the current subscription / assignment + * + * @throws KafkaConsumerSubscriptionException + * @return void + */ + public function unsubscribe(): void + { + try { + $this->consumer->unsubscribe(); + $this->subscribed = false; + } catch (SkcException $e) { + throw new KafkaConsumerSubscriptionException($e->getMessage(), $e->getCode(), $e); + } + } + + /** + * Commits the offset to the broker for the given message(s) + * This is a blocking function, checkout out commitAsync if you want to commit in a non blocking manner + * + * @param KafkaConsumerMessageInterface|KafkaConsumerMessageInterface[] $messages + * @return void + * @throws KafkaConsumerCommitException + */ + public function commit($messages): void + { + $this->commitMessages($messages); + } + + /** + * Assigns a consumer to the given TopicPartition(s) + * + * @param SkcTopicPartition[] $topicPartitions + * @throws KafkaConsumerAssignmentException + * @return void + */ + public function assign(array $topicPartitions): void + { + try { + $this->consumer->assign($topicPartitions); + } catch (SkcException $e) { + throw new KafkaConsumerAssignmentException($e->getMessage(), $e->getCode()); + } + } + + /** + * Asynchronous version of commit (non blocking) + * + * @param KafkaConsumerMessageInterface|KafkaConsumerMessageInterface[] $messages + * @return void + * @throws KafkaConsumerCommitException + */ + public function commitAsync($messages): void + { + $this->commitMessages($messages, true); + } + + /** + * Gets the current assignment for the consumer + * + * @return array|SkcTopicPartition[] + * @throws KafkaConsumerAssignmentException + */ + public function getAssignment(): array + { + try { + return $this->consumer->getAssignment(); + } catch (SkcException $e) { + throw new KafkaConsumerAssignmentException($e->getMessage(), $e->getCode()); + } + } + + /** + * Gets the commited offset for a TopicPartition for the configured consumer group + * + * @param array|SkcTopicPartition[] $topicPartitions + * @param integer $timeoutMs + * @return array|SkcTopicPartition[] + * @throws KafkaConsumerRequestException + */ + public function getCommittedOffsets(array $topicPartitions, int $timeoutMs): array + { + try { + return $this->consumer->getCommittedOffsets($topicPartitions, $timeoutMs); + } catch (SkcException $e) { + throw new KafkaConsumerRequestException($e->getMessage(), $e->getCode()); + } + } + + /** + * Get current offset positions of the consumer + * + * @param array|SkcTopicPartition[] $topicPartitions + * @return array|SkcTopicPartition[] + */ + public function getOffsetPositions(array $topicPartitions): array + { + return 
$this->consumer->getOffsetPositions($topicPartitions); + } + + /** + * Close the consumer connection + * + * @return void; + */ + public function close(): void + { + $this->consumer->close(); + } + + /** + * @param integer $timeoutMs + * @return SkcMessage|null + * @throws SkcException + */ + protected function kafkaConsume(int $timeoutMs): ?SkcMessage + { + return $this->consumer->consume($timeoutMs); + } + + /** + * @param KafkaConsumerMessageInterface|KafkaConsumerMessageInterface[] $messages + * @param boolean $asAsync + * @return void + * @throws KafkaConsumerCommitException + */ + private function commitMessages($messages, bool $asAsync = false): void + { + $messages = is_array($messages) ? $messages : [$messages]; + + $offsetsToCommit = $this->getOffsetsToCommitForMessages($messages); + + try { + if (true === $asAsync) { + $this->consumer->commitAsync($offsetsToCommit); + } else { + $this->consumer->commit($offsetsToCommit); + } + } catch (SkcException $e) { + throw new KafkaConsumerCommitException($e->getMessage(), $e->getCode()); + } + } + + /** + * @param array|KafkaConsumerMessageInterface[] $messages + * @return array|SkcTopicPartition[] + */ + private function getOffsetsToCommitForMessages(array $messages): array + { + $offsetsToCommit = []; + + foreach ($messages as $message) { + $topicPartition = sprintf('%s-%s', $message->getTopicName(), $message->getPartition()); + + if (true === isset($offsetsToCommit[$topicPartition])) { + if ($message->getOffset() + 1 > $offsetsToCommit[$topicPartition]->getOffset()) { + $offsetsToCommit[$topicPartition]->setOffset($message->getOffset() + 1); + } + continue; + } + + $offsetsToCommit[$topicPartition] = new SkcTopicPartition( + $message->getTopicName(), + $message->getPartition(), + $message->getOffset() + 1 + ); + } + + return $offsetsToCommit; + } + + /** + * @param array $topicSubscriptions + * @return array|string[] + */ + private function getTopicSubscriptions(array $topicSubscriptions = []): array + { + $subscriptions = []; + + if ([] === $topicSubscriptions) { + $topicSubscriptions = $this->kafkaConfiguration->getTopicSubscriptions(); + } + + foreach ($topicSubscriptions as $topicSubscription) { + if ( + [] !== $topicSubscription->getPartitions() + || KafkaConsumerBuilderInterface::OFFSET_STORED !== $topicSubscription->getOffset() + ) { + continue; + } + $subscriptions[] = $topicSubscription->getTopicName(); + } + + return $subscriptions; + } + + /** + * @param array $topicSubscriptions + * @return array|SkcTopicPartition[] + */ + private function getTopicAssignments(array $topicSubscriptions = []): array + { + $assignments = []; + + if ([] === $topicSubscriptions) { + $topicSubscriptions = $this->kafkaConfiguration->getTopicSubscriptions(); + } + + foreach ($topicSubscriptions as $topicSubscription) { + if ( + [] === $topicSubscription->getPartitions() + && KafkaConsumerBuilderInterface::OFFSET_STORED === $topicSubscription->getOffset() + ) { + continue; + } + + $offset = $topicSubscription->getOffset(); + $partitions = $topicSubscription->getPartitions(); + + if ([] === $partitions) { + $partitions = $this->getAllTopicPartitions($topicSubscription->getTopicName()); + } + + foreach ($partitions as $partitionId) { + $assignments[] = new SkcTopicPartition( + $topicSubscription->getTopicName(), + $partitionId, + $offset + ); + } + } + + return $assignments; + } +} diff --git a/src/Consumer/KafkaHighLevelConsumerInterface.php b/src/Consumer/KafkaHighLevelConsumerInterface.php new file mode 100644 index 0000000..37a2f2b --- /dev/null 
+++ b/src/Consumer/KafkaHighLevelConsumerInterface.php @@ -0,0 +1,58 @@ +topicName = $topicName; + $this->partitions = $partitions; + $this->offset = $offset; + } + + /** + * @return string + */ + public function getTopicName(): string + { + return $this->topicName; + } + + /** + * @param int[] $partitions + * @return void + */ + public function setPartitions(array $partitions): void + { + $this->partitions = $partitions; + } + + /** + * @return int[] + */ + public function getPartitions(): array + { + return $this->partitions; + } + + /** + * @return integer + */ + public function getOffset(): int + { + return $this->offset ?? RD_KAFKA_OFFSET_STORED; + } +} diff --git a/src/Consumer/TopicSubscriptionInterface.php b/src/Consumer/TopicSubscriptionInterface.php new file mode 100644 index 0000000..705fe2e --- /dev/null +++ b/src/Consumer/TopicSubscriptionInterface.php @@ -0,0 +1,30 @@ +kafkaMessage = $kafkaMessage; + } + + /** + * @return null|KafkaConsumerMessageInterface + */ + public function getKafkaMessage(): ?KafkaConsumerMessageInterface + { + return $this->kafkaMessage; + } +} diff --git a/src/Exception/KafkaConsumerEndOfPartitionException.php b/src/Exception/KafkaConsumerEndOfPartitionException.php new file mode 100644 index 0000000..20a0a0b --- /dev/null +++ b/src/Exception/KafkaConsumerEndOfPartitionException.php @@ -0,0 +1,10 @@ +key; + } + + /** + * @return mixed + */ + public function getBody() + { + return $this->body; + } + + /** + * @return string + */ + public function getTopicName(): string + { + return $this->topicName; + } + + /** + * @return integer + */ + public function getPartition(): int + { + return $this->partition; + } + + /** + * @return string[]|null + */ + public function getHeaders(): ?array + { + return $this->headers; + } +} diff --git a/src/Message/Decoder/AvroDecoder.php b/src/Message/Decoder/AvroDecoder.php new file mode 100644 index 0000000..4e66b7f --- /dev/null +++ b/src/Message/Decoder/AvroDecoder.php @@ -0,0 +1,111 @@ +recordSerializer = $recordSerializer; + $this->registry = $registry; + } + + /** + * @param KafkaConsumerMessageInterface $consumerMessage + * @return KafkaConsumerMessageInterface + * @throws SchemaRegistryException + */ + public function decode(KafkaConsumerMessageInterface $consumerMessage): KafkaConsumerMessageInterface + { + return new KafkaConsumerMessage( + $consumerMessage->getTopicName(), + $consumerMessage->getPartition(), + $consumerMessage->getOffset(), + $consumerMessage->getTimestamp(), + $this->decodeKey($consumerMessage), + $this->decodeBody($consumerMessage), + $consumerMessage->getHeaders() + ); + } + + /** + * @param KafkaConsumerMessageInterface $consumerMessage + * @return mixed + * @throws SchemaRegistryException + */ + private function decodeBody(KafkaConsumerMessageInterface $consumerMessage) + { + $body = $consumerMessage->getBody(); + $topicName = $consumerMessage->getTopicName(); + + if (null === $body) { + return null; + } + + if (false === $this->registry->hasBodySchemaForTopic($topicName)) { + return $body; + } + + $avroSchema = $this->registry->getBodySchemaForTopic($topicName); + $schemaDefinition = $avroSchema->getDefinition(); + + return $this->recordSerializer->decodeMessage($body, $schemaDefinition); + } + + /** + * @param KafkaConsumerMessageInterface $consumerMessage + * @return mixed + * @throws SchemaRegistryException + */ + private function decodeKey(KafkaConsumerMessageInterface $consumerMessage) + { + $key = $consumerMessage->getKey(); + $topicName = $consumerMessage->getTopicName(); + + if 
(null === $key) { + return null; + } + + if (false === $this->registry->hasKeySchemaForTopic($topicName)) { + return $key; + } + + $avroSchema = $this->registry->getKeySchemaForTopic($topicName); + $schemaDefinition = $avroSchema->getDefinition(); + + return $this->recordSerializer->decodeMessage($key, $schemaDefinition); + } + + /** + * @return AvroSchemaRegistryInterface + */ + public function getRegistry(): AvroSchemaRegistryInterface + { + return $this->registry; + } +} diff --git a/src/Message/Decoder/AvroDecoderInterface.php b/src/Message/Decoder/AvroDecoderInterface.php new file mode 100644 index 0000000..530fff0 --- /dev/null +++ b/src/Message/Decoder/AvroDecoderInterface.php @@ -0,0 +1,15 @@ +getBody(), true, 512, JSON_THROW_ON_ERROR); + + return new KafkaConsumerMessage( + $consumerMessage->getTopicName(), + $consumerMessage->getPartition(), + $consumerMessage->getOffset(), + $consumerMessage->getTimestamp(), + $consumerMessage->getKey(), + $body, + $consumerMessage->getHeaders() + ); + } +} diff --git a/src/Message/Decoder/NullDecoder.php b/src/Message/Decoder/NullDecoder.php new file mode 100644 index 0000000..ed5e7dd --- /dev/null +++ b/src/Message/Decoder/NullDecoder.php @@ -0,0 +1,20 @@ +recordSerializer = $recordSerializer; + $this->registry = $registry; + } + + /** + * @param KafkaProducerMessageInterface $producerMessage + * @return KafkaProducerMessageInterface + * @throws SchemaRegistryException + * @throws AvroEncoderException + */ + public function encode(KafkaProducerMessageInterface $producerMessage): KafkaProducerMessageInterface + { + $producerMessage = $this->encodeBody($producerMessage); + + return $this->encodeKey($producerMessage); + } + + /** + * @param KafkaProducerMessageInterface $producerMessage + * @return KafkaProducerMessageInterface + * @throws SchemaRegistryException + */ + private function encodeBody(KafkaProducerMessageInterface $producerMessage): KafkaProducerMessageInterface + { + $topicName = $producerMessage->getTopicName(); + $body = $producerMessage->getBody(); + + if (null === $body) { + return $producerMessage; + } + + if (false === $this->registry->hasBodySchemaForTopic($topicName)) { + return $producerMessage; + } + + $avroSchema = $this->registry->getBodySchemaForTopic($topicName); + + $encodedBody = $this->recordSerializer->encodeRecord( + $avroSchema->getName(), + $this->getAvroSchemaDefinition($avroSchema), + $body + ); + + return $producerMessage->withBody($encodedBody); + } + + /** + * @param KafkaProducerMessageInterface $producerMessage + * @return KafkaProducerMessageInterface + * @throws SchemaRegistryException + */ + private function encodeKey(KafkaProducerMessageInterface $producerMessage): KafkaProducerMessageInterface + { + $topicName = $producerMessage->getTopicName(); + $key = $producerMessage->getKey(); + + if (null === $key) { + return $producerMessage; + } + + if (false === $this->registry->hasKeySchemaForTopic($topicName)) { + return $producerMessage; + } + + $avroSchema = $this->registry->getKeySchemaForTopic($topicName); + + $encodedKey = $this->recordSerializer->encodeRecord( + $avroSchema->getName(), + $this->getAvroSchemaDefinition($avroSchema), + $key + ); + + return $producerMessage->withKey($encodedKey); + } + + private function getAvroSchemaDefinition(KafkaAvroSchemaInterface $avroSchema): AvroSchema + { + $schemaDefinition = $avroSchema->getDefinition(); + + if (null === $schemaDefinition) { + throw new AvroEncoderException( + sprintf( + AvroEncoderException::UNABLE_TO_LOAD_DEFINITION_MESSAGE, + 
$avroSchema->getName() + ) + ); + } + + return $schemaDefinition; + } + + /** + * @return AvroSchemaRegistryInterface + */ + public function getRegistry(): AvroSchemaRegistryInterface + { + return $this->registry; + } +} diff --git a/src/Message/Encoder/AvroEncoderInterface.php b/src/Message/Encoder/AvroEncoderInterface.php new file mode 100644 index 0000000..b18963e --- /dev/null +++ b/src/Message/Encoder/AvroEncoderInterface.php @@ -0,0 +1,15 @@ +getBody(), JSON_THROW_ON_ERROR); + + return $producerMessage->withBody($body); + } +} diff --git a/src/Message/Encoder/NullEncoder.php b/src/Message/Encoder/NullEncoder.php new file mode 100644 index 0000000..c138c6b --- /dev/null +++ b/src/Message/Encoder/NullEncoder.php @@ -0,0 +1,20 @@ +name = $schemaName; + $this->version = $version; + $this->definition = $definition; + } + + /** + * @return string + */ + public function getName(): string + { + return $this->name; + } + + /** + * @return integer + */ + public function getVersion(): int + { + return $this->version; + } + + /** + * @param \AvroSchema $definition + * @return void + */ + public function setDefinition(\AvroSchema $definition): void + { + $this->definition = $definition; + } + + /** + * @return \AvroSchema|null + */ + public function getDefinition(): ?\AvroSchema + { + return $this->definition; + } +} diff --git a/src/Message/KafkaAvroSchemaInterface.php b/src/Message/KafkaAvroSchemaInterface.php new file mode 100644 index 0000000..e1d0e7e --- /dev/null +++ b/src/Message/KafkaAvroSchemaInterface.php @@ -0,0 +1,32 @@ +topicName = $topicName; + $this->partition = $partition; + $this->offset = $offset; + $this->timestamp = $timestamp; + $this->key = $key; + $this->body = $body; + $this->headers = $headers; + } + + /** + * @return integer + */ + public function getOffset(): int + { + return $this->offset; + } + + /** + * @return integer + */ + public function getTimestamp(): int + { + return $this->timestamp; + } +} diff --git a/src/Message/KafkaConsumerMessageInterface.php b/src/Message/KafkaConsumerMessageInterface.php new file mode 100644 index 0000000..b76e411 --- /dev/null +++ b/src/Message/KafkaConsumerMessageInterface.php @@ -0,0 +1,19 @@ +topicName = $topicName; + $this->partition = $partition; + } + + /** + * @param string $topicName + * @param integer $partition + * @return KafkaProducerMessageInterface + */ + public static function create(string $topicName, int $partition): KafkaProducerMessageInterface + { + return new self($topicName, $partition); + } + + /** + * @param string|null $key + * @return KafkaProducerMessageInterface + */ + public function withKey(?string $key): KafkaProducerMessageInterface + { + $new = clone $this; + + $new->key = $key; + + return $new; + } + + /** + * @param mixed $body + * @return KafkaProducerMessageInterface + */ + public function withBody($body): KafkaProducerMessageInterface + { + $new = clone $this; + + $new->body = $body; + + return $new; + } + + /** + * @param string[]|null $headers + * @return KafkaProducerMessageInterface + */ + public function withHeaders(?array $headers): KafkaProducerMessageInterface + { + $new = clone $this; + + $new->headers = $headers; + + return $new; + } + + /** + * @param string $key + * @param string|integer $value + * @return KafkaProducerMessageInterface + */ + public function withHeader(string $key, $value): KafkaProducerMessageInterface + { + $new = clone $this; + + $new->headers[$key] = $value; + + return $new; + } +} diff --git a/src/Message/KafkaProducerMessageInterface.php 
b/src/Message/KafkaProducerMessageInterface.php new file mode 100644 index 0000000..96754cb --- /dev/null +++ b/src/Message/KafkaProducerMessageInterface.php @@ -0,0 +1,41 @@ + + */ + private $schemaMapping = [ + self::BODY_IDX => [], + self::KEY_IDX => [], + ]; + + /** + * AvroSchemaRegistry constructor. + * @param Registry $registry + */ + public function __construct(Registry $registry) + { + $this->registry = $registry; + } + + /** + * @param string $topicName + * @param KafkaAvroSchemaInterface $avroSchema + * @return void + */ + public function addBodySchemaMappingForTopic(string $topicName, KafkaAvroSchemaInterface $avroSchema): void + { + $this->schemaMapping[self::BODY_IDX][$topicName] = $avroSchema; + } + + /** + * @param string $topicName + * @param KafkaAvroSchemaInterface $avroSchema + * @return void + */ + public function addKeySchemaMappingForTopic(string $topicName, KafkaAvroSchemaInterface $avroSchema): void + { + $this->schemaMapping[self::KEY_IDX][$topicName] = $avroSchema; + } + + /** + * @param string $topicName + * @return KafkaAvroSchemaInterface + * @throws SchemaRegistryException + */ + public function getBodySchemaForTopic(string $topicName): KafkaAvroSchemaInterface + { + return $this->getSchemaForTopicAndType($topicName, self::BODY_IDX); + } + + /** + * @param string $topicName + * @return KafkaAvroSchemaInterface + * @throws SchemaRegistryException + */ + public function getKeySchemaForTopic(string $topicName): KafkaAvroSchemaInterface + { + return $this->getSchemaForTopicAndType($topicName, self::KEY_IDX); + } + + /** + * @param string $topicName + * @return boolean + * @throws SchemaRegistryException + */ + public function hasBodySchemaForTopic(string $topicName): bool + { + return isset($this->schemaMapping[self::BODY_IDX][$topicName]); + } + + /** + * @param string $topicName + * @return boolean + * @throws SchemaRegistryException + */ + public function hasKeySchemaForTopic(string $topicName): bool + { + return isset($this->schemaMapping[self::KEY_IDX][$topicName]); + } + + /** + * @param string $topicName + * @param string $type + * @return KafkaAvroSchemaInterface + * @throws SchemaRegistryException|AvroSchemaRegistryException + */ + private function getSchemaForTopicAndType(string $topicName, string $type): KafkaAvroSchemaInterface + { + if (false === isset($this->schemaMapping[$type][$topicName])) { + throw new AvroSchemaRegistryException( + sprintf( + AvroSchemaRegistryException::SCHEMA_MAPPING_NOT_FOUND, + $topicName, + $type + ) + ); + } + + $avroSchema = $this->schemaMapping[$type][$topicName]; + + if (null !== $avroSchema->getDefinition()) { + return $avroSchema; + } + + $avroSchema->setDefinition($this->getSchemaDefinition($avroSchema)); + $this->schemaMapping[$type][$topicName] = $avroSchema; + + return $avroSchema; + } + + /** + * @param KafkaAvroSchemaInterface $avroSchema + * @return \AvroSchema + * @throws SchemaRegistryException + */ + private function getSchemaDefinition(KafkaAvroSchemaInterface $avroSchema): \AvroSchema + { + if (KafkaAvroSchemaInterface::LATEST_VERSION === $avroSchema->getVersion()) { + return $this->registry->latestVersion($avroSchema->getName()); + } + + return $this->registry->schemaForSubjectAndVersion($avroSchema->getName(), $avroSchema->getVersion()); + } + + /** + * @return array + */ + public function getTopicSchemaMapping(): array + { + return $this->schemaMapping; + } +} diff --git a/src/Message/Registry/AvroSchemaRegistryInterface.php b/src/Message/Registry/AvroSchemaRegistryInterface.php new file mode 100644 
index 0000000..74c3860 --- /dev/null +++ b/src/Message/Registry/AvroSchemaRegistryInterface.php @@ -0,0 +1,64 @@ + + */ + public function getTopicSchemaMapping(): array; + + /** + * @param string $topicName + * @return KafkaAvroSchemaInterface + * @throws SchemaRegistryException + */ + public function getBodySchemaForTopic(string $topicName): KafkaAvroSchemaInterface; + + /** + * @param string $topicName + * @return KafkaAvroSchemaInterface + * @throws SchemaRegistryException + */ + public function getKeySchemaForTopic(string $topicName): KafkaAvroSchemaInterface; + + /** + * @param string $topicName + * @return boolean + * @throws SchemaRegistryException + */ + public function hasBodySchemaForTopic(string $topicName): bool; + + /** + * @param string $topicName + * @return boolean + * @throws SchemaRegistryException + */ + public function hasKeySchemaForTopic(string $topicName): bool; +} diff --git a/src/Producer/KafkaProducer.php b/src/Producer/KafkaProducer.php new file mode 100644 index 0000000..f9d849d --- /dev/null +++ b/src/Producer/KafkaProducer.php @@ -0,0 +1,273 @@ +producer = $producer; + $this->kafkaConfiguration = $kafkaConfiguration; + $this->encoder = $encoder; + } + + /** + * Produces a message to the topic and partition defined in the message + * If a schema name was given, the message body will be avro serialized. + * + * @param KafkaProducerMessageInterface $message + * @param boolean $autoPoll + * @param integer $pollTimeoutMs + * @return void + */ + public function produce(KafkaProducerMessageInterface $message, bool $autoPoll = true, int $pollTimeoutMs = 0): void + { + $message = $this->encoder->encode($message); + + $topicProducer = $this->getProducerTopicForTopic($message->getTopicName()); + + $topicProducer->producev( + $message->getPartition(), + RD_KAFKA_MSG_F_BLOCK, + $message->getBody(), + $message->getKey(), + $message->getHeaders() + ); + + if (true === $autoPoll) { + $this->producer->poll($pollTimeoutMs); + } + } + + /** + * Produces a message to the topic and partition defined in the message + * If a schema name was given, the message body will be avro serialized. 
+ * Wait for an event to arrive before continuing (blocking) + * + * @param KafkaProducerMessageInterface $message + * @return void + */ + public function syncProduce(KafkaProducerMessageInterface $message): void + { + $this->produce($message, true, -1); + } + + /** + * Poll for producer event, pass 0 for non-blocking, pass -1 to block until an event arrives + * + * @param integer $timeoutMs + * @return void + */ + public function poll(int $timeoutMs = 0): void + { + $this->producer->poll($timeoutMs); + } + + /** + * Poll for producer events until the number of $queueSize events remain + * + * @param integer $timeoutMs + * @param integer $queueSize + * @return void + */ + public function pollUntilQueueSizeReached(int $timeoutMs = 0, int $queueSize = 0): void + { + while ($this->producer->getOutQLen() > $queueSize) { + $this->producer->poll($timeoutMs); + } + } + + /** + * Purge producer messages that are in flight + * + * @param integer $purgeFlags + * @return integer + */ + public function purge(int $purgeFlags): int + { + return $this->producer->purge($purgeFlags); + } + + /** + * Wait until all outstanding produce requests are completed + * + * @param integer $timeoutMs + * @return integer + */ + public function flush(int $timeoutMs): int + { + return $this->producer->flush($timeoutMs); + } + + /** + * Queries the broker for metadata on a certain topic + * + * @param string $topicName + * @param integer $timeoutMs + * @return SkcMetadataTopic + * @throws SkcException + */ + public function getMetadataForTopic(string $topicName, int $timeoutMs = 10000): SkcMetadataTopic + { + $topic = $this->producer->getTopicHandle($topicName); + return $this->producer + ->getMetadata( + false, + $topic, + $timeoutMs + ) + ->getTopics() + ->current(); + } + + /** + * Start a producer transaction + * + * @param int $timeoutMs + * @return void + * + * @throws KafkaProducerTransactionAbortException + * @throws KafkaProducerTransactionFatalException + * @throws KafkaProducerTransactionRetryException + */ + public function beginTransaction(int $timeoutMs): void + { + try { + if (false === $this->transactionInitialized) { + $this->producer->initTransactions($timeoutMs); + $this->transactionInitialized = true; + } + + $this->producer->beginTransaction(); + } catch (SkcErrorException $e) { + $this->handleTransactionError($e); + } + } + + /** + * Commit the current producer transaction + * + * @param int $timeoutMs + * @return void + * + * @throws KafkaProducerTransactionAbortException + * @throws KafkaProducerTransactionFatalException + * @throws KafkaProducerTransactionRetryException + */ + public function commitTransaction(int $timeoutMs): void + { + try { + $this->producer->commitTransaction($timeoutMs); + } catch (SkcErrorException $e) { + $this->handleTransactionError($e); + } + } + + /** + * Abort the current producer transaction + * + * @param int $timeoutMs + * @return void + * + * @throws KafkaProducerTransactionAbortException + * @throws KafkaProducerTransactionFatalException + * @throws KafkaProducerTransactionRetryException + */ + public function abortTransaction(int $timeoutMs): void + { + try { + $this->producer->abortTransaction($timeoutMs); + } catch (SkcErrorException $e) { + $this->handleTransactionError($e); + } + } + + /** + * @param string $topic + * @return SkcProducerTopic + */ + private function getProducerTopicForTopic(string $topic): SkcProducerTopic + { + if (!isset($this->producerTopics[$topic])) { + $this->producerTopics[$topic] = $this->producer->getTopicHandle($topic); + } + + 
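+ // The handle is cached above, so repeated produce() calls for the same topic reuse a single SkcProducerTopic instance.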
return $this->producerTopics[$topic]; + } + + /** + * @param SkcErrorException $e + * + * @throws KafkaProducerTransactionAbortException + * @throws KafkaProducerTransactionFatalException + * @throws KafkaProducerTransactionRetryException + */ + private function handleTransactionError(SkcErrorException $e): void + { + if (true === $e->isRetriable()) { + throw new KafkaProducerTransactionRetryException( + KafkaProducerTransactionRetryException::RETRIABLE_TRANSACTION_EXCEPTION_MESSAGE + ); + } elseif (true === $e->transactionRequiresAbort()) { + throw new KafkaProducerTransactionAbortException( + KafkaProducerTransactionAbortException::TRANSACTION_REQUIRES_ABORT_EXCEPTION_MESSAGE + ); + } else { + $this->transactionInitialized = false; + // according to librdkafka documentation, everything that is not retriable, abortable or fatal is fatal + // fatal errors (so stated), need the producer to be destroyed + throw new KafkaProducerTransactionFatalException( + KafkaProducerTransactionFatalException::FATAL_TRANSACTION_EXCEPTION_MESSAGE + ); + } + } +} diff --git a/src/Producer/KafkaProducerBuilder.php b/src/Producer/KafkaProducerBuilder.php new file mode 100644 index 0000000..bf564f3 --- /dev/null +++ b/src/Producer/KafkaProducerBuilder.php @@ -0,0 +1,192 @@ +deliverReportCallback = new KafkaProducerDeliveryReportCallback(); + $this->errorCallback = new KafkaErrorCallback(); + $this->encoder = new NullEncoder(); + } + + /** + * Returns the producer builder + * + * @return KafkaProducerBuilderInterface + */ + public static function create(): KafkaProducerBuilderInterface + { + return new self(); + } + + /** + * Adds a broker to which you want to produce + * + * @param string $broker + * @return KafkaProducerBuilderInterface + */ + public function withAdditionalBroker(string $broker): KafkaProducerBuilderInterface + { + $this->brokers[] = $broker; + + return $this; + } + + /** + * Add configuration settings, otherwise the kafka defaults apply + * + * @param string[] $config + * @return KafkaProducerBuilderInterface + */ + public function withAdditionalConfig(array $config): KafkaProducerBuilderInterface + { + $this->config = $config + $this->config; + + return $this; + } + + /** + * Sets callback for the delivery report. The broker will send a delivery + * report for every message which describes if the delivery was successful or not + * + * @param callable $deliveryReportCallback + * @return KafkaProducerBuilderInterface + */ + public function withDeliveryReportCallback(callable $deliveryReportCallback): KafkaProducerBuilderInterface + { + $this->deliverReportCallback = $deliveryReportCallback; + + return $this; + } + + /** + * Set a callback to be called on errors. 
+ * The default callback will throw an exception for every error + * + * @param callable $errorCallback + * @return KafkaProducerBuilderInterface + */ + public function withErrorCallback(callable $errorCallback): KafkaProducerBuilderInterface + { + $this->errorCallback = $errorCallback; + + return $this; + } + + /** + * Callback for log related events + * + * @param callable $logCallback + * @return KafkaProducerBuilderInterface + */ + public function withLogCallback(callable $logCallback): KafkaProducerBuilderInterface + { + $this->logCallback = $logCallback; + + return $this; + } + + /** + * Lets you set a custom encoder for produce message + * + * @param EncoderInterface $encoder + * @return KafkaProducerBuilderInterface + */ + public function withEncoder(EncoderInterface $encoder): KafkaProducerBuilderInterface + { + $this->encoder = $encoder; + + return $this; + } + + /** + * Returns your producer instance + * + * @return KafkaProducerInterface + * @throws KafkaProducerException + */ + public function build(): KafkaProducerInterface + { + if ([] === $this->brokers) { + throw new KafkaProducerException(KafkaProducerException::NO_BROKER_EXCEPTION_MESSAGE); + } + + //Thread termination improvement (https://github.com/arnaud-lb/php-rdkafka#performance--low-latency-settings) + $this->config['socket.timeout.ms'] = '50'; + $this->config['queue.buffering.max.ms'] = '1'; + + if (function_exists('pcntl_sigprocmask')) { + pcntl_sigprocmask(SIG_BLOCK, array(SIGIO)); + $this->config['internal.termination.signal'] = (string) SIGIO; + unset($this->config['queue.buffering.max.ms']); + } + + $kafkaConfig = new KafkaConfiguration($this->brokers, [], $this->config); + + //set producer callbacks + $this->registerCallbacks($kafkaConfig); + + $rdKafkaProducer = new SkcProducer($kafkaConfig); + + return new KafkaProducer($rdKafkaProducer, $kafkaConfig, $this->encoder); + } + + /** + * @param KafkaConfiguration $conf + * @return void + */ + private function registerCallbacks(KafkaConfiguration $conf): void + { + $conf->setDrMsgCb($this->deliverReportCallback); + $conf->setErrorCb($this->errorCallback); + + if (null !== $this->logCallback) { + $conf->setLogCb($this->logCallback); + } + } +} diff --git a/src/Producer/KafkaProducerBuilderInterface.php b/src/Producer/KafkaProducerBuilderInterface.php new file mode 100644 index 0000000..c14a0bb --- /dev/null +++ b/src/Producer/KafkaProducerBuilderInterface.php @@ -0,0 +1,60 @@ +getConsumerMock(); + + $consumer + ->expects(self::once()) + ->method('assign') + ->with(null) + ->willThrowException(new SkcException($exceptionMessage, $exceptionCode)); + + call_user_func(new KafkaConsumerRebalanceCallback(), $consumer, 1, []); + } + + public function testInvokeAssign() + { + $partitions = [1, 2, 3]; + + $consumer = $this->getConsumerMock(); + + $consumer + ->expects(self::once()) + ->method('assign') + ->with($partitions) + ->willReturn(null); + + + call_user_func( + new KafkaConsumerRebalanceCallback(), + $consumer, + RD_KAFKA_RESP_ERR__ASSIGN_PARTITIONS, + $partitions + ); + } + + public function testInvokeRevoke() + { + $consumer = $this->getConsumerMock(); + + $consumer + ->expects(self::once()) + ->method('assign') + ->with(null) + ->willReturn(null); + + call_user_func(new KafkaConsumerRebalanceCallback(), $consumer, RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS); + } + + /** + * @return MockObject|SkcConsumer + */ + private function getConsumerMock() + { + //create mock to assign topics + $consumerMock = $this->getMockBuilder(SkcConsumer::class) + 
->disableOriginalConstructor() + ->onlyMethods(['assign', 'unsubscribe', 'getSubscription']) + ->getMock(); + + $consumerMock + ->expects(self::any()) + ->method('unsubscribe') + ->willReturn(null); + + $consumerMock + ->expects(self::any()) + ->method('getSubscription') + ->willReturn([]); + + return $consumerMock; + } +} diff --git a/tests/Unit/Callback/KafkaErrorCallbackTest.php b/tests/Unit/Callback/KafkaErrorCallbackTest.php new file mode 100644 index 0000000..4a89482 --- /dev/null +++ b/tests/Unit/Callback/KafkaErrorCallbackTest.php @@ -0,0 +1,28 @@ +getMockBuilder(SkcProducer::class) + ->disableOriginalConstructor() + ->getMock(); + } + + public function testInvokeDefault() + { + self::expectException(KafkaProducerException::class); + + $message = new Message(); + $message->err = -1; + + call_user_func(new KafkaProducerDeliveryReportCallback(), $this->getProducerMock(), $message); + } + + public function testInvokeTimeout() + { + self::expectException(KafkaProducerException::class); + + $message = new Message(); + $message->err = RD_KAFKA_RESP_ERR__MSG_TIMED_OUT; + + call_user_func(new KafkaProducerDeliveryReportCallback(), $this->getProducerMock(), $message); + } + + public function testInvokeNoError() + { + $message = new Message(); + $message->err = RD_KAFKA_RESP_ERR_NO_ERROR; + + $result = call_user_func(new KafkaProducerDeliveryReportCallback(), $this->getProducerMock(), $message); + + self::assertNull($result); + } +} diff --git a/tests/Unit/Conf/KafkaConfigurationTest.php b/tests/Unit/Conf/KafkaConfigurationTest.php new file mode 100644 index 0000000..08708eb --- /dev/null +++ b/tests/Unit/Conf/KafkaConfigurationTest.php @@ -0,0 +1,115 @@ +getBrokers()); + self::assertEquals($topicSubscriptions, $kafkaConfiguration->getTopicSubscriptions()); + } + + /** + * @dataProvider kafkaConfigurationDataProvider + * @param array $brokers + * @param array $topicSubscriptions + * @return void + */ + public function testGetConfiguration(array $brokers, array $topicSubscriptions): void + { + $kafkaConfiguration = new KafkaConfiguration($brokers, $topicSubscriptions); + + self::assertEquals($kafkaConfiguration->dump(), $kafkaConfiguration->getConfiguration()); + } + + /** + * @return array + */ + public function configValuesProvider(): array + { + return [ + [ 1, '1' ], + [ -1, '-1' ], + [ 1.123333, '1.123333' ], + [ -0.99999, '-0.99999' ], + [ true, 'true' ], + [ false, 'false' ], + [ null, '' ], + [ '', '' ], + [ ' ', ' ' ], + [ [], null ], + [ new stdClass(), null ], + ]; + } + + /** + * @dataProvider configValuesProvider + * @param mixed $inputValue + * @param mixed $expectedValue + */ + public function testConfigValues($inputValue, $expectedValue): void + { + $kafkaConfiguration = new KafkaConfiguration( + ['localhost'], + [new TopicSubscription('test-topic')], + [ + 'group.id' => $inputValue, + 'auto.commit.interval.ms' => 100 + ] + ); + + $config = $kafkaConfiguration->getConfiguration(); + + if (null === $expectedValue) { + self::assertArrayNotHasKey('group.id', $config); + return; + } + + self::assertEquals($config['metadata.broker.list'], 'localhost'); + self::assertEquals($expectedValue, $config['group.id']); + self::assertEquals('100', $config['auto.commit.interval.ms']); + } +} diff --git a/tests/Unit/Consumer/KafkaConsumerBuilderTest.php b/tests/Unit/Consumer/KafkaConsumerBuilderTest.php new file mode 100644 index 0000000..2327b79 --- /dev/null +++ b/tests/Unit/Consumer/KafkaConsumerBuilderTest.php @@ -0,0 +1,336 @@ +kafkaConsumerBuilder = KafkaConsumerBuilder::create(); + } + 
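+    /*
+     * For orientation, a minimal usage sketch of the builder under test, based only on the
+     * methods exercised in these tests; broker, group and topic values are placeholders and
+     * not taken from this patch:
+     *
+     *   $consumer = KafkaConsumerBuilder::create()
+     *       ->withAdditionalBroker('localhost:9092')
+     *       ->withConsumerGroup('example-group')
+     *       ->withAdditionalSubscription('example-topic')
+     *       ->build();
+     *
+     *   $consumer->subscribe();
+     *   $message = $consumer->consume();
+     *   $consumer->commit($message);
+     */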
+ /** + * @return void + */ + public function testCreate(): void + { + self::assertInstanceOf(KafkaConsumerBuilder::class, KafkaConsumerBuilder::create()); + } + + /** + * @return void + * @throws \ReflectionException + */ + public function testAddBroker(): void + { + self::assertNotSame( + $this->kafkaConsumerBuilder, + $clone = $this->kafkaConsumerBuilder->withAdditionalBroker('localhost') + ); + + $reflectionProperty = new \ReflectionProperty($clone, 'brokers'); + $reflectionProperty->setAccessible(true); + + self::assertSame(['localhost'], $reflectionProperty->getValue($clone)); + } + + /** + * @return void + * @throws \ReflectionException + */ + public function testSubscribeToTopic(): void + { + self::assertNotSame( + $this->kafkaConsumerBuilder, + $clone = $this->kafkaConsumerBuilder->withAdditionalSubscription('test-topic') + ); + + $reflectionProperty = new \ReflectionProperty($clone, 'topics'); + $reflectionProperty->setAccessible(true); + + self::isInstanceOf(TopicSubscription::class, $reflectionProperty->getValue($clone)); + } + + /** + * @return void + * @throws \ReflectionException + */ + public function testReplaceSubscribedToTopics(): void + { + self::assertNotSame( + $this->kafkaConsumerBuilder, + $clone = $this->kafkaConsumerBuilder->withSubscription('new-topic') + ); + + $reflectionProperty = new \ReflectionProperty($clone, 'topics'); + $reflectionProperty->setAccessible(true); + + $topicSubscription = $reflectionProperty->getValue($clone); + self::assertCount(1, $topicSubscription); + self::isInstanceOf(TopicSubscription::class, $topicSubscription[0]); + self::assertSame('new-topic', $topicSubscription[0]->getTopicName()); + } + + /** + * @return void + * @throws \ReflectionException + */ + public function testAddConfig(): void + { + $intialConfig = ['group.id' => 'test-group', 'enable.auto.offset.store' => true]; + $newConfig = ['offset.store.sync.interval.ms' => 60e3]; + $clone = $this->kafkaConsumerBuilder->withAdditionalConfig($intialConfig); + $clone = $clone->withAdditionalConfig($newConfig); + + $reflectionProperty = new \ReflectionProperty($clone, 'config'); + $reflectionProperty->setAccessible(true); + + self::assertSame( + [ + 'offset.store.sync.interval.ms' => 60e3, + 'group.id' => 'test-group', + 'enable.auto.offset.store' => true, + 'enable.auto.commit' => false, + 'auto.offset.reset' => 'earliest' + ], + $reflectionProperty->getValue($clone) + ); + self::assertNotSame($clone, $this->kafkaConsumerBuilder); + } + + /** + * @return void + * @throws \ReflectionException + */ + public function testSetDecoder(): void + { + $decoder = $this->getMockForAbstractClass(DecoderInterface::class); + + $clone = $this->kafkaConsumerBuilder->withDecoder($decoder); + + $reflectionProperty = new \ReflectionProperty($clone, 'decoder'); + $reflectionProperty->setAccessible(true); + + self::assertInstanceOf(DecoderInterface::class, $reflectionProperty->getValue($clone)); + self::assertNotSame($clone, $this->kafkaConsumerBuilder); + } + + /** + * @return void + * @throws \ReflectionException + */ + public function testSetConsumerGroup(): void + { + $clone = $this->kafkaConsumerBuilder->withConsumerGroup('test-consumer'); + + $reflectionProperty = new \ReflectionProperty($clone, 'consumerGroup'); + $reflectionProperty->setAccessible(true); + + self::assertSame('test-consumer', $reflectionProperty->getValue($clone)); + self::assertNotSame($clone, $this->kafkaConsumerBuilder); + } + + /** + * @return void + * @throws \ReflectionException + */ + public function 
testSetErrorCallback(): void + { + $callback = function () { + // Anonymous test method, no logic required + }; + + $clone = $this->kafkaConsumerBuilder->withErrorCallback($callback); + + $reflectionProperty = new \ReflectionProperty($clone, 'errorCallback'); + $reflectionProperty->setAccessible(true); + + self::assertSame($callback, $reflectionProperty->getValue($clone)); + self::assertNotSame($clone, $this->kafkaConsumerBuilder); + + $consumer = $clone + ->withAdditionalBroker('localhost') + ->withSubscription('test') + ->build(); + $conf = $consumer->getConfiguration(); + self::assertArrayHasKey('error_cb', $conf); + } + + /** + * @return void + * @throws \ReflectionException + */ + public function testSetRebalanceCallback(): void + { + $callback = function () { + // Anonymous test method, no logic required + }; + + $clone = $this->kafkaConsumerBuilder->withRebalanceCallback($callback); + + $reflectionProperty = new \ReflectionProperty($clone, 'rebalanceCallback'); + $reflectionProperty->setAccessible(true); + + self::assertSame($callback, $reflectionProperty->getValue($clone)); + self::assertNotSame($clone, $this->kafkaConsumerBuilder); + + $consumer = $clone + ->withAdditionalBroker('localhost') + ->withSubscription('test') + ->build(); + $conf = $consumer->getConfiguration(); + self::assertArrayHasKey('rebalance_cb', $conf); + } + + /** + * @return void + * @throws \ReflectionException + */ + public function testSetConsumeCallback(): void + { + $callback = function () { + // Anonymous test method, no logic required + }; + + $clone = $this->kafkaConsumerBuilder->withConsumeCallback($callback); + + $reflectionProperty = new \ReflectionProperty($clone, 'consumeCallback'); + $reflectionProperty->setAccessible(true); + + self::assertSame($callback, $reflectionProperty->getValue($clone)); + self::assertNotSame($clone, $this->kafkaConsumerBuilder); + + $consumer = $clone + ->withAdditionalBroker('localhost') + ->withSubscription('test') + ->build(); + $conf = $consumer->getConfiguration(); + self::assertArrayHasKey('consume_cb', $conf); + } + + /** + * @return void + * @throws \ReflectionException + */ + public function testSetOffsetCommitCallback(): void + { + $callback = function () { + // Anonymous test method, no logic required + }; + + $clone = $this->kafkaConsumerBuilder->withOffsetCommitCallback($callback); + + $reflectionProperty = new \ReflectionProperty($clone, 'offsetCommitCallback'); + $reflectionProperty->setAccessible(true); + + self::assertSame($callback, $reflectionProperty->getValue($clone)); + self::assertNotSame($clone, $this->kafkaConsumerBuilder); + + $consumer = $clone + ->withAdditionalBroker('localhost') + ->withSubscription('test') + ->build(); + $conf = $consumer->getConfiguration(); + self::assertArrayHasKey('offset_commit_cb', $conf); + } + + /** + * @return void + * @throws \ReflectionException + */ + public function testSetLogCallback(): void + { + $callback = function () { + // Anonymous test method, no logic required + }; + + $clone = $this->kafkaConsumerBuilder->withLogCallback($callback); + + $reflectionProperty = new \ReflectionProperty($clone, 'logCallback'); + $reflectionProperty->setAccessible(true); + + self::assertSame($callback, $reflectionProperty->getValue($clone)); + self::assertNotSame($clone, $this->kafkaConsumerBuilder); + } + + /** + * @return void + * @throws KafkaConsumerBuilderException + */ + public function testBuildFailMissingBrokers(): void + { + self::expectException(KafkaConsumerBuilderException::class); + 
self::expectExceptionMessage(KafkaConsumerBuilderException::NO_BROKER_EXCEPTION_MESSAGE); + + $this->kafkaConsumerBuilder->build(); + } + + /** + * @return void + */ + public function testBuildSuccess(): void + { + $callback = function ($kafka, $errId, $msg) { + // Anonymous test method, no logic required + }; + + /** @var $consumer KafkaHighLevelConsumer */ + $consumer = $this->kafkaConsumerBuilder + ->withAdditionalBroker('localhost') + ->withAdditionalSubscription('test-topic') + ->withRebalanceCallback($callback) + ->withOffsetCommitCallback($callback) + ->withConsumeCallback($callback) + ->withErrorCallback($callback) + ->withLogCallback($callback) + ->build(); + + self::assertInstanceOf(KafkaConsumerInterface::class, $consumer); + self::assertInstanceOf(KafkaHighLevelConsumer::class, $consumer); + } + + /** + * @return void + */ + public function testBuildHighLevelSuccess(): void + { + $callback = function ($kafka, $errId, $msg) { + // Anonymous test method, no logic required + }; + + /** @var $consumer KafkaHighLevelConsumer */ + $consumer = $this->kafkaConsumerBuilder + ->withAdditionalBroker('localhost') + ->withAdditionalSubscription('test-topic') + ->withRebalanceCallback($callback) + ->withErrorCallback($callback) + ->withLogCallback($callback) + ->build(); + + $conf = $consumer->getConfiguration(); + + self::assertInstanceOf(KafkaConsumerInterface::class, $consumer); + self::assertInstanceOf(KafkaHighLevelConsumerInterface::class, $consumer); + self::assertArrayHasKey('enable.auto.commit', $conf); + self::assertEquals($conf['enable.auto.commit'], 'false'); + } +} diff --git a/tests/Unit/Consumer/KafkaHighLevelConsumerTest.php b/tests/Unit/Consumer/KafkaHighLevelConsumerTest.php new file mode 100644 index 0000000..4ffdf7f --- /dev/null +++ b/tests/Unit/Consumer/KafkaHighLevelConsumerTest.php @@ -0,0 +1,905 @@ +createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $kafkaConfigurationMock->expects(self::exactly(2))->method('getTopicSubscriptions')->willReturnOnConsecutiveCalls($topics, []); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $rdKafkaConsumerMock->expects(self::once())->method('subscribe')->with(['testTopic', 'testTopic2']); + + $kafkaConsumer->subscribe(); + } + + /** + * @throws KafkaConsumerSubscriptionException + */ + public function testSubscribeSuccessWithParam(): void + { + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $kafkaConfigurationMock->expects(self::never())->method('getTopicSubscriptions'); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $rdKafkaConsumerMock->expects(self::once())->method('subscribe')->with(['testTopic3']); + + $kafkaConsumer->subscribe([new TopicSubscription('testTopic3')]); + } + + /** + * @throws KafkaConsumerSubscriptionException + */ + public function testSubscribeSuccessWithAssignmentWithPartitions(): void + { + $topics = [new TopicSubscription('testTopic', [1,2], RD_KAFKA_OFFSET_BEGINNING)]; + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + 
$kafkaConfigurationMock->expects(self::exactly(2))->method('getTopicSubscriptions')->willReturnOnConsecutiveCalls([], $topics); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $rdKafkaConsumerMock->expects(self::once())->method('assign'); + + $kafkaConsumer->subscribe(); + } + + /** + * @throws KafkaConsumerSubscriptionException + */ + public function testSubscribeSuccessWithAssignmentWithOffsetOnly(): void + { + $partitions = [ + $this->getMetadataPartitionMock(1), + $this->getMetadataPartitionMock(2) + ]; + + /** @var SkcConsumerTopic|MockObject $rdKafkaConsumerTopicMock */ + $rdKafkaConsumerTopicMock = $this->createMock(SkcConsumerTopic::class); + + /** @var SkcMetadataTopic|MockObject $rdKafkaMetadataTopicMock */ + $rdKafkaMetadataTopicMock = $this->createMock(SkcMetadataTopic::class); + $rdKafkaMetadataTopicMock + ->expects(self::once()) + ->method('getPartitions') + ->willReturn($partitions); + + /** @var SkcMetadata|MockObject $rdKafkaMetadataMock */ + $rdKafkaMetadataMock = $this->createMock(SkcMetadata::class); + $rdKafkaMetadataMock + ->expects(self::once()) + ->method('getTopics') + ->willReturnCallback( + function () use ($rdKafkaMetadataTopicMock) { + /** @var SkcMetadataCollection|MockObject $collection */ + $collection = $this->createMock(SkcMetadataCollection::class); + $collection + ->expects(self::once()) + ->method('current') + ->willReturn($rdKafkaMetadataTopicMock); + + return $collection; + } + ); + + $topics = [new TopicSubscription('testTopic', [], RD_KAFKA_OFFSET_END)]; + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $kafkaConfigurationMock->expects(self::exactly(2))->method('getTopicSubscriptions')->willReturnOnConsecutiveCalls([], $topics); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $rdKafkaConsumerMock->expects(self::once())->method('assign')->with( + $this->callback( + function (array $assignment) { + self::assertCount(2, $assignment); + return true; + } + ) + ); + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('getMetadata') + ->with(false, $rdKafkaConsumerTopicMock, 10000) + ->willReturn($rdKafkaMetadataMock); + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('getTopicHandle') + ->with('testTopic') + ->willReturn($rdKafkaConsumerTopicMock); + + + $kafkaConsumer->subscribe(); + } + + + /** + * @throws KafkaConsumerSubscriptionException + */ + public function testSubscribeFailureOnMixedSubscribe(): void + { + $topics = [ + new TopicSubscription('testTopic'), + new TopicSubscription('anotherTestTopic', [1,2], RD_KAFKA_OFFSET_BEGINNING) + ]; + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $kafkaConfigurationMock->expects(self::exactly(2))->method('getTopicSubscriptions')->willReturn($topics); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $rdKafkaConsumerMock->expects(self::never())->method('subscribe'); + $rdKafkaConsumerMock->expects(self::never())->method('assign'); + + + $this->expectException(KafkaConsumerSubscriptionException::class); + 
$this->expectExceptionMessage(KafkaConsumerSubscriptionException::MIXED_SUBSCRIPTION_EXCEPTION_MESSAGE); + + $kafkaConsumer->subscribe(); + } + + /** + * @throws KafkaConsumerSubscriptionException + */ + public function testSubscribeFailure(): void + { + $topics = [new TopicSubscription('testTopic')]; + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $kafkaConfigurationMock->expects(self::exactly(2))->method('getTopicSubscriptions')->willReturn($topics); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('subscribe') + ->with(['testTopic']) + ->willThrowException(new SkcException('Error', 100)); + + $this->expectException(KafkaConsumerSubscriptionException::class); + $this->expectExceptionCode(100); + $this->expectExceptionMessage('Error'); + + $kafkaConsumer->subscribe(); + } + + /** + * @throws KafkaConsumerSubscriptionException + */ + public function testUnsubscribeSuccesss(): void + { + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $rdKafkaConsumerMock->expects(self::once())->method('unsubscribe'); + + $kafkaConsumer->unsubscribe(); + } + + /** + * @throws KafkaConsumerSubscriptionException + */ + public function testUnsubscribeSuccesssConsumeFails(): void + { + self::expectException(KafkaConsumerConsumeException::class); + self::expectExceptionMessage(KafkaConsumerConsumeException::NOT_SUBSCRIBED_EXCEPTION_MESSAGE); + + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $rdKafkaConsumerMock->expects(self::once())->method('unsubscribe'); + + $kafkaConsumer->unsubscribe(); + + $kafkaConsumer->consume(); + } + + /** + * @throws KafkaConsumerSubscriptionException + */ + public function testUnsubscribeFailure(): void + { + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('unsubscribe') + ->willThrowException(new SkcException('Error', 100)); + + $this->expectException(KafkaConsumerSubscriptionException::class); + $this->expectExceptionCode(100); + $this->expectExceptionMessage('Error'); + + + $kafkaConsumer->unsubscribe(); + } + + /** + * @throws KafkaConsumerCommitException + */ + public function testCommitSuccesss(): void + { + $message = $this->getMockForAbstractClass(KafkaConsumerMessageInterface::class); + $message->expects(self::exactly(1))->method('getOffset')->willReturn(0); + $message->expects(self::exactly(1))->method('getTopicName')->willReturn('test'); + $message->expects(self::exactly(1))->method('getPartition')->willReturn(1); + $message2 
= $this->getMockForAbstractClass(KafkaConsumerMessageInterface::class); + $message2->expects(self::exactly(1))->method('getOffset')->willReturn(1); + $message2->expects(self::exactly(2))->method('getTopicName')->willReturn('test'); + $message2->expects(self::exactly(2))->method('getPartition')->willReturn(1); + $message3 = $this->getMockForAbstractClass(KafkaConsumerMessageInterface::class); + $message3->expects(self::exactly(2))->method('getOffset')->willReturn(2); + $message3->expects(self::exactly(1))->method('getTopicName')->willReturn('test'); + $message3->expects(self::exactly(1))->method('getPartition')->willReturn(1); + $message4 = $this->getMockForAbstractClass(KafkaConsumerMessageInterface::class); + $message4->expects(self::exactly(1))->method('getOffset')->willReturn(0); + $message4->expects(self::exactly(2))->method('getTopicName')->willReturn('test'); + $message4->expects(self::exactly(2))->method('getPartition')->willReturn(2); + + + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $rdKafkaConsumerMock->expects(self::once())->method('commit')->with( + $this->callback( + function (array $topicPartitions) { + self::assertCount(2, $topicPartitions); + self::assertInstanceOf(SkcTopicPartition::class, $topicPartitions['test-1']); + self::assertInstanceOf(SkcTopicPartition::class, $topicPartitions['test-2']); + self::assertEquals(3, $topicPartitions['test-1']->getOffset()); + self::assertEquals(1, $topicPartitions['test-2']->getOffset()); + + return true; + } + ) + ); + + $kafkaConsumer->commit([$message2, $message, $message3, $message4]); + } + + /** + * @throws KafkaConsumerCommitException + */ + public function testCommitSingleSuccesss(): void + { + $message = $this->getMockForAbstractClass(KafkaConsumerMessageInterface::class); + $message->expects(self::exactly(1))->method('getOffset')->willReturn(0); + $message->expects(self::exactly(2))->method('getTopicName')->willReturn('test'); + $message->expects(self::exactly(2))->method('getPartition')->willReturn(1); + + + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $rdKafkaConsumerMock->expects(self::once())->method('commit')->with( + $this->callback( + function (array $topicPartitions) { + self::assertCount(1, $topicPartitions); + self::assertInstanceOf(SkcTopicPartition::class, $topicPartitions['test-1']); + self::assertEquals(1, $topicPartitions['test-1']->getOffset()); + return true; + } + ) + ); + + $kafkaConsumer->commit($message); + } + + /** + * @throws KafkaConsumerCommitException + */ + public function testCommitAsyncSuccesss(): void + { + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $message = $this->createMock(KafkaConsumerMessageInterface::class); + + 
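+ // As with commit(), the consumer is expected to delegate to the underlying SkcConsumer, here via commitAsync().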
$rdKafkaConsumerMock->expects(self::once())->method('commitAsync'); + + $kafkaConsumer->commitAsync([$message]); + } + + /** + * @throws KafkaConsumerCommitException + */ + public function testCommitFails(): void + { + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $message = $this->createMock(KafkaConsumerMessageInterface::class); + + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('commit') + ->willThrowException(new SkcException('Failure', 99)); + + $this->expectException(KafkaConsumerCommitException::class); + $this->expectExceptionCode(99); + $this->expectExceptionMessage('Failure'); + + $kafkaConsumer->commit([$message]); + } + + /** + * @throws KafkaConsumerAssignmentException + */ + public function testAssignSuccess(): void + { + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $topicPartitions = ['test']; + + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('assign') + ->with($topicPartitions); + + $kafkaConsumer->assign($topicPartitions); + } + + /** + * @throws KafkaConsumerAssignmentException + */ + public function testAssignFail(): void + { + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $topicPartitions = ['test']; + + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('assign') + ->with($topicPartitions) + ->willThrowException(new SkcException('Failure', 99)); + + $this->expectException(KafkaConsumerAssignmentException::class); + $this->expectExceptionCode(99); + $this->expectExceptionMessage('Failure'); + + $kafkaConsumer->assign($topicPartitions); + } + + /** + * @throws KafkaConsumerAssignmentException + */ + public function testGetAssignment(): void + { + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $topicPartitions = ['test']; + + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('getAssignment') + ->willReturn($topicPartitions); + + $this->assertEquals($topicPartitions, $kafkaConsumer->getAssignment()); + } + + /** + * @throws KafkaConsumerAssignmentException + */ + public function testGetAssignmentException(): void + { + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('getAssignment') + ->willThrowException(new SkcException('Fail', 
99)); + + $this->expectException(KafkaConsumerAssignmentException::class); + $this->expectExceptionCode(99); + $this->expectExceptionMessage('Fail'); + $kafkaConsumer->getAssignment(); + } + + public function testKafkaConsumeWithDecode(): void + { + $message = new SkcMessage(); + $message->key = 'test'; + $message->payload = null; + $message->topic_name = 'test_topic'; + $message->partition = '9'; + $message->offset = '501'; + $message->timestamp = '500'; + $message->headers = 'header'; + $message->err = RD_KAFKA_RESP_ERR_NO_ERROR; + + $topics = [new TopicSubscription('testTopic')]; + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('subscribe') + ->with(['testTopic']); + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('consume') + ->with(10000) + ->willReturn($message); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $kafkaConfigurationMock->expects(self::exactly(2))->method('getTopicSubscriptions')->willReturnOnConsecutiveCalls($topics, []); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $decoderMock->expects(self::once())->method('decode')->with( + $this->callback( + function (KafkaConsumerMessageInterface $message) { + self::assertEquals('test', $message->getKey()); + self::assertNull($message->getBody()); + self::assertEquals('test_topic', $message->getTopicName()); + self::assertEquals(9, $message->getPartition()); + self::assertEquals(501, $message->getOffset()); + self::assertEquals(500, $message->getTimestamp()); + self::assertEquals(['header'], $message->getHeaders()); + + return true; + } + ) + ); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $kafkaConsumer->subscribe(); + $kafkaConsumer->consume(); + } + + public function testKafkaConsumeWithoutDecode(): void + { + $message = new SkcMessage(); + $message->key = 'test'; + $message->payload = null; + $message->topic_name = 'test_topic'; + $message->partition = 9; + $message->offset = 501; + $message->timestamp = 500; + $message->err = RD_KAFKA_RESP_ERR_NO_ERROR; + + $topics = [new TopicSubscription('testTopic')]; + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('subscribe') + ->with(['testTopic']); + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('consume') + ->with(10000) + ->willReturn($message); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $kafkaConfigurationMock->expects(self::exactly(2))->method('getTopicSubscriptions')->willReturnOnConsecutiveCalls($topics, []); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $decoderMock->expects(self::never())->method('decode'); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $kafkaConsumer->subscribe(); + $kafkaConsumer->consume(10000, false); + } + + public function testDecodeMessage(): void + { + $messageMock = $this->createMock(KafkaConsumerMessageInterface::class); + $messageMock->expects(self::once())->method('getKey')->willReturn('test'); + $messageMock->expects(self::once())->method('getBody')->willReturn('some body'); + $messageMock->expects(self::once())->method('getTopicName')->willReturn('test_topic'); + $messageMock->expects(self::once())->method('getPartition')->willReturn(9); + $messageMock->expects(self::once())->method('getOffset')->willReturn(501); + 
$messageMock->expects(self::once())->method('getTimestamp')->willReturn(500); + $messageMock->expects(self::once())->method('getHeaders')->willReturn(['some' => 'header']); + + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $decoderMock->expects(self::once())->method('decode')->with( + $this->callback( + function (KafkaConsumerMessageInterface $message) { + self::assertEquals('test', $message->getKey()); + self::assertEquals('some body', $message->getBody()); + self::assertEquals('test_topic', $message->getTopicName()); + self::assertEquals(9, $message->getPartition()); + self::assertEquals(501, $message->getOffset()); + self::assertEquals(500, $message->getTimestamp()); + self::assertEquals(['some' => 'header'], $message->getHeaders()); + return true; + } + ) + ); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer->decodeMessage($messageMock); + } + + /** + * @throws KafkaConsumerRequestException + */ + public function testGetCommittedOffsets(): void + { + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $committedOffsets = ['test']; + + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('getCommittedOffsets') + ->with($committedOffsets, 1) + ->willReturn($committedOffsets); + + $this->assertEquals($committedOffsets, $kafkaConsumer->getCommittedOffsets($committedOffsets, 1)); + } + + /** + * @throws KafkaConsumerRequestException + */ + public function testGetCommittedOffsetsException(): void + { + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('getCommittedOffsets') + ->willThrowException(new SkcException('Fail', 99)); + + $this->expectException(KafkaConsumerRequestException::class); + $this->expectExceptionCode(99); + $this->expectExceptionMessage('Fail'); + $kafkaConsumer->getCommittedOffsets([], 1); + } + + /** + * @return void + */ + public function testGetOffsetPositions(): void + { + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('getOffsetPositions') + ->with([]) + ->willReturn([]); + + $kafkaConsumer->getOffsetPositions([]); + } + + /** + * @return void + */ + public function testClose(): void + { + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + 
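+ // close() is expected to be forwarded directly to the underlying SkcConsumer.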
$rdKafkaConsumerMock->expects(self::once())->method('close'); + + $kafkaConsumer->close(); + } + + /** + * @param int $partitionId + * @return SkcMetadataPartition|MockObject + */ + private function getMetadataPartitionMock(int $partitionId): SkcMetadataPartition + { + $partitionMock = $this->getMockBuilder(SkcMetadataPartition::class) + ->disableOriginalConstructor() + ->onlyMethods(['getId']) + ->getMock(); + + $partitionMock + ->expects(self::once()) + ->method('getId') + ->willReturn($partitionId); + + return $partitionMock; + } + + /** + * @return void + */ + public function testOffsetsForTimes(): void + { + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('offsetsForTimes') + ->with([], 1000) + ->willReturn([]); + + $kafkaConsumer->offsetsForTimes([], 1000); + } + + /** + * @return void + */ + public function testGetConfiguration(): void + { + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $kafkaConfigurationMock->expects(self::any())->method('dump')->willReturn([]); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + self::assertIsArray($kafkaConsumer->getConfiguration()); + } + + /** + * @return void + */ + public function testGetFirstOffsetForTopicPartition(): void + { + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('queryWatermarkOffsets') + ->with('test-topic', 1, 0, 0, 1000) + ->willReturnCallback( + function (string $topic, int $partition, int &$lowOffset, int &$highOffset, int $timeoutMs) { + $lowOffset++; + } + ); + + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $lowOffset = $kafkaConsumer->getFirstOffsetForTopicPartition('test-topic', 1, 1000); + + self::assertEquals(1, $lowOffset); + } + + /** + * @return void + */ + public function testGetLastOffsetForTopicPartition(): void + { + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('queryWatermarkOffsets') + ->with('test-topic', 1, 0, 0, 1000) + ->willReturnCallback( + function (string $topic, int $partition, int &$lowOffset, int &$highOffset, int $timeoutMs) { + $highOffset += 5; + } + ); + + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $lowOffset = $kafkaConsumer->getLastOffsetForTopicPartition('test-topic', 1, 1000); + + $this->assertEquals(5, $lowOffset); + } + + /** + * @throws KafkaConsumerConsumeException + * @throws KafkaConsumerEndOfPartitionException + * @throws KafkaConsumerSubscriptionException + * @throws KafkaConsumerTimeoutException + * @return void + */ + public function 
testConsumeThrowsEofExceptionIfQueueConsumeReturnsNull(): void + { + self::expectException(KafkaConsumerEndOfPartitionException::class); + self::expectExceptionCode(RD_KAFKA_RESP_ERR__PARTITION_EOF); + self::expectExceptionMessage(kafka_err2str(RD_KAFKA_RESP_ERR__PARTITION_EOF)); + + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('consume') + ->with(10000) + ->willReturn(null); + + $kafkaConsumer->subscribe(); + $kafkaConsumer->consume(); + } + + /** + * @throws KafkaConsumerConsumeException + * @throws KafkaConsumerEndOfPartitionException + * @throws KafkaConsumerSubscriptionException + * @throws KafkaConsumerTimeoutException + * @return void + */ + public function testConsumeDedicatedEofException(): void + { + self::expectException(KafkaConsumerEndOfPartitionException::class); + self::expectExceptionCode(RD_KAFKA_RESP_ERR__PARTITION_EOF); + self::expectExceptionMessage(kafka_err2str(RD_KAFKA_RESP_ERR__PARTITION_EOF)); + + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $message = new SkcMessage(); + $message->err = RD_KAFKA_RESP_ERR__PARTITION_EOF; + + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('consume') + ->with(10000) + ->willReturn($message); + + $kafkaConsumer->subscribe(); + $kafkaConsumer->consume(); + } + + /** + * @throws KafkaConsumerConsumeException + * @throws KafkaConsumerEndOfPartitionException + * @throws KafkaConsumerSubscriptionException + * @throws KafkaConsumerTimeoutException + * @return void + */ + public function testConsumeDedicatedTimeoutException(): void + { + self::expectException(KafkaConsumerTimeoutException::class); + self::expectExceptionCode(RD_KAFKA_RESP_ERR__TIMED_OUT); + self::expectExceptionMessage(kafka_err2str(RD_KAFKA_RESP_ERR__TIMED_OUT)); + + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $message = new SkcMessage(); + $message->err = RD_KAFKA_RESP_ERR__TIMED_OUT; + + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('consume') + ->with(1000) + ->willReturn($message); + + $kafkaConsumer->subscribe(); + $kafkaConsumer->consume(1000); + } + + /** + * @throws KafkaConsumerConsumeException + * @throws KafkaConsumerEndOfPartitionException + * @throws KafkaConsumerSubscriptionException + * @throws KafkaConsumerTimeoutException + * @return void + */ + public function testConsumeThrowsExceptionIfConsumedMessageHasNoTopicAndErrorCodeIsNotOkay(): void + { + self::expectException(KafkaConsumerConsumeException::class); + self::expectExceptionMessage('Unknown error'); + + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = 
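// Sketch of a consume loop that reacts to the dedicated exceptions asserted in
// the tests above. An already-built, subscribed consumer ($consumer) is assumed;
// the 10s timeout mirrors the default used by these tests and the handler
// bodies are placeholders.
use PhpKafka\Exception\KafkaConsumerConsumeException;
use PhpKafka\Exception\KafkaConsumerEndOfPartitionException;
use PhpKafka\Exception\KafkaConsumerTimeoutException;

while (true) {
    try {
        $message = $consumer->consume(10000);
        // process $message->getBody() here, then commit according to your strategy
    } catch (KafkaConsumerEndOfPartitionException $e) {
        continue; // reached the end of a partition, keep polling
    } catch (KafkaConsumerTimeoutException $e) {
        continue; // nothing arrived within the timeout
    } catch (KafkaConsumerConsumeException $e) {
        // transport/broker error; $e->getKafkaMessage() may carry the raw message
        break;
    }
}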
$this->getMockForAbstractClass(DecoderInterface::class); + + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + /** @var SkcMessage|MockObject $rdKafkaMessageMock */ + $rdKafkaMessageMock = $this->createMock(SkcMessage::class); + $rdKafkaMessageMock->err = RD_KAFKA_RESP_ERR__ALL_BROKERS_DOWN; + $rdKafkaMessageMock->partition = 1; + $rdKafkaMessageMock->offset = 103; + $rdKafkaMessageMock->topic_name = null; + $rdKafkaMessageMock + ->expects(self::once()) + ->method('errstr') + ->willReturn('Unknown error'); + + $topicSubscription = new TopicSubscription('test-topic', [1], 103); + + $rdKafkaConsumerMock + ->expects(self::once()) + ->method('consume') + ->with(10000) + ->willReturn($rdKafkaMessageMock); + $kafkaConfigurationMock + ->expects(self::exactly(2)) + ->method('getTopicSubscriptions') + ->willReturn([$topicSubscription]); + + $kafkaConsumer->subscribe(); + $kafkaConsumer->consume(); + } + + /** + * @throws KafkaConsumerConsumeException + * @throws KafkaConsumerEndOfPartitionException + * @throws KafkaConsumerTimeoutException + * @return void + */ + public function testConsumeThrowsExceptionIfConsumerIsCurrentlyNotSubscribed(): void + { + self::expectException(KafkaConsumerConsumeException::class); + self::expectExceptionMessage('This consumer is currently not subscribed'); + + $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); + $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); + + $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + + $kafkaConsumer->consume(); + } +} diff --git a/tests/Unit/Consumer/TopicSubscriptionTest.php b/tests/Unit/Consumer/TopicSubscriptionTest.php new file mode 100644 index 0000000..5eef2a3 --- /dev/null +++ b/tests/Unit/Consumer/TopicSubscriptionTest.php @@ -0,0 +1,33 @@ +getTopicName()); + self::assertEquals($partitions, $topicSubscription->getPartitions()); + self::assertEquals($offset, $topicSubscription->getOffset()); + + $topicSubscription->setPartitions($newPartitions); + + self::assertEquals($newPartitions, $topicSubscription->getPartitions()); + } +} diff --git a/tests/Unit/Exception/KafkaConsumerConsumeExceptionTest.php b/tests/Unit/Exception/KafkaConsumerConsumeExceptionTest.php new file mode 100644 index 0000000..e1fce5f --- /dev/null +++ b/tests/Unit/Exception/KafkaConsumerConsumeExceptionTest.php @@ -0,0 +1,45 @@ +getMockForAbstractClass(KafkaConsumerMessageInterface::class); + + $exception = new KafkaConsumerConsumeException('', 0, $message); + + self::assertSame($message, $exception->getKafkaMessage()); + } + + public function testGetAndConstructOfKafkaConsumerConsumeExceptionWithNullAsMessage() + { + $exception = new KafkaConsumerConsumeException('test', 100, null); + + self::assertNull($exception->getKafkaMessage()); + self::assertEquals('test', $exception->getMessage()); + self::assertEquals(100, $exception->getCode()); + } + + public function testGetDefaults() + { + $exception = new KafkaConsumerConsumeException(); + + self::assertNull($exception->getKafkaMessage()); + self::assertEquals('', $exception->getMessage()); + self::assertEquals(0, $exception->getCode()); + self::assertNull($exception->getPrevious()); + + } +} diff --git a/tests/Unit/Message/Decoder/AvroDecoderTest.php b/tests/Unit/Message/Decoder/AvroDecoderTest.php new file mode 100644 index 0000000..70901cf --- /dev/null +++ 
b/tests/Unit/Message/Decoder/AvroDecoderTest.php @@ -0,0 +1,161 @@ +getMockForAbstractClass(KafkaConsumerMessageInterface::class); + $message->expects(self::once())->method('getBody')->willReturn(null); + + $registry = $this->getMockForAbstractClass(AvroSchemaRegistryInterface::class); + $registry->expects(self::never())->method('hasBodySchemaForTopic'); + $registry->expects(self::never())->method('hasKeySchemaForTopic'); + + $recordSerializer = $this->getMockBuilder(RecordSerializer::class)->disableOriginalConstructor()->getMock(); + $recordSerializer->expects(self::never())->method('decodeMessage'); + + $decoder = new AvroDecoder($registry, $recordSerializer); + + $result = $decoder->decode($message); + + self::assertInstanceOf(KafkaConsumerMessageInterface::class, $result); + self::assertNull($result->getBody()); + } + + public function testDecodeWithSchema() + { + $schemaDefinition = $this->getMockBuilder(\AvroSchema::class)->disableOriginalConstructor()->getMock(); + + $avroSchema = $this->getMockForAbstractClass(KafkaAvroSchemaInterface::class); + $avroSchema->expects(self::exactly(2))->method('getDefinition')->willReturn($schemaDefinition); + + $message = $this->getMockForAbstractClass(KafkaConsumerMessageInterface::class); + $message->expects(self::exactly(3))->method('getTopicName')->willReturn('test-topic'); + $message->expects(self::once())->method('getPartition')->willReturn(0); + $message->expects(self::once())->method('getOffset')->willReturn(1); + $message->expects(self::once())->method('getTimestamp')->willReturn(time()); + $message->expects(self::exactly(2))->method('getKey')->willReturn('test-key'); + $message->expects(self::exactly(2))->method('getBody')->willReturn('body'); + $message->expects(self::once())->method('getHeaders')->willReturn([]); + + $registry = $this->getMockForAbstractClass(AvroSchemaRegistryInterface::class); + $registry->expects(self::once())->method('getBodySchemaForTopic')->willReturn($avroSchema); + $registry->expects(self::once())->method('getKeySchemaForTopic')->willReturn($avroSchema); + $registry->expects(self::once())->method('hasBodySchemaForTopic')->willReturn(true); + $registry->expects(self::once())->method('hasKeySchemaForTopic')->willReturn(true); + + $recordSerializer = $this->getMockBuilder(RecordSerializer::class)->disableOriginalConstructor()->getMock(); + $recordSerializer->expects(self::exactly(2)) + ->method('decodeMessage') + ->withConsecutive( + [$message->getKey(), $schemaDefinition], + [$message->getBody(), $schemaDefinition], + ) + ->willReturnOnConsecutiveCalls('decoded-key', ['test']); + + $decoder = new AvroDecoder($registry, $recordSerializer); + + $result = $decoder->decode($message); + + self::assertInstanceOf(KafkaConsumerMessageInterface::class, $result); + self::assertSame(['test'], $result->getBody()); + self::assertSame('decoded-key', $result->getKey()); + + } + + public function testDecodeKeyMode() + { + $schemaDefinition = $this->getMockBuilder(\AvroSchema::class)->disableOriginalConstructor()->getMock(); + + $avroSchema = $this->getMockForAbstractClass(KafkaAvroSchemaInterface::class); + $avroSchema->expects(self::once())->method('getDefinition')->willReturn($schemaDefinition); + + $message = $this->getMockForAbstractClass(KafkaConsumerMessageInterface::class); + $message->expects(self::exactly(3))->method('getTopicName')->willReturn('test-topic'); + $message->expects(self::once())->method('getPartition')->willReturn(0); + $message->expects(self::once())->method('getOffset')->willReturn(1); + 
$message->expects(self::once())->method('getTimestamp')->willReturn(time()); + $message->expects(self::exactly(2))->method('getKey')->willReturn('test-key'); + $message->expects(self::once())->method('getBody')->willReturn('body'); + $message->expects(self::once())->method('getHeaders')->willReturn([]); + + $registry = $this->getMockForAbstractClass(AvroSchemaRegistryInterface::class); + $registry->expects(self::never())->method('getBodySchemaForTopic'); + $registry->expects(self::once())->method('getKeySchemaForTopic')->willReturn($avroSchema); + $registry->expects(self::once())->method('hasBodySchemaForTopic')->willReturn(false); + $registry->expects(self::once())->method('hasKeySchemaForTopic')->willReturn(true); + + + $recordSerializer = $this->getMockBuilder(RecordSerializer::class)->disableOriginalConstructor()->getMock(); + $recordSerializer->expects(self::once())->method('decodeMessage')->with($message->getKey(), $schemaDefinition)->willReturn('decoded-key'); + + $decoder = new AvroDecoder($registry, $recordSerializer); + + $result = $decoder->decode($message); + + self::assertInstanceOf(KafkaConsumerMessageInterface::class, $result); + self::assertSame('decoded-key', $result->getKey()); + self::assertSame('body', $result->getBody()); + + } + + public function testDecodeBodyMode() + { + $schemaDefinition = $this->getMockBuilder(\AvroSchema::class)->disableOriginalConstructor()->getMock(); + + $avroSchema = $this->getMockForAbstractClass(KafkaAvroSchemaInterface::class); + $avroSchema->expects(self::once())->method('getDefinition')->willReturn($schemaDefinition); + + $message = $this->getMockForAbstractClass(KafkaConsumerMessageInterface::class); + $message->expects(self::exactly(3))->method('getTopicName')->willReturn('test-topic'); + $message->expects(self::once())->method('getPartition')->willReturn(0); + $message->expects(self::once())->method('getOffset')->willReturn(1); + $message->expects(self::once())->method('getTimestamp')->willReturn(time()); + $message->expects(self::once())->method('getKey')->willReturn('test-key'); + $message->expects(self::exactly(2))->method('getBody')->willReturn('body'); + $message->expects(self::once())->method('getHeaders')->willReturn([]); + + $registry = $this->getMockForAbstractClass(AvroSchemaRegistryInterface::class); + $registry->expects(self::once())->method('getBodySchemaForTopic')->willReturn($avroSchema); + $registry->expects(self::never())->method('getKeySchemaForTopic'); + $registry->expects(self::once())->method('hasBodySchemaForTopic')->willReturn(true); + $registry->expects(self::once())->method('hasKeySchemaForTopic')->willReturn(false); + + $recordSerializer = $this->getMockBuilder(RecordSerializer::class)->disableOriginalConstructor()->getMock(); + $recordSerializer->expects(self::once())->method('decodeMessage')->with($message->getBody(), $schemaDefinition)->willReturn(['test']); + + $decoder = new AvroDecoder($registry, $recordSerializer); + + $result = $decoder->decode($message); + + self::assertInstanceOf(KafkaConsumerMessageInterface::class, $result); + self::assertSame('test-key', $result->getKey()); + self::assertSame(['test'], $result->getBody()); + + } + + public function testGetRegistry() + { + $registry = $this->getMockForAbstractClass(AvroSchemaRegistryInterface::class); + $recordSerializer = $this->getMockBuilder(RecordSerializer::class)->disableOriginalConstructor()->getMock(); + + $decoder = new AvroDecoder($registry, $recordSerializer); + + self::assertSame($registry, $decoder->getRegistry()); + } +} diff --git 
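// Sketch of wiring the AvroDecoder covered by the tests above. $flixRegistry is
// assumed to be an already-configured FlixTech\SchemaRegistryApi\Registry
// instance, RecordSerializer comes from flix-tech/avro-serde-php (the class the
// tests mock), and the topic/subject names are placeholders.
use FlixTech\AvroSerializer\Objects\RecordSerializer;
use PhpKafka\Message\Decoder\AvroDecoder;
use PhpKafka\Message\KafkaAvroSchema;
use PhpKafka\Message\Registry\AvroSchemaRegistry;

$registry = new AvroSchemaRegistry($flixRegistry);
$registry->addBodySchemaMappingForTopic('test-topic', new KafkaAvroSchema('test-topic-value'));
$registry->addKeySchemaMappingForTopic('test-topic', new KafkaAvroSchema('test-topic-key'));

$decoder = new AvroDecoder($registry, new RecordSerializer($flixRegistry));
// $decoded = $decoder->decode($consumedMessage); // key and body come back Avro-decoded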
a/tests/Unit/Message/Decoder/JsonDecoderTest.php b/tests/Unit/Message/Decoder/JsonDecoderTest.php new file mode 100644 index 0000000..e128c35 --- /dev/null +++ b/tests/Unit/Message/Decoder/JsonDecoderTest.php @@ -0,0 +1,44 @@ +getMockForAbstractClass(KafkaConsumerMessageInterface::class); + $message->expects(self::once())->method('getBody')->willReturn('{"name":"foo"}'); + $decoder = new JsonDecoder(); + $result = $decoder->decode($message); + + self::assertInstanceOf(KafkaConsumerMessageInterface::class, $result); + self::assertEquals(['name' => 'foo'], $result->getBody()); + } + + /** + * @return void + */ + public function testDecodeNonJson(): void + { + $message = $this->getMockForAbstractClass(KafkaConsumerMessageInterface::class); + $message->expects(self::once())->method('getBody')->willReturn('test'); + $decoder = new JsonDecoder(); + + self::expectException(\JsonException::class); + + $decoder->decode($message); + } +} diff --git a/tests/Unit/Message/Decoder/NullDecoderTest.php b/tests/Unit/Message/Decoder/NullDecoderTest.php new file mode 100644 index 0000000..4869f0b --- /dev/null +++ b/tests/Unit/Message/Decoder/NullDecoderTest.php @@ -0,0 +1,26 @@ +getMockForAbstractClass(KafkaConsumerMessageInterface::class); + + self::assertSame($message, (new NullDecoder())->decode($message)); + } +} diff --git a/tests/Unit/Message/Encoder/AvroEncoderTest.php b/tests/Unit/Message/Encoder/AvroEncoderTest.php new file mode 100644 index 0000000..1d99132 --- /dev/null +++ b/tests/Unit/Message/Encoder/AvroEncoderTest.php @@ -0,0 +1,172 @@ +getMockForAbstractClass(KafkaProducerMessageInterface::class); + $producerMessage->expects(self::exactly(2))->method('getBody')->willReturn(null); + + $registry = $this->getMockForAbstractClass(AvroSchemaRegistryInterface::class); + $registry->expects(self::never())->method('hasBodySchemaForTopic'); + $registry->expects(self::never())->method('hasKeySchemaForTopic'); + $recordSerializer = $this->getMockBuilder(RecordSerializer::class)->disableOriginalConstructor()->getMock(); + $recordSerializer->expects(self::never())->method('encodeRecord'); + $encoder = new AvroEncoder($registry, $recordSerializer); + $result = $encoder->encode($producerMessage); + + self::assertInstanceOf(KafkaProducerMessageInterface::class, $result); + self::assertSame($producerMessage, $result); + self::assertNull($result->getBody()); + } + + public function testEncodeWithoutSchemaDefinition() + { + $avroSchema = $this->getMockForAbstractClass(KafkaAvroSchemaInterface::class); + $avroSchema->expects(self::once())->method('getDefinition')->willReturn(null); + + $producerMessage = $this->getMockForAbstractClass(KafkaProducerMessageInterface::class); + $producerMessage->expects(self::once())->method('getTopicName')->willReturn('test'); + $producerMessage->expects(self::once())->method('getBody')->willReturn('test'); + + $registry = $this->getMockForAbstractClass(AvroSchemaRegistryInterface::class); + $registry->expects(self::once())->method('hasBodySchemaForTopic')->willReturn(true); + $registry->expects(self::once())->method('getBodySchemaForTopic')->willReturn($avroSchema); + + self::expectException(AvroEncoderException::class); + self::expectExceptionMessage( + sprintf( + AvroEncoderException::UNABLE_TO_LOAD_DEFINITION_MESSAGE, + $avroSchema->getName() + ) + ); + + $recordSerializer = $this->getMockBuilder(RecordSerializer::class)->disableOriginalConstructor()->getMock(); + + $encoder = new AvroEncoder($registry, $recordSerializer); + $encoder->encode($producerMessage); + } + + public 
function testEncodeSuccessWithSchema() + { + $schemaDefinition = $this->getMockBuilder(\AvroSchema::class)->disableOriginalConstructor()->getMock(); + + $avroSchema = $this->getMockForAbstractClass(KafkaAvroSchemaInterface::class); + $avroSchema->expects(self::exactly(4))->method('getName')->willReturn('schemaName'); + $avroSchema->expects(self::never())->method('getVersion'); + $avroSchema->expects(self::exactly(4))->method('getDefinition')->willReturn($schemaDefinition); + + $registry = $this->getMockForAbstractClass(AvroSchemaRegistryInterface::class); + $registry->expects(self::once())->method('getBodySchemaForTopic')->willReturn($avroSchema); + $registry->expects(self::once())->method('getKeySchemaForTopic')->willReturn($avroSchema); + $registry->expects(self::once())->method('hasBodySchemaForTopic')->willReturn(true); + $registry->expects(self::once())->method('hasKeySchemaForTopic')->willReturn(true); + + $producerMessage = $this->getMockForAbstractClass(KafkaProducerMessageInterface::class); + $producerMessage->expects(self::exactly(2))->method('getTopicName')->willReturn('test'); + $producerMessage->expects(self::once())->method('getBody')->willReturn([]); + $producerMessage->expects(self::once())->method('getKey')->willReturn('test-key'); + $producerMessage->expects(self::once())->method('withBody')->with('encodedValue')->willReturn($producerMessage); + $producerMessage->expects(self::once())->method('withKey')->with('encodedKey')->willReturn($producerMessage); + + $recordSerializer = $this->getMockBuilder(RecordSerializer::class)->disableOriginalConstructor()->getMock(); + $recordSerializer + ->expects(self::exactly(2)) + ->method('encodeRecord') + ->withConsecutive( + [$avroSchema->getName(), $avroSchema->getDefinition(), []], + [$avroSchema->getName(), $avroSchema->getDefinition(), 'test-key'] + )->willReturnOnConsecutiveCalls('encodedValue', 'encodedKey'); + + $encoder = new AvroEncoder($registry, $recordSerializer); + + self::assertSame($producerMessage, $encoder->encode($producerMessage)); + } + + public function testEncodeKeyMode() + { + $schemaDefinition = $this->getMockBuilder(\AvroSchema::class)->disableOriginalConstructor()->getMock(); + + $avroSchema = $this->getMockForAbstractClass(KafkaAvroSchemaInterface::class); + $avroSchema->expects(self::exactly(2))->method('getName')->willReturn('schemaName'); + $avroSchema->expects(self::never())->method('getVersion'); + $avroSchema->expects(self::exactly(2))->method('getDefinition')->willReturn($schemaDefinition); + + $registry = $this->getMockForAbstractClass(AvroSchemaRegistryInterface::class); + $registry->expects(self::never())->method('getBodySchemaForTopic'); + $registry->expects(self::once())->method('getKeySchemaForTopic')->willReturn($avroSchema); + $registry->expects(self::once())->method('hasBodySchemaForTopic')->willReturn(false); + $registry->expects(self::once())->method('hasKeySchemaForTopic')->willReturn(true); + + $producerMessage = $this->getMockForAbstractClass(KafkaProducerMessageInterface::class); + $producerMessage->expects(self::exactly(2))->method('getTopicName')->willReturn('test'); + $producerMessage->expects(self::once())->method('getBody')->willReturn([]); + $producerMessage->expects(self::once())->method('getKey')->willReturn('test-key'); + $producerMessage->expects(self::never())->method('withBody'); + $producerMessage->expects(self::once())->method('withKey')->with('encodedKey')->willReturn($producerMessage); + + $recordSerializer = 
$this->getMockBuilder(RecordSerializer::class)->disableOriginalConstructor()->getMock(); + $recordSerializer->expects(self::once())->method('encodeRecord')->with($avroSchema->getName(), $avroSchema->getDefinition(), 'test-key')->willReturn('encodedKey'); + + $encoder = new AvroEncoder($registry, $recordSerializer); + + self::assertSame($producerMessage, $encoder->encode($producerMessage)); + } + + public function testEncodeBodyMode() + { + $schemaDefinition = $this->getMockBuilder(\AvroSchema::class)->disableOriginalConstructor()->getMock(); + + $avroSchema = $this->getMockForAbstractClass(KafkaAvroSchemaInterface::class); + $avroSchema->expects(self::exactly(2))->method('getName')->willReturn('schemaName'); + $avroSchema->expects(self::never())->method('getVersion'); + $avroSchema->expects(self::exactly(2))->method('getDefinition')->willReturn($schemaDefinition); + + $registry = $this->getMockForAbstractClass(AvroSchemaRegistryInterface::class); + $registry->expects(self::once())->method('getBodySchemaForTopic')->willReturn($avroSchema); + $registry->expects(self::never())->method('getKeySchemaForTopic'); + $registry->expects(self::once())->method('hasBodySchemaForTopic')->willReturn(true); + $registry->expects(self::once())->method('hasKeySchemaForTopic')->willReturn(false); + + $producerMessage = $this->getMockForAbstractClass(KafkaProducerMessageInterface::class); + $producerMessage->expects(self::exactly(2))->method('getTopicName')->willReturn('test'); + $producerMessage->expects(self::once())->method('getBody')->willReturn([]); + $producerMessage->expects(self::once())->method('getKey')->willReturn('test-key'); + $producerMessage->expects(self::once())->method('withBody')->with('encodedBody')->willReturn($producerMessage); + $producerMessage->expects(self::never())->method('withKey'); + + $recordSerializer = $this->getMockBuilder(RecordSerializer::class)->disableOriginalConstructor()->getMock(); + $recordSerializer->expects(self::once())->method('encodeRecord')->with($avroSchema->getName(), $avroSchema->getDefinition(), [])->willReturn('encodedBody'); + + $encoder = new AvroEncoder($registry, $recordSerializer); + + self::assertSame($producerMessage, $encoder->encode($producerMessage)); + } + + public function testGetRegistry() + { + $registry = $this->getMockForAbstractClass(AvroSchemaRegistryInterface::class); + + $recordSerializer = $this->getMockBuilder(RecordSerializer::class)->disableOriginalConstructor()->getMock(); + $encoder = new AvroEncoder($registry, $recordSerializer); + + self::assertSame($registry, $encoder->getRegistry()); + } +} diff --git a/tests/Unit/Message/Encoder/JsonEncoderTest.php b/tests/Unit/Message/Encoder/JsonEncoderTest.php new file mode 100644 index 0000000..e87c2c1 --- /dev/null +++ b/tests/Unit/Message/Encoder/JsonEncoderTest.php @@ -0,0 +1,44 @@ +getMockForAbstractClass(KafkaProducerMessageInterface::class); + $message->expects(self::once())->method('getBody')->willReturn(['name' => 'foo']); + $message->expects(self::once())->method('withBody')->with('{"name":"foo"}')->willReturn($message); + + $encoder = $this->getMockForAbstractClass(JsonEncoder::class); + + self::assertSame($message, $encoder->encode($message)); + } + + /** + * @return void + */ + public function testEncodeThrowsException(): void + { + $message = $this->getMockForAbstractClass(KafkaProducerMessageInterface::class); + $message->expects(self::once())->method('getBody')->willReturn(chr(255)); + + $encoder = $this->getMockForAbstractClass(JsonEncoder::class); + + 
self::expectException(\JsonException::class); + + $encoder->encode($message); + } +} diff --git a/tests/Unit/Message/Encoder/NullEncoderTest.php b/tests/Unit/Message/Encoder/NullEncoderTest.php new file mode 100644 index 0000000..8ca0774 --- /dev/null +++ b/tests/Unit/Message/Encoder/NullEncoderTest.php @@ -0,0 +1,26 @@ +getMockForAbstractClass(KafkaProducerMessageInterface::class); + + $this->assertSame($message, (new NullEncoder())->encode($message)); + } +} diff --git a/tests/Unit/Message/KafkaAvroSchemaTest.php b/tests/Unit/Message/KafkaAvroSchemaTest.php new file mode 100644 index 0000000..4778484 --- /dev/null +++ b/tests/Unit/Message/KafkaAvroSchemaTest.php @@ -0,0 +1,54 @@ +getMockBuilder(AvroSchema::class)->disableOriginalConstructor()->getMock(); + + $schemaName = 'testSchema'; + $version = 9; + + $avroSchema = new KafkaAvroSchema($schemaName, $version, $definition); + + self::assertEquals($schemaName, $avroSchema->getName()); + self::assertEquals($version, $avroSchema->getVersion()); + self::assertEquals($definition, $avroSchema->getDefinition()); + } + + public function testSetters() + { + $definition = $this->getMockBuilder(AvroSchema::class)->disableOriginalConstructor()->getMock(); + + $schemaName = 'testSchema'; + + $avroSchema = new KafkaAvroSchema($schemaName); + + $avroSchema->setDefinition($definition); + + self::assertEquals($definition, $avroSchema->getDefinition()); + } + + public function testAvroSchemaWithJustName() + { + $schemaName = 'testSchema'; + + $avroSchema = new KafkaAvroSchema($schemaName); + + self::assertEquals($schemaName, $avroSchema->getName()); + self::assertEquals(KafkaAvroSchemaInterface::LATEST_VERSION, $avroSchema->getVersion()); + self::assertNull($avroSchema->getDefinition()); + } +} diff --git a/tests/Unit/Message/KafkaConsumerMessageTest.php b/tests/Unit/Message/KafkaConsumerMessageTest.php new file mode 100644 index 0000000..1ca25de --- /dev/null +++ b/tests/Unit/Message/KafkaConsumerMessageTest.php @@ -0,0 +1,44 @@ + 'value' ]; + + $message = new KafkaConsumerMessage( + $topic, + $partition, + $offset, + $timestamp, + $key, + $body, + $headers + ); + + self::assertEquals($key, $message->getKey()); + self::assertEquals($body, $message->getBody()); + self::assertEquals($topic, $message->getTopicName()); + self::assertEquals($offset, $message->getOffset()); + self::assertEquals($partition, $message->getPartition()); + self::assertEquals($timestamp, $message->getTimestamp()); + self::assertEquals($headers, $message->getHeaders()); + } +} diff --git a/tests/Unit/Message/KafkaProducerMessageTest.php b/tests/Unit/Message/KafkaProducerMessageTest.php new file mode 100644 index 0000000..a04aebc --- /dev/null +++ b/tests/Unit/Message/KafkaProducerMessageTest.php @@ -0,0 +1,64 @@ + 'value' ]; + $expectedHeader = [ + 'key' => 'value', + 'anotherKey' => 1 + ]; + + $message = KafkaProducerMessage::create($topic, $partition) + ->withKey($key) + ->withBody($body) + ->withHeaders($headers) + ->withHeader('anotherKey', 1); + + self::assertEquals($key, $message->getKey()); + self::assertEquals($body, $message->getBody()); + self::assertEquals($topic, $message->getTopicName()); + self::assertEquals($partition, $message->getPartition()); + self::assertEquals($expectedHeader, $message->getHeaders()); + } + + public function testClone() + { + $key = '1234-1234-1234'; + $body = 'foo bar baz'; + $topic = 'test'; + $partition = 1; + $headers = [ 'key' => 'value' ]; + + + $origMessage = KafkaProducerMessage::create($topic, $partition); + + $message = 
$origMessage->withKey($key); + self::assertNotSame($origMessage, $message); + + $message = $origMessage->withBody($body); + self::assertNotSame($origMessage, $message); + + $message = $origMessage->withHeaders($headers); + self::assertNotSame($origMessage, $message); + + $message = $origMessage->withHeader('anotherKey', 1); + self::assertNotSame($origMessage, $message); + } +} diff --git a/tests/Unit/Message/Registry/AvroSchemaRegistryTest.php b/tests/Unit/Message/Registry/AvroSchemaRegistryTest.php new file mode 100644 index 0000000..047af9f --- /dev/null +++ b/tests/Unit/Message/Registry/AvroSchemaRegistryTest.php @@ -0,0 +1,183 @@ +getMockForAbstractClass(Registry::class); + + $schema = $this->getMockForAbstractClass(KafkaAvroSchemaInterface::class); + + $registry = new AvroSchemaRegistry($flixRegistry); + + $registry->addBodySchemaMappingForTopic('test', $schema); + + $reflectionProperty = new \ReflectionProperty($registry, 'schemaMapping'); + $reflectionProperty->setAccessible(true); + + $schemaMapping = $reflectionProperty->getValue($registry); + + self::assertArrayHasKey(AvroSchemaRegistryInterface::BODY_IDX, $schemaMapping); + self::assertArrayHasKey('test', $schemaMapping[AvroSchemaRegistryInterface::BODY_IDX]); + self::assertSame($schema, $schemaMapping[AvroSchemaRegistryInterface::BODY_IDX]['test']); + } + + public function testAddKeySchemaMappingForTopic() + { + $flixRegistry = $this->getMockForAbstractClass(Registry::class); + + $schema = $this->getMockForAbstractClass(KafkaAvroSchemaInterface::class); + + $registry = new AvroSchemaRegistry($flixRegistry); + + $registry->addKeySchemaMappingForTopic('test2', $schema); + + $reflectionProperty = new \ReflectionProperty($registry, 'schemaMapping'); + $reflectionProperty->setAccessible(true); + + $schemaMapping = $reflectionProperty->getValue($registry); + + self::assertArrayHasKey(AvroSchemaRegistryInterface::KEY_IDX, $schemaMapping); + self::assertArrayHasKey('test2', $schemaMapping[AvroSchemaRegistryInterface::KEY_IDX]); + self::assertSame($schema, $schemaMapping[AvroSchemaRegistryInterface::KEY_IDX]['test2']); + } + + public function testHasBodySchemaMappingForTopic() + { + $flixRegistry = $this->getMockForAbstractClass(Registry::class); + $schema = $this->getMockForAbstractClass(KafkaAvroSchemaInterface::class); + + $registry = new AvroSchemaRegistry($flixRegistry); + $registry->addBodySchemaMappingForTopic('test', $schema); + + self::assertTrue($registry->hasBodySchemaForTopic('test')); + self::assertFalse($registry->hasBodySchemaForTopic('test2')); + } + + public function testHasKeySchemaMappingForTopic() + { + $flixRegistry = $this->getMockForAbstractClass(Registry::class); + $schema = $this->getMockForAbstractClass(KafkaAvroSchemaInterface::class); + + $registry = new AvroSchemaRegistry($flixRegistry); + $registry->addKeySchemaMappingForTopic('test', $schema); + + self::assertTrue($registry->hasKeySchemaForTopic('test')); + self::assertFalse($registry->hasKeySchemaForTopic('test2')); + } + + public function testGetBodySchemaForTopicWithNoMapping() + { + self::expectException(AvroSchemaRegistryException::class); + self::expectExceptionMessage( + sprintf( + AvroSchemaRegistryException::SCHEMA_MAPPING_NOT_FOUND, + 'test', + AvroSchemaRegistryInterface::BODY_IDX + ) + ); + + $flixRegistry = $this->getMockForAbstractClass(Registry::class); + + $registry = new AvroSchemaRegistry($flixRegistry); + + $registry->getBodySchemaForTopic('test'); + } + + public function testGetBodySchemaForTopicWithMappingWithDefinition() + { + 
$definition = $this->getMockBuilder(AvroSchema::class)->disableOriginalConstructor()->getMock(); + + $flixRegistry = $this->getMockForAbstractClass(Registry::class); + + $schema = $this->getMockForAbstractClass(KafkaAvroSchemaInterface::class); + $schema->expects(self::once())->method('getDefinition')->willReturn($definition); + + $registry = new AvroSchemaRegistry($flixRegistry); + + $registry->addBodySchemaMappingForTopic('test', $schema); + + self::assertSame($schema, $registry->getBodySchemaForTopic('test')); + } + + public function testGetKeySchemaForTopicWithMappingWithDefinition() + { + $definition = $this->getMockBuilder(AvroSchema::class)->disableOriginalConstructor()->getMock(); + + $flixRegistry = $this->getMockForAbstractClass(Registry::class); + + $schema = $this->getMockForAbstractClass(KafkaAvroSchemaInterface::class); + $schema->expects(self::once())->method('getDefinition')->willReturn($definition); + + $registry = new AvroSchemaRegistry($flixRegistry); + + $registry->addKeySchemaMappingForTopic('test2', $schema); + + self::assertSame($schema, $registry->getKeySchemaForTopic('test2')); + } + + public function testGetBodySchemaForTopicWithMappingWithoutDefinitionLatest() + { + $definition = $this->getMockBuilder(AvroSchema::class)->disableOriginalConstructor()->getMock(); + + $flixRegistry = $this->getMockForAbstractClass(Registry::class); + $flixRegistry->expects(self::once())->method('latestVersion')->with('test-schema')->willReturn($definition); + + $schema = $this->getMockForAbstractClass(KafkaAvroSchemaInterface::class); + $schema->expects(self::once())->method('getDefinition')->willReturn(null); + $schema->expects(self::once())->method('getVersion')->willReturn(KafkaAvroSchemaInterface::LATEST_VERSION); + $schema->expects(self::once())->method('getName')->willReturn('test-schema'); + $schema->expects(self::once())->method('setDefinition')->with($definition); + + $registry = new AvroSchemaRegistry($flixRegistry); + + $registry->addBodySchemaMappingForTopic('test', $schema); + + $registry->getBodySchemaForTopic('test'); + } + + public function testGetBodySchemaForTopicWithMappingWithoutDefinitionVersion() + { + $definition = $this->getMockBuilder(AvroSchema::class)->disableOriginalConstructor()->getMock(); + + $flixRegistry = $this->getMockForAbstractClass(Registry::class); + $flixRegistry->expects(self::once())->method('schemaForSubjectAndVersion')->with('test-schema', 1)->willReturn($definition); + + $schema = $this->getMockForAbstractClass(KafkaAvroSchemaInterface::class); + $schema->expects(self::once())->method('getDefinition')->willReturn(null); + $schema->expects(self::exactly(2))->method('getVersion')->willReturn(1); + $schema->expects(self::once())->method('getName')->willReturn('test-schema'); + $schema->expects(self::once())->method('setDefinition')->with($definition); + + $registry = new AvroSchemaRegistry($flixRegistry); + + $registry->addBodySchemaMappingForTopic('test', $schema); + + $registry->getBodySchemaForTopic('test'); + } + + public function testGetTopicSchemaMapping() + { + $flixRegistry = $this->getMockForAbstractClass(Registry::class); + + $registry = new AvroSchemaRegistry($flixRegistry); + + self::assertIsArray($registry->getTopicSchemaMapping()); + } +} diff --git a/tests/Unit/Producer/KafkaProducerBuilderTest.php b/tests/Unit/Producer/KafkaProducerBuilderTest.php new file mode 100644 index 0000000..9df09db --- /dev/null +++ b/tests/Unit/Producer/KafkaProducerBuilderTest.php @@ -0,0 +1,170 @@ +kafkaProducerBuilder = 
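// Sketch based on the registry tests above: a mapping may pin a schema version
// or rely on the latest version, in which case the definition is fetched lazily
// (latestVersion() vs schemaForSubjectAndVersion() on the underlying registry).
// $flixRegistry, topic and subject names are assumptions.
use PhpKafka\Message\KafkaAvroSchema;
use PhpKafka\Message\Registry\AvroSchemaRegistry;

$registry = new AvroSchemaRegistry($flixRegistry);

// Latest version, resolved on first use.
$registry->addBodySchemaMappingForTopic('orders', new KafkaAvroSchema('orders-value'));

// Pinned to version 3 of the key subject.
$registry->addKeySchemaMappingForTopic('orders', new KafkaAvroSchema('orders-key', 3));

$bodySchema = $registry->getBodySchemaForTopic('orders'); // definition populated here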
KafkaProducerBuilder::create(); + } + + /** + * @return void + * @throws \ReflectionException + */ + public function testAddConfig(): void + { + $config = ['auto.offset.reset' => 'earliest']; + $clone = $this->kafkaProducerBuilder->withAdditionalConfig($config); + $config = ['auto.offset.reset' => 'latest']; + $clone = $clone->withAdditionalConfig($config); + + $reflectionProperty = new \ReflectionProperty($clone, 'config'); + $reflectionProperty->setAccessible(true); + + self::assertSame($config, $reflectionProperty->getValue($clone)); + } + + /** + * @return void + * @throws \ReflectionException + */ + public function testAddBroker(): void + { + $clone = $this->kafkaProducerBuilder->withAdditionalBroker('localhost'); + + $reflectionProperty = new \ReflectionProperty($clone, 'brokers'); + $reflectionProperty->setAccessible(true); + + self::assertSame(['localhost'], $reflectionProperty->getValue($clone)); + } + + /** + * @return void + * @throws \ReflectionException + */ + public function testSetEncoder(): void + { + $encoder = $this->getMockForAbstractClass(EncoderInterface::class); + + $clone = $this->kafkaProducerBuilder->withEncoder($encoder); + + $reflectionProperty = new \ReflectionProperty($clone, 'encoder'); + $reflectionProperty->setAccessible(true); + + self::assertInstanceOf(EncoderInterface::class, $reflectionProperty->getValue($clone)); + } + + /** + * @return void + * @throws \ReflectionException + */ + public function testSetDeliveryReportCallback(): void + { + $callback = function () { + // Anonymous test method, no logic required + }; + + $clone = $this->kafkaProducerBuilder->withDeliveryReportCallback($callback); + + $reflectionProperty = new \ReflectionProperty($clone, 'deliverReportCallback'); + $reflectionProperty->setAccessible(true); + + self::assertSame($callback, $reflectionProperty->getValue($clone)); + } + + /** + * @return void + * @throws \ReflectionException + */ + public function testSetErrorCallback(): void + { + $callback = function () { + // Anonymous test method, no logic required + }; + + $clone = $this->kafkaProducerBuilder->withErrorCallback($callback); + + $reflectionProperty = new \ReflectionProperty($clone, 'errorCallback'); + $reflectionProperty->setAccessible(true); + + self::assertSame($callback, $reflectionProperty->getValue($clone)); + } + + /** + * @throws KafkaProducerException + */ + public function testBuildNoBroker(): void + { + self::expectException(KafkaProducerException::class); + + $this->kafkaProducerBuilder->build(); + } + + /** + * @return void + */ + public function testBuild(): void + { + $callback = function ($kafka, $errId, $msg) { + // Anonymous test method, no logic required + }; + + $producer = $this->kafkaProducerBuilder + ->withAdditionalBroker('localhost') + ->withDeliveryReportCallback($callback) + ->withErrorCallback($callback) + ->withLogCallback($callback) + ->build(); + + self::assertInstanceOf(KafkaProducerInterface::class, $producer); + } + + /** + * @return void + * @throws \ReflectionException + */ + public function testKafkaProducerBuilderConfig(): void + { + $callback = function ($kafka, $errId, $msg) { + // Anonymous test method, no logic required + }; + + $producer = $this->kafkaProducerBuilder + ->withAdditionalBroker('localhost') + ->withDeliveryReportCallback($callback) + ->withErrorCallback($callback) + ->withLogCallback($callback) + ->build(); + + $reflectionProperty = new \ReflectionProperty($this->kafkaProducerBuilder, 'config'); + $reflectionProperty->setAccessible(true); + + self::assertSame( + [ + 
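// Sketch of building a producer the way these builder tests exercise it. The
// broker address, config value and callback bodies are placeholders; the
// callback signatures follow the ones used in the tests.
use PhpKafka\Producer\KafkaProducerBuilder;

$producer = KafkaProducerBuilder::create()
    ->withAdditionalBroker('localhost:9092')
    ->withAdditionalConfig(['compression.codec' => 'lz4'])
    ->withDeliveryReportCallback(function ($kafka, $message) {
        // inspect $message->err here
    })
    ->withErrorCallback(function ($kafka, $errId, $msg) {
        // log broker/transport errors
    })
    ->build();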
'socket.timeout.ms' => '50', + 'internal.termination.signal' => (string) SIGIO + ], + $reflectionProperty->getValue($this->kafkaProducerBuilder) + ); + + self::assertInstanceOf(KafkaProducerInterface::class, $producer); + } +} diff --git a/tests/Unit/Producer/KafkaProducerTest.php b/tests/Unit/Producer/KafkaProducerTest.php new file mode 100644 index 0000000..ce177c8 --- /dev/null +++ b/tests/Unit/Producer/KafkaProducerTest.php @@ -0,0 +1,528 @@ +kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); + $this->rdKafkaProducerMock = $this->createMock(SkcProducer::class); + $this->encoderMock = $this->getMockForAbstractClass(EncoderInterface::class); + $this->kafkaProducer = new KafkaProducer($this->rdKafkaProducerMock, $this->kafkaConfigurationMock, $this->encoderMock); + } + + /** + * @return void + * @throws KafkaProducerException + */ + public function testProduceError(): void + { + $message = KafkaProducerMessage::create('test-topic', 1) + ->withKey('asdf-asdf-asfd-asdf') + ->withBody('some test content') + ->withHeaders([ 'key' => 'value' ]); + + $this->encoderMock->expects(self::once())->method('encode')->willReturn($message); + + self::expectException(KafkaProducerException::class); + + /** @var SkcProducerTopic|MockObject $rdKafkaProducerTopicMock */ + $rdKafkaProducerTopicMock = $this->createMock(SkcProducerTopic::class); + $rdKafkaProducerTopicMock + ->expects(self::once()) + ->method('producev') + ->with( + $message->getPartition(), + RD_KAFKA_MSG_F_BLOCK, + $message->getBody(), + $message->getKey(), + $message->getHeaders() + ) + ->willThrowException(new KafkaProducerException()); + + $this->rdKafkaProducerMock + ->expects(self::any()) + ->method('getTopicHandle') + ->willReturn($rdKafkaProducerTopicMock); + + $this->kafkaProducer->produce($message); + } + + public function testProduceSuccess() + { + $message = KafkaProducerMessage::create('test-topic', 1) + ->withKey('asdf-asdf-asfd-asdf') + ->withBody('some test content') + ->withHeaders([ 'key' => 'value' ]); + + /** @var SkcProducerTopic|MockObject $rdKafkaProducerTopicMock */ + $rdKafkaProducerTopicMock = $this->createMock(SkcProducerTopic::class); + $rdKafkaProducerTopicMock + ->expects(self::once()) + ->method('producev') + ->with( + $message->getPartition(), + RD_KAFKA_MSG_F_BLOCK, + $message->getBody(), + $message->getKey(), + $message->getHeaders() + ); + + $this->encoderMock + ->expects(self::once()) + ->method('encode') + ->with($message) + ->willReturn($message); + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('getTopicHandle') + ->with('test-topic') + ->willReturn($rdKafkaProducerTopicMock); + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('poll') + ->with(0); + + $this->kafkaProducer->produce($message); + } + + public function testSyncProduceSuccess() + { + $message = KafkaProducerMessage::create('test-topic', 1) + ->withKey('asdf-asdf-asfd-asdf') + ->withBody('some test content') + ->withHeaders([ 'key' => 'value' ]); + + /** @var SkcProducerTopic|MockObject $rdKafkaProducerTopicMock */ + $rdKafkaProducerTopicMock = $this->createMock(SkcProducerTopic::class); + $rdKafkaProducerTopicMock + ->expects(self::once()) + ->method('producev') + ->with( + $message->getPartition(), + RD_KAFKA_MSG_F_BLOCK, + $message->getBody(), + $message->getKey(), + $message->getHeaders() + ); + + $this->encoderMock + ->expects(self::once()) + ->method('encode') + ->with($message) + ->willReturn($message); + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('getTopicHandle') 
+ ->with('test-topic') + ->willReturn($rdKafkaProducerTopicMock); + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('poll') + ->with(-1); + + $this->kafkaProducer->syncProduce($message); + } + + public function testPoll() + { + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('poll') + ->with(1000); + + $this->kafkaProducer->poll(1000); + } + + public function testPollDefault() + { + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('poll') + ->with(0); + + $this->kafkaProducer->poll(); + } + + public function testPollUntilQueueSizeReached() + { + $message = KafkaProducerMessage::create('test-topic', 1) + ->withKey('asdf-asdf-asfd-asdf') + ->withBody('some test content') + ->withHeaders([ 'key' => 'value' ]); + + /** @var SkcProducerTopic|MockObject $rdKafkaProducerTopicMock */ + $rdKafkaProducerTopicMock = $this->createMock(SkcProducerTopic::class); + $rdKafkaProducerTopicMock + ->expects(self::once()) + ->method('producev') + ->with( + $message->getPartition(), + RD_KAFKA_MSG_F_BLOCK, + $message->getBody(), + $message->getKey(), + $message->getHeaders() + ); + + $this->rdKafkaProducerMock + ->expects(self::exactly(3)) + ->method('getOutQLen') + ->willReturnCallback( + function () { + static $messageCount = 0; + switch ($messageCount++) { + case 0: + case 1: + return 1; + default: + return 0; + } + } + ); + $this->encoderMock + ->expects(self::once()) + ->method('encode') + ->with($message) + ->willReturn($message); + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('getTopicHandle') + ->with('test-topic') + ->willReturn($rdKafkaProducerTopicMock); + $this->rdKafkaProducerMock + ->expects(self::exactly(2)) + ->method('poll') + ->with(0); + + $this->kafkaProducer->produce($message, false); + $this->kafkaProducer->pollUntilQueueSizeReached(); + } + + /** + * @return void + */ + public function testPurge(): void + { + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('purge') + ->with(RD_KAFKA_PURGE_F_QUEUE) + ->willReturn(RD_KAFKA_RESP_ERR_NO_ERROR); + + $this->kafkaProducer->purge(RD_KAFKA_PURGE_F_QUEUE); + } + + /** + * @return void + */ + public function testFlush(): void + { + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('flush') + ->with(100) + ->willReturn(RD_KAFKA_RESP_ERR_NO_ERROR); + + $this->kafkaProducer->flush(100); + } + + /** + * @return void + */ + public function testGetMetadataForTopic(): void + { + $topicMock = $this->createMock(SkcProducerTopic::class); + $metadataMock = $this->createMock(SkcMetadata::class); + $metadataCollectionMock = $this->createMock(SkcMetadataCollection::class); + $metadataTopic = $this->createMock(SkcMetadataTopic::class); + $metadataMock + ->expects(self::once()) + ->method('getTopics') + ->willReturn($metadataCollectionMock); + $metadataCollectionMock + ->expects(self::once()) + ->method('current') + ->willReturn($metadataTopic); + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('getTopicHandle') + ->with('test-topic-name') + ->willReturn($topicMock); + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('getMetadata') + ->with(false, $topicMock, 1000) + ->willReturn($metadataMock); + $this->kafkaProducer->getMetadataForTopic('test-topic-name', 1000); + } + + /** + * @return void + */ + public function testGetMetadataForTopicDefault(): void + { + $topicMock = $this->createMock(SkcProducerTopic::class); + $metadataMock = $this->createMock(SkcMetadata::class); + $metadataCollectionMock = 
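// Sketch tying together the produce/poll behaviour asserted in this test class:
// produce() polls once per message by default, produce($message, false) only
// enqueues, and pollUntilQueueSizeReached() drains the local queue, e.g. before
// shutdown. $producer, topic, partition and payload are placeholders.
use PhpKafka\Message\KafkaProducerMessage;

$message = KafkaProducerMessage::create('test-topic', 0)
    ->withKey('example-key')
    ->withBody('example payload')
    ->withHeader('source', 'example');

$producer->produce($message);           // encode, enqueue and poll(0)
$producer->produce($message, false);    // enqueue only, no poll
$producer->pollUntilQueueSizeReached(); // poll until the out queue is empty
$producer->flush(10000);                // wait up to 10s for outstanding delivery reports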
$this->createMock(SkcMetadataCollection::class); + $metadataTopic = $this->createMock(SkcMetadataTopic::class); + $metadataMock + ->expects(self::once()) + ->method('getTopics') + ->willReturn($metadataCollectionMock); + $metadataCollectionMock + ->expects(self::once()) + ->method('current') + ->willReturn($metadataTopic); + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('getTopicHandle') + ->with('test-topic-name') + ->willReturn($topicMock); + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('getMetadata') + ->with(false, $topicMock, 10000) + ->willReturn($metadataMock); + $this->kafkaProducer->getMetadataForTopic('test-topic-name'); + } + + /** + * @return void + */ + public function testBeginTransactionSuccess(): void + { + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('initTransactions') + ->with(10000) + ->willReturn(RD_KAFKA_RESP_ERR_NO_ERROR); + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('beginTransaction') + ->willReturn(RD_KAFKA_RESP_ERR_NO_ERROR); + + self::assertNull($this->kafkaProducer->beginTransaction(10000)); + } + + /** + * @return void + */ + public function testBeginTransactionConsecutiveSuccess(): void + { + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('initTransactions') + ->with(10000) + ->willReturn(RD_KAFKA_RESP_ERR_NO_ERROR); + $this->rdKafkaProducerMock + ->expects(self::exactly(2)) + ->method('beginTransaction') + ->willReturn(RD_KAFKA_RESP_ERR_NO_ERROR); + + self::assertNull($this->kafkaProducer->beginTransaction(10000)); + self::assertNull($this->kafkaProducer->beginTransaction(10000)); + + } + + /** + * @return void + */ + public function testBeginTransactionWithRetriableError(): void + { + self::expectException(KafkaProducerTransactionRetryException::class); + + $errorMock = $this->createMock(SkcErrorException::class); + $errorMock->expects(self::once())->method('isRetriable')->willReturn(true); + + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('initTransactions') + ->with(10000) + ->willThrowException($errorMock); + + $this->rdKafkaProducerMock->expects(self::never())->method('beginTransaction'); + + self::assertNull($this->kafkaProducer->beginTransaction(10000)); + } + + /** + * @return void + */ + public function testBeginTransactionWithAbortError(): void + { + self::expectException(KafkaProducerTransactionAbortException::class); + + $errorMock = $this->createMock(SkcErrorException::class); + $errorMock->expects(self::once())->method('isRetriable')->willReturn(false); + $errorMock->expects(self::once())->method('transactionRequiresAbort')->willReturn(true); + + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('initTransactions') + ->with(10000) + ->willThrowException($errorMock); + + $this->rdKafkaProducerMock->expects(self::never())->method('beginTransaction'); + + self::assertNull($this->kafkaProducer->beginTransaction(10000)); + } + + /** + * @return void + */ + public function testBeginTransactionWithFatalError(): void + { + self::expectException(KafkaProducerTransactionFatalException::class); + + $errorMock = $this->createMock(SkcErrorException::class); + $errorMock->expects(self::once())->method('isRetriable')->willReturn(false); + $errorMock->expects(self::once())->method('transactionRequiresAbort')->willReturn(false); + + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('initTransactions') + ->with(10000) + ->willThrowException($errorMock); + + 
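// Sketch of the transactional flow these tests cover: begin, produce, commit,
// and map the dedicated exceptions onto retry/abort/fail decisions. $producer,
// $message and the 10s timeout are placeholders.
use PhpKafka\Exception\KafkaProducerTransactionAbortException;
use PhpKafka\Exception\KafkaProducerTransactionFatalException;
use PhpKafka\Exception\KafkaProducerTransactionRetryException;

try {
    $producer->beginTransaction(10000);
    $producer->produce($message);
    $producer->commitTransaction(10000);
} catch (KafkaProducerTransactionRetryException $e) {
    // transient failure: the same block can be retried with this producer
} catch (KafkaProducerTransactionAbortException $e) {
    $producer->abortTransaction(10000); // then start a fresh transaction
} catch (KafkaProducerTransactionFatalException $e) {
    // the producer can no longer be used transactionally; recreate it
}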
$this->rdKafkaProducerMock->expects(self::never())->method('beginTransaction'); + + self::assertNull($this->kafkaProducer->beginTransaction(10000)); + } + + /** + * @return void + */ + public function testBeginTransactionWithFatalErrorWillTriggerInit(): void + { + $firstExceptionCaught = false; + + self::expectException(KafkaProducerTransactionFatalException::class); + + $errorMock = $this->createMock(SkcErrorException::class); + $errorMock->expects(self::exactly(2))->method('isRetriable')->willReturn(false); + $errorMock->expects(self::exactly(2))->method('transactionRequiresAbort')->willReturn(false); + + $this->rdKafkaProducerMock + ->expects(self::exactly(2)) + ->method('initTransactions') + ->with(10000) + ->willThrowException($errorMock); + + $this->rdKafkaProducerMock->expects(self::never())->method('beginTransaction'); + + try { + self::assertNull($this->kafkaProducer->beginTransaction(10000)); + } catch (KafkaProducerTransactionFatalException $e) { + $firstExceptionCaught = true; + } + + self::assertTrue($firstExceptionCaught); + self::assertNull($this->kafkaProducer->beginTransaction(10000)); + } + + /** + * @return void + */ + public function testAbortTransactionSuccess(): void + { + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('abortTransaction') + ->willReturn(RD_KAFKA_RESP_ERR_NO_ERROR); + + self::assertNull($this->kafkaProducer->abortTransaction(10000)); + } + + /** + * @return void + */ + public function testAbortTransactionFailure(): void + { + self::expectException(KafkaProducerTransactionRetryException::class); + self::expectExceptionMessage(KafkaProducerTransactionRetryException::RETRIABLE_TRANSACTION_EXCEPTION_MESSAGE); + + $exception = new SkcErrorException('test', 1, 'some failure', false, true, false); + + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('abortTransaction') + ->willThrowException($exception); + + $this->kafkaProducer->abortTransaction(10000); + } + + /** + * @return void + */ + public function testCommitTransactionSuccess(): void + { + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('commitTransaction') + ->with(10000) + ->willReturn(RD_KAFKA_RESP_ERR_NO_ERROR); + + self::assertNull($this->kafkaProducer->commitTransaction(10000)); + } + + /** + * @return void + */ + public function testCommitTransactionFailure(): void + { + self::expectException(KafkaProducerTransactionRetryException::class); + self::expectExceptionMessage(KafkaProducerTransactionRetryException::RETRIABLE_TRANSACTION_EXCEPTION_MESSAGE); + + $exception = new SkcErrorException('test', 1, 'some failure', false, true, false); + + $this->rdKafkaProducerMock + ->expects(self::once()) + ->method('commitTransaction') + ->with(10000) + ->willThrowException($exception); + + $this->kafkaProducer->commitTransaction(10000); + } +} diff --git a/tests/bootstrap.php b/tests/bootstrap.php new file mode 100644 index 0000000..5ec5f37 --- /dev/null +++ b/tests/bootstrap.php @@ -0,0 +1,5 @@ +setPsr4('PhpKafka\\Tests\\', __DIR__); From f27b383a4121bccafb70d26050b87f18d368ae8d Mon Sep 17 00:00:00 2001 From: nick Date: Sun, 11 Apr 2021 01:11:55 +0200 Subject: [PATCH 2/8] add jc as author --- composer.json | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/composer.json b/composer.json index 0d5e410..f89b314 100644 --- a/composer.json +++ b/composer.json @@ -5,6 +5,10 @@ "BSD-3-Clause" ], "authors": [ + { + "name": "Jobcloud AG", + "homepage": "https://careers.jobcloud.ch/en/our-team/?term=133" + }, { "name": "php-kafka", "homepage": 
"https://php-kafka.com" From a025e0a5658973a4f30e59f8d8569306da849f5b Mon Sep 17 00:00:00 2001 From: nick Date: Sun, 11 Apr 2021 01:15:27 +0200 Subject: [PATCH 3/8] add chat url --- composer.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/composer.json b/composer.json index f89b314..882b13b 100644 --- a/composer.json +++ b/composer.json @@ -64,6 +64,6 @@ "support": { "issues": "https://github.com/php-kafka/php-simple-kafka-lib/issues", "doc": "https://php-kafka.github.io/php-simple-kafka-client.github.io", - "chat": "" + "chat": "https://gitter.im/php-kafka/php-simple-kafka-lib" } } From 458c7541cd80944ee23166d428d55e78029b8a50 Mon Sep 17 00:00:00 2001 From: nick Date: Sun, 11 Apr 2021 01:18:31 +0200 Subject: [PATCH 4/8] up to 7.4 --- docker/dev/php/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/dev/php/Dockerfile b/docker/dev/php/Dockerfile index ca978ac..221a4a2 100644 --- a/docker/dev/php/Dockerfile +++ b/docker/dev/php/Dockerfile @@ -1,4 +1,4 @@ -FROM php:7.3-cli-alpine3.13 +FROM php:7.4-cli-alpine3.13 ARG HOST_USER_ID ARG HOST_USER From ae97a45d8a14c636794ae78c50031fe62e7370f7 Mon Sep 17 00:00:00 2001 From: nick Date: Sun, 11 Apr 2021 01:46:58 +0200 Subject: [PATCH 5/8] save work --- .../KafkaProducerDeliveryReportCallback.php | 2 +- src/Consumer/AbstractKafkaConsumer.php | 10 +-- ...ighLevelConsumer.php => KafkaConsumer.php} | 2 +- src/Consumer/KafkaConsumerBuilder.php | 6 +- src/Consumer/KafkaConsumerInterface.php | 52 ++++++++++++- .../KafkaHighLevelConsumerInterface.php | 58 -------------- src/Producer/KafkaProducer.php | 4 +- .../KafkaConsumerRebalanceCallbackTest.php | 3 +- .../Consumer/KafkaConsumerBuilderTest.php | 38 ++-------- ...ConsumerTest.php => KafkaConsumerTest.php} | 76 +++++++++---------- tests/Unit/Producer/KafkaProducerTest.php | 16 ++-- 11 files changed, 109 insertions(+), 158 deletions(-) rename src/Consumer/{KafkaHighLevelConsumer.php => KafkaConsumer.php} (98%) delete mode 100644 src/Consumer/KafkaHighLevelConsumerInterface.php rename tests/Unit/Consumer/{KafkaHighLevelConsumerTest.php => KafkaConsumerTest.php} (89%) diff --git a/src/Callback/KafkaProducerDeliveryReportCallback.php b/src/Callback/KafkaProducerDeliveryReportCallback.php index 953005e..2b68e79 100644 --- a/src/Callback/KafkaProducerDeliveryReportCallback.php +++ b/src/Callback/KafkaProducerDeliveryReportCallback.php @@ -28,7 +28,7 @@ public function __invoke(SkcProducer $producer, SkcMessage $message) } throw new KafkaProducerException( - $message->errstr(), + $message->getErrorString(), $message->err ); } diff --git a/src/Consumer/AbstractKafkaConsumer.php b/src/Consumer/AbstractKafkaConsumer.php index ca85db1..dd42091 100644 --- a/src/Consumer/AbstractKafkaConsumer.php +++ b/src/Consumer/AbstractKafkaConsumer.php @@ -102,15 +102,15 @@ public function consume(int $timeoutMs = 10000, bool $autoDecode = true): KafkaC } if (RD_KAFKA_RESP_ERR__PARTITION_EOF === $rdKafkaMessage->err) { - throw new KafkaConsumerEndOfPartitionException($rdKafkaMessage->errstr(), $rdKafkaMessage->err); + throw new KafkaConsumerEndOfPartitionException($rdKafkaMessage->getErrorString(), $rdKafkaMessage->err); } elseif (RD_KAFKA_RESP_ERR__TIMED_OUT === $rdKafkaMessage->err) { - throw new KafkaConsumerTimeoutException($rdKafkaMessage->errstr(), $rdKafkaMessage->err); + throw new KafkaConsumerTimeoutException($rdKafkaMessage->getErrorString(), $rdKafkaMessage->err); } $message = $this->getConsumerMessage($rdKafkaMessage); if (RD_KAFKA_RESP_ERR_NO_ERROR !== 
$rdKafkaMessage->err) { - throw new KafkaConsumerConsumeException($rdKafkaMessage->errstr(), $rdKafkaMessage->err, $message); + throw new KafkaConsumerConsumeException($rdKafkaMessage->getErrorString(), $rdKafkaMessage->err, $message); } if (true === $autoDecode) { @@ -146,8 +146,8 @@ public function getMetadataForTopic(string $topicName, int $timeoutMs = 10000): return $this->consumer ->getMetadata( false, - $topic, - $timeoutMs + $timeoutMs, + $topic ) ->getTopics() ->current();
diff --git a/src/Consumer/KafkaHighLevelConsumer.php b/src/Consumer/KafkaConsumer.php similarity index 98% rename from src/Consumer/KafkaHighLevelConsumer.php rename to src/Consumer/KafkaConsumer.php index c52136d..1dace39 100644 --- a/src/Consumer/KafkaHighLevelConsumer.php +++ b/src/Consumer/KafkaConsumer.php @@ -17,7 +17,7 @@ use SimpleKafkaClient\TopicPartition as SkcTopicPartition; use SimpleKafkaClient\Consumer as SkcConsumer; -final class KafkaHighLevelConsumer extends AbstractKafkaConsumer implements KafkaHighLevelConsumerInterface +final class KafkaConsumer extends AbstractKafkaConsumer implements KafkaConsumerInterface { /**
diff --git a/src/Consumer/KafkaConsumerBuilder.php b/src/Consumer/KafkaConsumerBuilder.php index da3c51c..a5dbf09 100644 --- a/src/Consumer/KafkaConsumerBuilder.php +++ b/src/Consumer/KafkaConsumerBuilder.php @@ -284,7 +284,7 @@ public function build(): KafkaConsumerInterface //create SkcConsumer $rdKafkaConsumer = new SkcConsumer($kafkaConfig); - return new KafkaHighLevelConsumer($rdKafkaConsumer, $kafkaConfig, $this->decoder); + return new KafkaConsumer($rdKafkaConsumer, $kafkaConfig, $this->decoder); } /** @@ -299,10 +299,6 @@ private function registerCallbacks(KafkaConfiguration $conf): void $conf->setRebalanceCb($this->rebalanceCallback); } - if (null !== $this->consumeCallback) { - $conf->setConsumeCb($this->consumeCallback); - } - if (null !== $this->logCallback) { //$conf->setLogCb($this->logCallback); }
diff --git a/src/Consumer/KafkaConsumerInterface.php b/src/Consumer/KafkaConsumerInterface.php index 4c0bcf9..ca4f64e 100644 --- a/src/Consumer/KafkaConsumerInterface.php +++ b/src/Consumer/KafkaConsumerInterface.php @@ -4,14 +4,58 @@ namespace PhpKafka\Consumer; -use PhpKafka\Consumer\ConsumerInterface; use PhpKafka\Message\KafkaConsumerMessageInterface; -use SimpleKafkaClient\Metadata\Topic as SkcMetadataTopic; -use SimpleKafkaClient\ConsumerTopic as SkcConsumerTopic; use SimpleKafkaClient\TopicPartition as SkcTopicPartition; -interface KafkaConsumerInterface +interface KafkaConsumerInterface { + /** + * Assigns a consumer to the given TopicPartition(s) + * + * @param string[]|SkcTopicPartition[] $topicPartitions + * @return void + */ + public function assign(array $topicPartitions): void; + + /** + * Asynchronous version of commit (non-blocking) + * + * @param KafkaConsumerMessageInterface|KafkaConsumerMessageInterface[] $messages + * @return void + */ + public function commitAsync($messages): void; + + /** + * Gets the current assignment for the consumer + * + * @return array|SkcTopicPartition[] + */ + public function getAssignment(): array; + + /** + * Gets the committed offset for a TopicPartition for the configured consumer group + * + * @param array|SkcTopicPartition[] $topicPartitions + * @param integer $timeoutMs + * @return array|SkcTopicPartition[] + */ + public function getCommittedOffsets(array $topicPartitions, int $timeoutMs): array; + + /** + * Get current offset positions of the consumer + * + * @param array|SkcTopicPartition[] 
$topicPartitions + * @return array|SkcTopicPartition[] + */ + public function getOffsetPositions(array $topicPartitions): array; + + /** + * Close the consumer connection + * + * @return void; + */ + public function close(): void; + /** * Subscribes to all defined topics, if no partitions were set, subscribes to all partitions. * If partition(s) (and optionally offset(s)) were set, subscribes accordingly diff --git a/src/Consumer/KafkaHighLevelConsumerInterface.php b/src/Consumer/KafkaHighLevelConsumerInterface.php deleted file mode 100644 index 37a2f2b..0000000 --- a/src/Consumer/KafkaHighLevelConsumerInterface.php +++ /dev/null @@ -1,58 +0,0 @@ -producer ->getMetadata( false, - $topic, - $timeoutMs + $timeoutMs, + $topic ) ->getTopics() ->current(); diff --git a/tests/Unit/Callback/KafkaConsumerRebalanceCallbackTest.php b/tests/Unit/Callback/KafkaConsumerRebalanceCallbackTest.php index 6236f4f..e6c5b8a 100644 --- a/tests/Unit/Callback/KafkaConsumerRebalanceCallbackTest.php +++ b/tests/Unit/Callback/KafkaConsumerRebalanceCallbackTest.php @@ -81,8 +81,7 @@ private function getConsumerMock() $consumerMock ->expects(self::any()) - ->method('unsubscribe') - ->willReturn(null); + ->method('unsubscribe'); $consumerMock ->expects(self::any()) diff --git a/tests/Unit/Consumer/KafkaConsumerBuilderTest.php b/tests/Unit/Consumer/KafkaConsumerBuilderTest.php index 2327b79..66fe6b0 100644 --- a/tests/Unit/Consumer/KafkaConsumerBuilderTest.php +++ b/tests/Unit/Consumer/KafkaConsumerBuilderTest.php @@ -2,8 +2,8 @@ namespace PhpKafka\Tests\Unit\Kafka\Consumer; -use PhpKafka\Consumer\KafkaHighLevelConsumer; -use PhpKafka\Consumer\KafkaHighLevelConsumerInterface; +use PhpKafka\Consumer\KafkaConsumer; +use PhpKafka\Consumer\KafkaConsumerInterface; use PhpKafka\Consumer\KafkaConsumerBuilder; use PhpKafka\Message\Decoder\DecoderInterface; use PhpKafka\Consumer\TopicSubscription; @@ -201,32 +201,6 @@ public function testSetRebalanceCallback(): void self::assertArrayHasKey('rebalance_cb', $conf); } - /** - * @return void - * @throws \ReflectionException - */ - public function testSetConsumeCallback(): void - { - $callback = function () { - // Anonymous test method, no logic required - }; - - $clone = $this->kafkaConsumerBuilder->withConsumeCallback($callback); - - $reflectionProperty = new \ReflectionProperty($clone, 'consumeCallback'); - $reflectionProperty->setAccessible(true); - - self::assertSame($callback, $reflectionProperty->getValue($clone)); - self::assertNotSame($clone, $this->kafkaConsumerBuilder); - - $consumer = $clone - ->withAdditionalBroker('localhost') - ->withSubscription('test') - ->build(); - $conf = $consumer->getConfiguration(); - self::assertArrayHasKey('consume_cb', $conf); - } - /** * @return void * @throws \ReflectionException @@ -293,7 +267,7 @@ public function testBuildSuccess(): void // Anonymous test method, no logic required }; - /** @var $consumer KafkaHighLevelConsumer */ + /** @var $consumer KafkaConsumer */ $consumer = $this->kafkaConsumerBuilder ->withAdditionalBroker('localhost') ->withAdditionalSubscription('test-topic') @@ -305,7 +279,7 @@ public function testBuildSuccess(): void ->build(); self::assertInstanceOf(KafkaConsumerInterface::class, $consumer); - self::assertInstanceOf(KafkaHighLevelConsumer::class, $consumer); + self::assertInstanceOf(KafkaConsumer::class, $consumer); } /** @@ -317,7 +291,7 @@ public function testBuildHighLevelSuccess(): void // Anonymous test method, no logic required }; - /** @var $consumer KafkaHighLevelConsumer */ + /** @var $consumer 
KafkaConsumer */ $consumer = $this->kafkaConsumerBuilder ->withAdditionalBroker('localhost') ->withAdditionalSubscription('test-topic') @@ -329,7 +303,7 @@ public function testBuildHighLevelSuccess(): void $conf = $consumer->getConfiguration(); self::assertInstanceOf(KafkaConsumerInterface::class, $consumer); - self::assertInstanceOf(KafkaHighLevelConsumerInterface::class, $consumer); + self::assertInstanceOf(KafkaConsumerInterface::class, $consumer); self::assertArrayHasKey('enable.auto.commit', $conf); self::assertEquals($conf['enable.auto.commit'], 'false'); } diff --git a/tests/Unit/Consumer/KafkaHighLevelConsumerTest.php b/tests/Unit/Consumer/KafkaConsumerTest.php similarity index 89% rename from tests/Unit/Consumer/KafkaHighLevelConsumerTest.php rename to tests/Unit/Consumer/KafkaConsumerTest.php index 4ffdf7f..3acbbd0 100644 --- a/tests/Unit/Consumer/KafkaHighLevelConsumerTest.php +++ b/tests/Unit/Consumer/KafkaConsumerTest.php @@ -2,7 +2,7 @@ namespace PhpKafka\Tests\Unit\Kafka\Consumer; -use PhpKafka\Consumer\KafkaHighLevelConsumer; +use PhpKafka\Consumer\KafkaConsumer; use PhpKafka\Exception\KafkaConsumerConsumeException; use PhpKafka\Exception\KafkaConsumerEndOfPartitionException; use PhpKafka\Exception\KafkaConsumerTimeoutException; @@ -27,9 +27,9 @@ /** * @covers \PhpKafka\Consumer\AbstractKafkaConsumer - * @covers \PhpKafka\Consumer\KafkaHighLevelConsumer + * @covers \PhpKafka\Consumer\KafkaConsumer */ -final class KafkaHighLevelConsumerTest extends TestCase +final class KafkaConsumerTest extends TestCase { /** @@ -42,7 +42,7 @@ public function testSubscribeSuccess(): void $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $kafkaConfigurationMock->expects(self::exactly(2))->method('getTopicSubscriptions')->willReturnOnConsecutiveCalls($topics, []); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $rdKafkaConsumerMock->expects(self::once())->method('subscribe')->with(['testTopic', 'testTopic2']); @@ -58,7 +58,7 @@ public function testSubscribeSuccessWithParam(): void $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $kafkaConfigurationMock->expects(self::never())->method('getTopicSubscriptions'); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $rdKafkaConsumerMock->expects(self::once())->method('subscribe')->with(['testTopic3']); @@ -75,7 +75,7 @@ public function testSubscribeSuccessWithAssignmentWithPartitions(): void $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $kafkaConfigurationMock->expects(self::exactly(2))->method('getTopicSubscriptions')->willReturnOnConsecutiveCalls([], $topics); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $rdKafkaConsumerMock->expects(self::once())->method('assign'); @@ -125,7 +125,7 @@ function () use ($rdKafkaMetadataTopicMock) { $kafkaConfigurationMock = 
$this->createMock(KafkaConfiguration::class); $kafkaConfigurationMock->expects(self::exactly(2))->method('getTopicSubscriptions')->willReturnOnConsecutiveCalls([], $topics); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $rdKafkaConsumerMock->expects(self::once())->method('assign')->with( $this->callback( @@ -138,7 +138,7 @@ function (array $assignment) { $rdKafkaConsumerMock ->expects(self::once()) ->method('getMetadata') - ->with(false, $rdKafkaConsumerTopicMock, 10000) + ->with(false, 10000, $rdKafkaConsumerTopicMock) ->willReturn($rdKafkaMetadataMock); $rdKafkaConsumerMock ->expects(self::once()) @@ -164,7 +164,7 @@ public function testSubscribeFailureOnMixedSubscribe(): void $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $kafkaConfigurationMock->expects(self::exactly(2))->method('getTopicSubscriptions')->willReturn($topics); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $rdKafkaConsumerMock->expects(self::never())->method('subscribe'); $rdKafkaConsumerMock->expects(self::never())->method('assign'); @@ -186,7 +186,7 @@ public function testSubscribeFailure(): void $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $kafkaConfigurationMock->expects(self::exactly(2))->method('getTopicSubscriptions')->willReturn($topics); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $rdKafkaConsumerMock ->expects(self::once()) @@ -209,7 +209,7 @@ public function testUnsubscribeSuccesss(): void $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $rdKafkaConsumerMock->expects(self::once())->method('unsubscribe'); @@ -227,7 +227,7 @@ public function testUnsubscribeSuccesssConsumeFails(): void $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $rdKafkaConsumerMock->expects(self::once())->method('unsubscribe'); @@ -244,7 +244,7 @@ public function testUnsubscribeFailure(): void $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, 
$decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $rdKafkaConsumerMock ->expects(self::once()) @@ -285,7 +285,7 @@ public function testCommitSuccesss(): void $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $rdKafkaConsumerMock->expects(self::once())->method('commit')->with( $this->callback( function (array $topicPartitions) { @@ -317,7 +317,7 @@ public function testCommitSingleSuccesss(): void $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $rdKafkaConsumerMock->expects(self::once())->method('commit')->with( $this->callback( function (array $topicPartitions) { @@ -340,7 +340,7 @@ public function testCommitAsyncSuccesss(): void $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $message = $this->createMock(KafkaConsumerMessageInterface::class); $rdKafkaConsumerMock->expects(self::once())->method('commitAsync'); @@ -356,7 +356,7 @@ public function testCommitFails(): void $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $message = $this->createMock(KafkaConsumerMessageInterface::class); $rdKafkaConsumerMock @@ -379,7 +379,7 @@ public function testAssignSuccess(): void $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $topicPartitions = ['test']; @@ -399,7 +399,7 @@ public function testAssignFail(): void $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $topicPartitions = ['test']; @@ -424,7 
+424,7 @@ public function testGetAssignment(): void $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $topicPartitions = ['test']; @@ -444,7 +444,7 @@ public function testGetAssignmentException(): void $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $rdKafkaConsumerMock ->expects(self::once()) @@ -498,7 +498,7 @@ function (KafkaConsumerMessageInterface $message) { } ) ); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $kafkaConsumer->subscribe(); $kafkaConsumer->consume(); @@ -530,7 +530,7 @@ public function testKafkaConsumeWithoutDecode(): void $kafkaConfigurationMock->expects(self::exactly(2))->method('getTopicSubscriptions')->willReturnOnConsecutiveCalls($topics, []); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); $decoderMock->expects(self::never())->method('decode'); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $kafkaConsumer->subscribe(); $kafkaConsumer->consume(10000, false); @@ -564,7 +564,7 @@ function (KafkaConsumerMessageInterface $message) { } ) ); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $kafkaConsumer->decodeMessage($messageMock); } @@ -576,7 +576,7 @@ public function testGetCommittedOffsets(): void $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $committedOffsets = ['test']; $rdKafkaConsumerMock @@ -596,7 +596,7 @@ public function testGetCommittedOffsetsException(): void $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $rdKafkaConsumerMock ->expects(self::once()) @@ -617,7 +617,7 @@ public function testGetOffsetPositions(): void $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); $kafkaConfigurationMock = 
$this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $rdKafkaConsumerMock ->expects(self::once()) ->method('getOffsetPositions') @@ -635,7 +635,7 @@ public function testClose(): void $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $rdKafkaConsumerMock->expects(self::once())->method('close'); $kafkaConsumer->close(); @@ -668,7 +668,7 @@ public function testOffsetsForTimes(): void $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $rdKafkaConsumerMock ->expects(self::once()) @@ -688,7 +688,7 @@ public function testGetConfiguration(): void $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $kafkaConfigurationMock->expects(self::any())->method('dump')->willReturn([]); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); self::assertIsArray($kafkaConsumer->getConfiguration()); } @@ -712,7 +712,7 @@ function (string $topic, int $partition, int &$lowOffset, int &$highOffset, int } ); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $lowOffset = $kafkaConsumer->getFirstOffsetForTopicPartition('test-topic', 1, 1000); @@ -738,7 +738,7 @@ function (string $topic, int $partition, int &$lowOffset, int &$highOffset, int } ); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $lowOffset = $kafkaConsumer->getLastOffsetForTopicPartition('test-topic', 1, 1000); @@ -762,7 +762,7 @@ public function testConsumeThrowsEofExceptionIfQueueConsumeReturnsNull(): void $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $rdKafkaConsumerMock ->expects(self::once()) @@ -791,7 +791,7 @@ public function testConsumeDedicatedEofException(): void $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - 
$kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $message = new SkcMessage(); $message->err = RD_KAFKA_RESP_ERR__PARTITION_EOF; @@ -823,7 +823,7 @@ public function testConsumeDedicatedTimeoutException(): void $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $message = new SkcMessage(); $message->err = RD_KAFKA_RESP_ERR__TIMED_OUT; @@ -854,7 +854,7 @@ public function testConsumeThrowsExceptionIfConsumedMessageHasNoTopicAndErrorCod $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); /** @var SkcMessage|MockObject $rdKafkaMessageMock */ $rdKafkaMessageMock = $this->createMock(SkcMessage::class); @@ -864,7 +864,7 @@ public function testConsumeThrowsExceptionIfConsumedMessageHasNoTopicAndErrorCod $rdKafkaMessageMock->topic_name = null; $rdKafkaMessageMock ->expects(self::once()) - ->method('errstr') + ->method('getErrorString') ->willReturn('Unknown error'); $topicSubscription = new TopicSubscription('test-topic', [1], 103); @@ -898,7 +898,7 @@ public function testConsumeThrowsExceptionIfConsumerIsCurrentlyNotSubscribed(): $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - $kafkaConsumer = new KafkaHighLevelConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); + $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); $kafkaConsumer->consume(); } diff --git a/tests/Unit/Producer/KafkaProducerTest.php b/tests/Unit/Producer/KafkaProducerTest.php index ce177c8..1b29279 100644 --- a/tests/Unit/Producer/KafkaProducerTest.php +++ b/tests/Unit/Producer/KafkaProducerTest.php @@ -293,7 +293,7 @@ public function testGetMetadataForTopic(): void $this->rdKafkaProducerMock ->expects(self::once()) ->method('getMetadata') - ->with(false, $topicMock, 1000) + ->with(false, 1000, $topicMock) ->willReturn($metadataMock); $this->kafkaProducer->getMetadataForTopic('test-topic-name', 1000); } @@ -323,7 +323,7 @@ public function testGetMetadataForTopicDefault(): void $this->rdKafkaProducerMock ->expects(self::once()) ->method('getMetadata') - ->with(false, $topicMock, 10000) + ->with(false, 10000, $topicMock) ->willReturn($metadataMock); $this->kafkaProducer->getMetadataForTopic('test-topic-name'); } @@ -340,8 +340,7 @@ public function testBeginTransactionSuccess(): void ->willReturn(RD_KAFKA_RESP_ERR_NO_ERROR); $this->rdKafkaProducerMock ->expects(self::once()) - ->method('beginTransaction') - ->willReturn(RD_KAFKA_RESP_ERR_NO_ERROR); + ->method('beginTransaction'); self::assertNull($this->kafkaProducer->beginTransaction(10000)); } @@ -358,8 +357,7 @@ public function testBeginTransactionConsecutiveSuccess(): void ->willReturn(RD_KAFKA_RESP_ERR_NO_ERROR); $this->rdKafkaProducerMock ->expects(self::exactly(2)) - 
->method('beginTransaction') - ->willReturn(RD_KAFKA_RESP_ERR_NO_ERROR); + ->method('beginTransaction'); self::assertNull($this->kafkaProducer->beginTransaction(10000)); self::assertNull($this->kafkaProducer->beginTransaction(10000)); @@ -469,8 +467,7 @@ public function testAbortTransactionSuccess(): void { $this->rdKafkaProducerMock ->expects(self::once()) - ->method('abortTransaction') - ->willReturn(RD_KAFKA_RESP_ERR_NO_ERROR); + ->method('abortTransaction'); self::assertNull($this->kafkaProducer->abortTransaction(10000)); } @@ -501,8 +498,7 @@ public function testCommitTransactionSuccess(): void $this->rdKafkaProducerMock ->expects(self::once()) ->method('commitTransaction') - ->with(10000) - ->willReturn(RD_KAFKA_RESP_ERR_NO_ERROR); + ->with(10000); self::assertNull($this->kafkaProducer->commitTransaction(10000)); } From e03427b648f1084c1ea354d05cc3f3cdc9288280 Mon Sep 17 00:00:00 2001 From: nick Date: Sun, 11 Apr 2021 02:18:40 +0200 Subject: [PATCH 6/8] save work --- src/Consumer/KafkaConsumer.php | 2 +- src/Consumer/KafkaConsumerInterface.php | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/Consumer/KafkaConsumer.php b/src/Consumer/KafkaConsumer.php index 1dace39..ddf93ba 100644 --- a/src/Consumer/KafkaConsumer.php +++ b/src/Consumer/KafkaConsumer.php @@ -17,7 +17,7 @@ use SimpleKafkaClient\TopicPartition as SkcTopicPartition; use SimpleKafkaClient\Consumer as SkcConsumer; -final class KafkaConsumer extends AbstractKafkaConsumer implements KafkaConsumerInterface +final class KafkaConsumer extends AbstractKafkaConsumer { /** diff --git a/src/Consumer/KafkaConsumerInterface.php b/src/Consumer/KafkaConsumerInterface.php index ca4f64e..c9e67f8 100644 --- a/src/Consumer/KafkaConsumerInterface.php +++ b/src/Consumer/KafkaConsumerInterface.php @@ -6,8 +6,9 @@ use PhpKafka\Message\KafkaConsumerMessageInterface; use SimpleKafkaClient\TopicPartition as SkcTopicPartition; +use SimpleKafkaClient\Metadata\Topic as SkcMetadataTopic; -interface KafkaConsumerInterface extends KafkaConsumerInterface +interface KafkaConsumerInterface { /** * Assigns a consumer to the given TopicPartition(s) From 02081bcdc252a76cccd8480dd1a4089cb7a628e5 Mon Sep 17 00:00:00 2001 From: Nick Date: Sun, 11 Apr 2021 10:20:10 +0200 Subject: [PATCH 7/8] small fixes (#3) * small fixes * fix return type * update docker comments --- docker/dev/php/Dockerfile | 2 +- src/Consumer/AbstractKafkaConsumer.php | 17 +++--- src/Consumer/KafkaConsumer.php | 4 +- src/Consumer/KafkaConsumerBuilder.php | 20 ++----- .../KafkaConsumerBuilderInterface.php | 9 ---- .../KafkaConsumerRebalanceCallbackTest.php | 6 +-- .../Consumer/KafkaConsumerBuilderTest.php | 34 ++++-------- tests/Unit/Consumer/KafkaConsumerTest.php | 52 +++++++------------ tests/Unit/Producer/KafkaProducerTest.php | 6 +-- 9 files changed, 44 insertions(+), 106 deletions(-) diff --git a/docker/dev/php/Dockerfile b/docker/dev/php/Dockerfile index 221a4a2..c8b2402 100644 --- a/docker/dev/php/Dockerfile +++ b/docker/dev/php/Dockerfile @@ -26,7 +26,7 @@ RUN echo "$HOST_USER:x:$HOST_USER_ID:82:Linux User,,,:/home/$HOST_USER:" >> /etc echo "ALL ALL=NOPASSWD: ALL" >> /etc/sudoers && \ addgroup $HOST_USER www-data -# COMPOSER: install binary and prestissimo +# COMPOSER: install binary RUN curl -sS https://getcomposer.org/installer | php -- --install-dir=/usr/bin --filename=composer # PHP: Install php extensions diff --git a/src/Consumer/AbstractKafkaConsumer.php b/src/Consumer/AbstractKafkaConsumer.php index dd42091..973b941 100644 --- 
a/src/Consumer/AbstractKafkaConsumer.php +++ b/src/Consumer/AbstractKafkaConsumer.php @@ -94,12 +94,7 @@ public function consume(int $timeoutMs = 10000, bool $autoDecode = true): KafkaC throw new KafkaConsumerConsumeException(KafkaConsumerConsumeException::NOT_SUBSCRIBED_EXCEPTION_MESSAGE); } - if (null === $rdKafkaMessage = $this->kafkaConsume($timeoutMs)) { - throw new KafkaConsumerEndOfPartitionException( - kafka_err2str(RD_KAFKA_RESP_ERR__PARTITION_EOF), - RD_KAFKA_RESP_ERR__PARTITION_EOF - ); - } + $rdKafkaMessage = $this->kafkaConsume($timeoutMs); if (RD_KAFKA_RESP_ERR__PARTITION_EOF === $rdKafkaMessage->err) { throw new KafkaConsumerEndOfPartitionException($rdKafkaMessage->getErrorString(), $rdKafkaMessage->err); @@ -211,9 +206,11 @@ protected function getAllTopicPartitions(string $topic): array $partitions = []; $topicMetadata = $this->getMetadataForTopic($topic); + $metaPartitions = $topicMetadata->getPartitions(); - foreach ($topicMetadata->getPartitions() as $partition) { - $partitions[] = $partition->getId(); + while ($metaPartitions->valid()) { + $partitions[] = $metaPartitions->current()->getId(); + $metaPartitions->next(); } return $partitions; @@ -238,7 +235,7 @@ private function getConsumerMessage(SkcMessage $message): KafkaConsumerMessageIn /** * @param integer $timeoutMs - * @return null|SkcMessage + * @return SkcMessage */ - abstract protected function kafkaConsume(int $timeoutMs): ?SkcMessage; + abstract protected function kafkaConsume(int $timeoutMs): SkcMessage; } diff --git a/src/Consumer/KafkaConsumer.php b/src/Consumer/KafkaConsumer.php index ddf93ba..0fc14da 100644 --- a/src/Consumer/KafkaConsumer.php +++ b/src/Consumer/KafkaConsumer.php @@ -181,10 +181,10 @@ public function close(): void /** * @param integer $timeoutMs - * @return SkcMessage|null + * @return SkcMessage * @throws SkcException */ - protected function kafkaConsume(int $timeoutMs): ?SkcMessage + protected function kafkaConsume(int $timeoutMs): SkcMessage { return $this->consumer->consume($timeoutMs); } diff --git a/src/Consumer/KafkaConsumerBuilder.php b/src/Consumer/KafkaConsumerBuilder.php index a5dbf09..2985f39 100644 --- a/src/Consumer/KafkaConsumerBuilder.php +++ b/src/Consumer/KafkaConsumerBuilder.php @@ -24,7 +24,8 @@ final class KafkaConsumerBuilder implements KafkaConsumerBuilderInterface private $config = [ 'enable.auto.offset.store' => false, 'enable.auto.commit' => false, - 'auto.offset.reset' => 'earliest' + 'enable.partition.eof' => true, + 'auto.offset.reset' => 'earliest', ]; /** @@ -199,21 +200,6 @@ public function withRebalanceCallback(callable $rebalanceCallback): KafkaConsume return $that; } - /** - * Only applicable for the high level consumer - * Callback that is going to be called when you call consume - * - * @param callable $consumeCallback - * @return KafkaConsumerBuilderInterface - */ - public function withConsumeCallback(callable $consumeCallback): KafkaConsumerBuilderInterface - { - $that = clone $this; - $that->consumeCallback = $consumeCallback; - - return $that; - } - /** * Callback for log related events * @@ -300,7 +286,7 @@ private function registerCallbacks(KafkaConfiguration $conf): void } if (null !== $this->logCallback) { - //$conf->setLogCb($this->logCallback); + $conf->setLogCb($this->logCallback); } if (null !== $this->offsetCommitCallback) { diff --git a/src/Consumer/KafkaConsumerBuilderInterface.php b/src/Consumer/KafkaConsumerBuilderInterface.php index 0ddc43b..e828696 100644 --- a/src/Consumer/KafkaConsumerBuilderInterface.php +++ 
b/src/Consumer/KafkaConsumerBuilderInterface.php @@ -83,15 +83,6 @@ public function withErrorCallback(callable $errorCallback): self; */ public function withRebalanceCallback(callable $rebalanceCallback): self; - /** - * Only applicable for the high level consumer - * Callback that is going to be called when you call consume - * - * @param callable $consumeCallback - * @return KafkaConsumerBuilderInterface - */ - public function withConsumeCallback(callable $consumeCallback): self; - /** * Set callback that is being called on offset commits * diff --git a/tests/Unit/Callback/KafkaConsumerRebalanceCallbackTest.php b/tests/Unit/Callback/KafkaConsumerRebalanceCallbackTest.php index e6c5b8a..4e5e436 100644 --- a/tests/Unit/Callback/KafkaConsumerRebalanceCallbackTest.php +++ b/tests/Unit/Callback/KafkaConsumerRebalanceCallbackTest.php @@ -43,8 +43,7 @@ public function testInvokeAssign() $consumer ->expects(self::once()) ->method('assign') - ->with($partitions) - ->willReturn(null); + ->with($partitions); call_user_func( @@ -62,8 +61,7 @@ public function testInvokeRevoke() $consumer ->expects(self::once()) ->method('assign') - ->with(null) - ->willReturn(null); + ->with(null); call_user_func(new KafkaConsumerRebalanceCallback(), $consumer, RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS); } diff --git a/tests/Unit/Consumer/KafkaConsumerBuilderTest.php b/tests/Unit/Consumer/KafkaConsumerBuilderTest.php index 66fe6b0..0519da4 100644 --- a/tests/Unit/Consumer/KafkaConsumerBuilderTest.php +++ b/tests/Unit/Consumer/KafkaConsumerBuilderTest.php @@ -8,7 +8,6 @@ use PhpKafka\Message\Decoder\DecoderInterface; use PhpKafka\Consumer\TopicSubscription; use PhpKafka\Exception\KafkaConsumerBuilderException; -use PhpKafka\Consumer\KafkaConsumerInterface; use PHPUnit\Framework\TestCase; /** @@ -110,6 +109,7 @@ public function testAddConfig(): void 'group.id' => 'test-group', 'enable.auto.offset.store' => true, 'enable.auto.commit' => false, + 'enable.partition.eof' => true, 'auto.offset.reset' => 'earliest' ], $reflectionProperty->getValue($clone) @@ -170,6 +170,7 @@ public function testSetErrorCallback(): void $consumer = $clone ->withAdditionalBroker('localhost') ->withSubscription('test') + ->withLogCallback($callback) ->build(); $conf = $consumer->getConfiguration(); self::assertArrayHasKey('error_cb', $conf); @@ -196,6 +197,7 @@ public function testSetRebalanceCallback(): void $consumer = $clone ->withAdditionalBroker('localhost') ->withSubscription('test') + ->withLogCallback($callback) ->build(); $conf = $consumer->getConfiguration(); self::assertArrayHasKey('rebalance_cb', $conf); @@ -222,6 +224,7 @@ public function testSetOffsetCommitCallback(): void $consumer = $clone ->withAdditionalBroker('localhost') ->withSubscription('test') + ->withLogCallback($callback) ->build(); $conf = $consumer->getConfiguration(); self::assertArrayHasKey('offset_commit_cb', $conf); @@ -273,29 +276,6 @@ public function testBuildSuccess(): void ->withAdditionalSubscription('test-topic') ->withRebalanceCallback($callback) ->withOffsetCommitCallback($callback) - ->withConsumeCallback($callback) - ->withErrorCallback($callback) - ->withLogCallback($callback) - ->build(); - - self::assertInstanceOf(KafkaConsumerInterface::class, $consumer); - self::assertInstanceOf(KafkaConsumer::class, $consumer); - } - - /** - * @return void - */ - public function testBuildHighLevelSuccess(): void - { - $callback = function ($kafka, $errId, $msg) { - // Anonymous test method, no logic required - }; - - /** @var $consumer KafkaConsumer */ - $consumer = 
$this->kafkaConsumerBuilder - ->withAdditionalBroker('localhost') - ->withAdditionalSubscription('test-topic') - ->withRebalanceCallback($callback) ->withErrorCallback($callback) ->withLogCallback($callback) ->build(); @@ -303,8 +283,12 @@ public function testBuildHighLevelSuccess(): void $conf = $consumer->getConfiguration(); self::assertInstanceOf(KafkaConsumerInterface::class, $consumer); - self::assertInstanceOf(KafkaConsumerInterface::class, $consumer); + self::assertInstanceOf(KafkaConsumer::class, $consumer); self::assertArrayHasKey('enable.auto.commit', $conf); self::assertEquals($conf['enable.auto.commit'], 'false'); + self::assertArrayHasKey('rebalance_cb', $conf); + self::assertArrayHasKey('offset_commit_cb', $conf); + self::assertArrayHasKey('error_cb', $conf); + self::assertArrayHasKey('log_cb', $conf); } } diff --git a/tests/Unit/Consumer/KafkaConsumerTest.php b/tests/Unit/Consumer/KafkaConsumerTest.php index 3acbbd0..03bced4 100644 --- a/tests/Unit/Consumer/KafkaConsumerTest.php +++ b/tests/Unit/Consumer/KafkaConsumerTest.php @@ -87,10 +87,23 @@ public function testSubscribeSuccessWithAssignmentWithPartitions(): void */ public function testSubscribeSuccessWithAssignmentWithOffsetOnly(): void { - $partitions = [ - $this->getMetadataPartitionMock(1), - $this->getMetadataPartitionMock(2) - ]; + $partitionCollection = $this->createMock(SkcMetadataCollection::class); + $partitionCollection->expects(self::exactly(2))->method('next'); + $partitionCollection + ->expects(self::exactly(3)) + ->method('valid') + ->willReturnOnConsecutiveCalls( + true, + true, + false + ); + $partitionCollection + ->expects(self::exactly(2)) + ->method('current') + ->willReturnOnConsecutiveCalls( + $this->getMetadataPartitionMock(1), + $this->getMetadataPartitionMock(2) + ); /** @var SkcConsumerTopic|MockObject $rdKafkaConsumerTopicMock */ $rdKafkaConsumerTopicMock = $this->createMock(SkcConsumerTopic::class); @@ -100,7 +113,7 @@ public function testSubscribeSuccessWithAssignmentWithOffsetOnly(): void $rdKafkaMetadataTopicMock ->expects(self::once()) ->method('getPartitions') - ->willReturn($partitions); + ->willReturn($partitionCollection); /** @var SkcMetadata|MockObject $rdKafkaMetadataMock */ $rdKafkaMetadataMock = $this->createMock(SkcMetadata::class); @@ -745,35 +758,6 @@ function (string $topic, int $partition, int &$lowOffset, int &$highOffset, int $this->assertEquals(5, $lowOffset); } - /** - * @throws KafkaConsumerConsumeException - * @throws KafkaConsumerEndOfPartitionException - * @throws KafkaConsumerSubscriptionException - * @throws KafkaConsumerTimeoutException - * @return void - */ - public function testConsumeThrowsEofExceptionIfQueueConsumeReturnsNull(): void - { - self::expectException(KafkaConsumerEndOfPartitionException::class); - self::expectExceptionCode(RD_KAFKA_RESP_ERR__PARTITION_EOF); - self::expectExceptionMessage(kafka_err2str(RD_KAFKA_RESP_ERR__PARTITION_EOF)); - - $rdKafkaConsumerMock = $this->createMock(SkcConsumer::class); - $kafkaConfigurationMock = $this->createMock(KafkaConfiguration::class); - $decoderMock = $this->getMockForAbstractClass(DecoderInterface::class); - - $kafkaConsumer = new KafkaConsumer($rdKafkaConsumerMock, $kafkaConfigurationMock, $decoderMock); - - $rdKafkaConsumerMock - ->expects(self::once()) - ->method('consume') - ->with(10000) - ->willReturn(null); - - $kafkaConsumer->subscribe(); - $kafkaConsumer->consume(); - } - /** * @throws KafkaConsumerConsumeException * @throws KafkaConsumerEndOfPartitionException diff --git 
a/tests/Unit/Producer/KafkaProducerTest.php b/tests/Unit/Producer/KafkaProducerTest.php index 1b29279..621124a 100644 --- a/tests/Unit/Producer/KafkaProducerTest.php +++ b/tests/Unit/Producer/KafkaProducerTest.php @@ -336,8 +336,7 @@ public function testBeginTransactionSuccess(): void $this->rdKafkaProducerMock ->expects(self::once()) ->method('initTransactions') - ->with(10000) - ->willReturn(RD_KAFKA_RESP_ERR_NO_ERROR); + ->with(10000); $this->rdKafkaProducerMock ->expects(self::once()) ->method('beginTransaction'); @@ -353,8 +352,7 @@ public function testBeginTransactionConsecutiveSuccess(): void $this->rdKafkaProducerMock ->expects(self::once()) ->method('initTransactions') - ->with(10000) - ->willReturn(RD_KAFKA_RESP_ERR_NO_ERROR); + ->with(10000); $this->rdKafkaProducerMock ->expects(self::exactly(2)) ->method('beginTransaction'); From 91f0d1f53b42fc3ca85b051915440a98398b2b85 Mon Sep 17 00:00:00 2001 From: nick Date: Sun, 11 Apr 2021 10:20:19 +0200 Subject: [PATCH 8/8] add issue template --- .github/FUNDING.yml | 3 +++ .github/ISSUE_TEMPLATE | 7 +++++++ 2 files changed, 10 insertions(+) create mode 100644 .github/FUNDING.yml create mode 100644 .github/ISSUE_TEMPLATE diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 0000000..b1da0b5 --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1,3 @@ +# These are supported funding model platforms + +github: [nick-zh] diff --git a/.github/ISSUE_TEMPLATE b/.github/ISSUE_TEMPLATE new file mode 100644 index 0000000..16170b3 --- /dev/null +++ b/.github/ISSUE_TEMPLATE @@ -0,0 +1,7 @@ + + + * PHP version: + * simple-kafka-client version: + * php-simple-kafka-lib version:
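
For reference, a minimal consumption sketch against the API these patches converge on: the renamed KafkaConsumer built through KafkaConsumerBuilder, with partition-EOF and timeout conditions surfacing as exceptions now that enable.partition.eof defaults to true. The static create() factory on the builder and the exact processing/commit flow are assumptions for illustration, not something these patches spell out.

<?php

declare(strict_types=1);

use PhpKafka\Consumer\KafkaConsumerBuilder;
use PhpKafka\Exception\KafkaConsumerEndOfPartitionException;
use PhpKafka\Exception\KafkaConsumerTimeoutException;

// Assumption: a static create() factory; instantiate the builder however your version exposes it.
$consumer = KafkaConsumerBuilder::create()
    ->withAdditionalBroker('localhost')
    ->withAdditionalSubscription('test-topic')
    ->build();

// Subscribes to the configured topics, or assigns partitions when explicit
// partitions/offsets were set on the subscription.
$consumer->subscribe();

for ($i = 0; $i < 100; $i++) {
    try {
        // Signature per AbstractKafkaConsumer: consume(int $timeoutMs = 10000, bool $autoDecode = true)
        $message = $consumer->consume();
    } catch (KafkaConsumerEndOfPartitionException | KafkaConsumerTimeoutException $e) {
        // With enable.partition.eof now defaulting to true, reaching the end of a
        // partition (or timing out) is reported as an exception rather than a null message.
        continue;
    }

    // ... process $message (a KafkaConsumerMessageInterface) ...

    $consumer->commitAsync($message);
}

$consumer->close();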