<!DOCTYPE import-control PUBLIC
    "-//Puppy Crawl//DTD Import Control 1.1//EN"
    "http://www.puppycrawl.com/dtds/import_control_1_1.dtd">
<!--
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
-->
<import-control pkg="org.apache.kafka">
  <!-- THINK HARD ABOUT THE LAYERING OF THE PROJECT BEFORE CHANGING THIS FILE -->
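  <!-- Rule semantics, per the Import Control 1.1 DTD referenced above: "allow"/"disallow"
       entries match import statements, nested "subpackage" elements scope additional rules to
       that subpackage (falling back to the enclosing rules), and exact-match="true" limits an
       "allow" to the named package itself rather than the package plus its subpackages. -->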
  <!-- common library dependencies -->
  <allow pkg="java" />
  <allow pkg="javax.management" />
  <allow pkg="org.slf4j" />
  <allow pkg="org.junit" />
  <allow pkg="org.hamcrest" />
  <allow pkg="org.easymock" />
  <allow pkg="org.powermock" />
  <allow pkg="java.security" />
  <allow pkg="javax.net.ssl" />
  <allow pkg="javax.security" />
  <allow pkg="org.ietf.jgss" />
  <!-- no one depends on the server -->
  <disallow pkg="kafka" />
  <!-- anyone can use public classes -->
  <allow pkg="org.apache.kafka.common" exact-match="true" />
  <allow pkg="org.apache.kafka.common.security" />
  <allow pkg="org.apache.kafka.common.serialization" />
  <allow pkg="org.apache.kafka.common.utils" />
  <allow pkg="org.apache.kafka.common.errors" exact-match="true" />
  <allow pkg="org.apache.kafka.common.memory" />
  <subpackage name="common">
    <disallow pkg="org.apache.kafka.clients" />
    <allow pkg="org.apache.kafka.common" exact-match="true" />
    <allow pkg="org.apache.kafka.common.annotation" />
    <allow pkg="org.apache.kafka.common.config" exact-match="true" />
    <allow pkg="org.apache.kafka.common.internals" exact-match="true" />
    <allow pkg="org.apache.kafka.test" />
    <subpackage name="acl">
      <allow pkg="org.apache.kafka.common.annotation" />
      <allow pkg="org.apache.kafka.common.acl" />
      <allow pkg="org.apache.kafka.common.resource" />
    </subpackage>
    <subpackage name="config">
      <allow pkg="org.apache.kafka.common.config" />
      <!-- for testing -->
      <allow pkg="org.apache.kafka.common.metrics" />
    </subpackage>
    <subpackage name="metrics">
      <allow pkg="org.apache.kafka.common.metrics" />
    </subpackage>
    <subpackage name="memory">
      <allow pkg="org.apache.kafka.common.metrics" />
    </subpackage>
    <subpackage name="network">
      <allow pkg="org.apache.kafka.common.security.auth" />
      <allow pkg="org.apache.kafka.common.protocol" />
      <allow pkg="org.apache.kafka.common.config" />
      <allow pkg="org.apache.kafka.common.metrics" />
      <allow pkg="org.apache.kafka.common.security" />
    </subpackage>
    <subpackage name="resource">
      <allow pkg="org.apache.kafka.common.annotation" />
      <allow pkg="org.apache.kafka.common.resource" />
    </subpackage>
    <subpackage name="security">
      <allow pkg="org.apache.kafka.common.annotation" />
      <allow pkg="org.apache.kafka.common.network" />
      <allow pkg="org.apache.kafka.common.config" />
      <allow pkg="org.apache.kafka.common.protocol" />
      <allow pkg="org.apache.kafka.common.errors" />
      <subpackage name="authenticator">
        <allow pkg="org.apache.kafka.common.protocol.types" />
        <allow pkg="org.apache.kafka.common.requests" />
        <allow pkg="org.apache.kafka.clients" />
      </subpackage>
      <subpackage name="scram">
        <allow pkg="javax.crypto" />
      </subpackage>
      <subpackage name="oauthbearer">
        <allow pkg="com.fasterxml.jackson.databind" />
      </subpackage>
    </subpackage>
    <subpackage name="protocol">
      <allow pkg="org.apache.kafka.common.errors" />
      <allow pkg="org.apache.kafka.common.protocol.types" />
      <allow pkg="org.apache.kafka.common.record" />
      <allow pkg="org.apache.kafka.common.requests" />
      <allow pkg="org.apache.kafka.common.resource" />
    </subpackage>
    <subpackage name="record">
      <allow pkg="net.jpountz" />
      <allow pkg="org.apache.kafka.common.header" />
      <allow pkg="org.apache.kafka.common.record" />
      <allow pkg="org.apache.kafka.common.network" />
      <allow pkg="org.apache.kafka.common.protocol" />
      <allow pkg="org.apache.kafka.common.protocol.types" />
      <allow pkg="org.apache.kafka.common.errors" />
    </subpackage>
    <subpackage name="header">
      <allow pkg="org.apache.kafka.common.header" />
      <allow pkg="org.apache.kafka.common.record" />
    </subpackage>
    <subpackage name="requests">
      <allow pkg="org.apache.kafka.clients.admin" />
      <allow pkg="org.apache.kafka.common.acl" />
      <allow pkg="org.apache.kafka.common.protocol" />
      <allow pkg="org.apache.kafka.common.network" />
      <allow pkg="org.apache.kafka.common.requests" />
      <allow pkg="org.apache.kafka.common.resource" />
      <allow pkg="org.apache.kafka.common.record" />
      <!-- for testing -->
      <allow pkg="org.apache.kafka.common.errors" />
    </subpackage>
    <subpackage name="serialization">
      <allow class="org.apache.kafka.common.errors.SerializationException" />
      <allow class="org.apache.kafka.common.header.Headers" />
    </subpackage>
    <subpackage name="utils">
      <allow pkg="org.apache.kafka.common" />
    </subpackage>
  </subpackage>
  <subpackage name="clients">
    <allow pkg="org.slf4j" />
    <allow pkg="org.apache.kafka.common" />
    <allow pkg="org.apache.kafka.clients" exact-match="true"/>
    <allow pkg="org.apache.kafka.test" />
    <subpackage name="consumer">
      <allow pkg="org.apache.kafka.clients.consumer" />
    </subpackage>
    <subpackage name="producer">
      <allow pkg="org.apache.kafka.clients.consumer" />
      <allow pkg="org.apache.kafka.clients.producer" />
    </subpackage>
    <subpackage name="admin">
      <allow pkg="org.apache.kafka.clients.admin" />
      <allow pkg="org.apache.kafka.clients.consumer.internals" />
      <allow pkg="org.apache.kafka.clients.consumer" />
    </subpackage>
  </subpackage>
  <subpackage name="server">
    <allow pkg="org.slf4j" />
    <allow pkg="org.apache.kafka.common" />
    <allow pkg="org.apache.kafka.test" />
  </subpackage>
  <subpackage name="tools">
    <allow pkg="org.apache.kafka.common"/>
    <allow pkg="org.apache.kafka.clients.admin" />
    <allow pkg="org.apache.kafka.clients.producer" />
    <allow pkg="org.apache.kafka.clients.consumer" />
    <allow pkg="com.fasterxml.jackson" />
    <allow pkg="net.sourceforge.argparse4j" />
    <allow pkg="org.apache.log4j" />
  </subpackage>
  <subpackage name="trogdor">
    <allow pkg="com.fasterxml.jackson" />
    <allow pkg="javax.servlet" />
    <allow pkg="javax.ws.rs" />
    <allow pkg="net.sourceforge.argparse4j" />
    <allow pkg="org.apache.kafka.clients.admin" />
    <allow pkg="org.apache.kafka.clients.consumer" exact-match="true"/>
    <allow pkg="org.apache.kafka.clients.producer" exact-match="true"/>
    <allow pkg="org.apache.kafka.common" />
    <allow pkg="org.apache.kafka.test"/>
    <allow pkg="org.apache.kafka.trogdor" />
    <allow pkg="org.apache.log4j" />
    <allow pkg="org.eclipse.jetty" />
    <allow pkg="org.glassfish.jersey" />
  </subpackage>
  <subpackage name="streams">
    <allow pkg="org.apache.kafka.common"/>
    <allow pkg="org.apache.kafka.test"/>
    <allow pkg="org.apache.kafka.clients"/>
    <allow pkg="org.apache.kafka.clients.producer" exact-match="true"/>
    <allow pkg="org.apache.kafka.clients.consumer" exact-match="true"/>
    <!-- Temporary until EosTestDriver migrates to the Java AdminClient -->
    <allow pkg="kafka.admin" exact-match="true"/>
    <allow pkg="org.apache.kafka.streams"/>
    <subpackage name="examples">
      <allow pkg="com.fasterxml.jackson.databind" />
      <allow pkg="org.apache.kafka.connect.json" />
    </subpackage>
    <subpackage name="perf">
      <allow pkg="com.fasterxml.jackson.databind" />
    </subpackage>
    <subpackage name="integration">
      <allow pkg="kafka.admin" />
      <allow pkg="kafka.api" />
      <allow pkg="kafka.server" />
      <allow pkg="kafka.tools" />
      <allow pkg="kafka.utils" />
      <allow pkg="kafka.zk" />
      <allow pkg="kafka.zookeeper" />
      <allow pkg="kafka.log" />
      <allow pkg="scala" />
      <allow pkg="scala.collection" />
    </subpackage>
    <subpackage name="test">
      <allow pkg="kafka.admin" />
    </subpackage>
    <subpackage name="tools">
      <allow pkg="kafka.tools" />
    </subpackage>
    <subpackage name="state">
      <allow pkg="org.rocksdb" />
    </subpackage>
    <subpackage name="processor">
      <subpackage name="internals">
        <allow pkg="org.I0Itec.zkclient" />
        <allow pkg="com.fasterxml.jackson" />
        <allow pkg="org.apache.zookeeper" />
        <subpackage name="testutil">
          <allow pkg="org.apache.log4j" />
        </subpackage>
      </subpackage>
    </subpackage>
  </subpackage>
  <subpackage name="jmh">
    <allow pkg="org.openjdk.jmh.annotations" />
    <allow pkg="org.openjdk.jmh.runner" />
    <allow pkg="org.openjdk.jmh.runner.options" />
    <allow pkg="org.openjdk.jmh.infra" />
    <allow pkg="org.apache.kafka.common" />
    <allow pkg="org.apache.kafka.clients" />
    <allow pkg="org.apache.kafka.streams" />
    <allow pkg="org.github.jamm" />
  </subpackage>
  <subpackage name="log4jappender">
    <allow pkg="org.apache.log4j" />
    <allow pkg="org.apache.kafka.clients" />
    <allow pkg="org.apache.kafka.common" />
    <allow pkg="org.apache.kafka.test" />
  </subpackage>
  <subpackage name="test">
    <allow pkg="org.apache.kafka" />
    <allow pkg="org.bouncycastle" />
  </subpackage>
  <subpackage name="connect">
    <allow pkg="org.apache.kafka.common" />
    <allow pkg="org.apache.kafka.connect.data" />
    <allow pkg="org.apache.kafka.connect.errors" />
    <allow pkg="org.apache.kafka.connect.header" />
    <allow pkg="org.apache.kafka.connect.components"/>
    <allow pkg="org.apache.kafka.clients" />
    <allow pkg="org.apache.kafka.test"/>
    <subpackage name="source">
      <allow pkg="org.apache.kafka.connect.connector" />
      <allow pkg="org.apache.kafka.connect.storage" />
    </subpackage>
    <subpackage name="sink">
      <allow pkg="org.apache.kafka.clients.consumer" />
      <allow pkg="org.apache.kafka.connect.connector" />
      <allow pkg="org.apache.kafka.connect.storage" />
    </subpackage>
    <subpackage name="converters">
      <allow pkg="org.apache.kafka.connect.storage" />
    </subpackage>
    <subpackage name="rest">
      <allow pkg="org.apache.kafka.connect.health" />
      <allow pkg="javax.ws.rs" />
      <allow pkg="javax.security.auth"/>
      <subpackage name="basic">
        <allow pkg="org.apache.kafka.connect.rest"/>
      </subpackage>
    </subpackage>
    <subpackage name="runtime">
      <allow pkg="org.apache.kafka.connect" />
      <allow pkg="org.reflections"/>
      <allow pkg="org.reflections.util"/>
      <subpackage name="rest">
        <allow pkg="org.eclipse.jetty" />
        <allow pkg="javax.ws.rs" />
        <allow pkg="javax.servlet" />
        <allow pkg="org.glassfish.jersey" />
        <allow pkg="com.fasterxml.jackson" />
      </subpackage>
      <subpackage name="isolation">
        <allow pkg="com.fasterxml.jackson" />
        <allow pkg="org.apache.maven.artifact.versioning" />
      </subpackage>
    </subpackage>
    <subpackage name="cli">
      <allow pkg="org.apache.kafka.connect.runtime" />
      <allow pkg="org.apache.kafka.connect.storage" />
      <allow pkg="org.apache.kafka.connect.util" />
      <allow pkg="org.apache.kafka.common" />
    </subpackage>
    <subpackage name="storage">
      <allow pkg="org.apache.kafka.connect" />
      <allow pkg="org.apache.kafka.common.serialization" />
    </subpackage>
    <subpackage name="util">
      <allow pkg="org.apache.kafka.connect" />
      <allow pkg="org.reflections.vfs" />
      <!-- for annotations to avoid code duplication -->
      <allow pkg="com.fasterxml.jackson.annotation" />
    </subpackage>
    <subpackage name="json">
      <allow pkg="com.fasterxml.jackson" />
      <allow pkg="org.apache.kafka.common.serialization" />
      <allow pkg="org.apache.kafka.common.errors" />
      <allow pkg="org.apache.kafka.connect.storage" />
    </subpackage>
    <subpackage name="file">
      <allow pkg="org.apache.kafka.connect" />
      <allow pkg="org.apache.kafka.clients.consumer" />
      <!-- for tests -->
      <allow pkg="org.easymock" />
      <allow pkg="org.powermock" />
    </subpackage>
    <subpackage name="tools">
      <allow pkg="org.apache.kafka.connect" />
      <allow pkg="org.apache.kafka.tools" />
      <allow pkg="com.fasterxml.jackson" />
    </subpackage>
    <subpackage name="transforms">
      <allow class="org.apache.kafka.connect.connector.ConnectRecord" />
      <allow class="org.apache.kafka.connect.source.SourceRecord" />
      <allow class="org.apache.kafka.connect.sink.SinkRecord" />
      <allow pkg="org.apache.kafka.connect.transforms.util" />
    </subpackage>
  </subpackage>
</import-control>