Skip to content

Commit fa4acfb

Browse files
committed
use absolute imports everywhere
1 parent: 4dc0899 · commit: fa4acfb

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

44 files changed

+148
-145
lines changed

.gitignore

Lines changed: 4 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -5,6 +5,8 @@ build
55
dist
66
MANIFEST
77
env
8+
venv
9+
.ropeproject/
810
servers/*/kafka-bin*
911
servers/*/resources/ssl*
1012
.coverage*
@@ -13,4 +15,5 @@ docs/_build
1315
.cache*
1416
.idea/
1517
integration-test/
16-
tests-env/
18+
tests-env/
19+
.pytest_cache/

kafka/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,7 @@
11
from __future__ import absolute_import
22

33
__title__ = 'kafka'
4-
from .version import __version__
4+
from kafka.version import __version__
55
__author__ = 'Dana Powers'
66
__license__ = 'Apache License 2.0'
77
__copyright__ = 'Copyright 2016 Dana Powers, David Arthur, and Contributors'

kafka/client_async.py

Lines changed: 12 additions & 12 deletions
Original file line number | Diff line number | Diff line change
@@ -13,26 +13,26 @@
1313
import selectors # pylint: disable=import-error
1414
except ImportError:
1515
# vendored backport module
16-
from .vendor import selectors34 as selectors
16+
from kafka.vendor import selectors34 as selectors
1717

1818
import socket
1919
import time
2020

2121
from kafka.vendor import six
2222

23-
from .cluster import ClusterMetadata
24-
from .conn import BrokerConnection, ConnectionStates, collect_hosts, get_ip_port_afi
25-
from . import errors as Errors
26-
from .future import Future
27-
from .metrics import AnonMeasurable
28-
from .metrics.stats import Avg, Count, Rate
29-
from .metrics.stats.rate import TimeUnit
30-
from .protocol.metadata import MetadataRequest
31-
from .util import Dict, WeakMethod
23+
from kafka.cluster import ClusterMetadata
24+
from kafka.conn import BrokerConnection, ConnectionStates, collect_hosts, get_ip_port_afi
25+
from kafka import errors as Errors
26+
from kafka.future import Future
27+
from kafka.metrics import AnonMeasurable
28+
from kafka.metrics.stats import Avg, Count, Rate
29+
from kafka.metrics.stats.rate import TimeUnit
30+
from kafka.protocol.metadata import MetadataRequest
31+
from kafka.util import Dict, WeakMethod
3232
# Although this looks unused, it actually monkey-patches socket.socketpair()
3333
# and should be left in as long as we're using socket.socketpair() in this file
34-
from .vendor import socketpair
35-
from .version import __version__
34+
from kafka.vendor import socketpair
35+
from kafka.version import __version__
3636

3737
if six.PY2:
3838
ConnectionError = None

kafka/cluster.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -8,9 +8,9 @@
88

99
from kafka.vendor import six
1010

11-
from . import errors as Errors
12-
from .future import Future
13-
from .structs import BrokerMetadata, PartitionMetadata, TopicPartition
11+
from kafka import errors as Errors
12+
from kafka.future import Future
13+
from kafka.structs import BrokerMetadata, PartitionMetadata, TopicPartition
1414

1515
log = logging.getLogger(__name__)
1616

kafka/conn.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -12,7 +12,7 @@
1212
import selectors # pylint: disable=import-error
1313
except ImportError:
1414
# vendored backport module
15-
from .vendor import selectors34 as selectors
15+
from kafka.vendor import selectors34 as selectors
1616

1717
import socket
1818
import struct
@@ -857,8 +857,8 @@ def check_version(self, timeout=2, strict=False):
857857
# vanilla MetadataRequest. If the server did not recognize the first
858858
# request, both will be failed with a ConnectionError that wraps
859859
# socket.error (32, 54, or 104)
860-
from .protocol.admin import ApiVersionRequest, ListGroupsRequest
861-
from .protocol.commit import OffsetFetchRequest, GroupCoordinatorRequest
860+
from kafka.protocol.admin import ApiVersionRequest, ListGroupsRequest
861+
from kafka.protocol.commit import OffsetFetchRequest, GroupCoordinatorRequest
862862

863863
# Socket errors are logged as exceptions and can alarm users. Mute them
864864
from logging import Filter

kafka/consumer/__init__.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -1,8 +1,8 @@
11
from __future__ import absolute_import
22

3-
from .simple import SimpleConsumer
4-
from .multiprocess import MultiProcessConsumer
5-
from .group import KafkaConsumer
3+
from kafka.consumer.simple import SimpleConsumer
4+
from kafka.consumer.multiprocess import MultiProcessConsumer
5+
from kafka.consumer.group import KafkaConsumer
66

77
__all__ = [
88
'SimpleConsumer', 'MultiProcessConsumer', 'KafkaConsumer'

kafka/consumer/multiprocess.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -8,15 +8,15 @@
88

99
from kafka.vendor.six.moves import queue # pylint: disable=import-error
1010

11-
from ..common import KafkaError
12-
from .base import (
11+
from kafka.common import KafkaError
12+
from kafka.consumer.base import (
1313
Consumer,
1414
AUTO_COMMIT_MSG_COUNT, AUTO_COMMIT_INTERVAL,
1515
NO_MESSAGES_WAIT_TIME_SECONDS,
1616
FULL_QUEUE_WAIT_TIME_SECONDS,
1717
MAX_BACKOFF_SECONDS,
1818
)
19-
from .simple import SimpleConsumer
19+
from kafka.consumer.simple import SimpleConsumer
2020

2121

2222
log = logging.getLogger(__name__)

kafka/consumer/simple.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -12,7 +12,7 @@
1212
from kafka.vendor import six
1313
from kafka.vendor.six.moves import queue # pylint: disable=import-error
1414

15-
from .base import (
15+
from kafka.consumer.base import (
1616
Consumer,
1717
FETCH_DEFAULT_BLOCK_TIMEOUT,
1818
AUTO_COMMIT_MSG_COUNT,
@@ -24,7 +24,7 @@
2424
ITER_TIMEOUT_SECONDS,
2525
NO_MESSAGES_WAIT_TIME_SECONDS
2626
)
27-
from ..common import (
27+
from kafka.common import (
2828
FetchRequestPayload, KafkaError, OffsetRequestPayload,
2929
ConsumerFetchSizeTooSmall,
3030
UnknownTopicOrPartitionError, NotLeaderForPartitionError,

kafka/coordinator/assignors/range.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -5,8 +5,8 @@
55

66
from kafka.vendor import six
77

8-
from .abstract import AbstractPartitionAssignor
9-
from ..protocol import ConsumerProtocolMemberMetadata, ConsumerProtocolMemberAssignment
8+
from kafka.coordinator.assignors.abstract import AbstractPartitionAssignor
9+
from kafka.coordinator.protocol import ConsumerProtocolMemberMetadata, ConsumerProtocolMemberAssignment
1010

1111
log = logging.getLogger(__name__)
1212

kafka/coordinator/assignors/roundrobin.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -6,9 +6,9 @@
66

77
from kafka.vendor import six
88

9-
from .abstract import AbstractPartitionAssignor
10-
from ...common import TopicPartition
11-
from ..protocol import ConsumerProtocolMemberMetadata, ConsumerProtocolMemberAssignment
9+
from kafka.coordinator.assignors.abstract import AbstractPartitionAssignor
10+
from kafka.common import TopicPartition
11+
from kafka.coordinator.protocol import ConsumerProtocolMemberMetadata, ConsumerProtocolMemberAssignment
1212

1313
log = logging.getLogger(__name__)
1414

0 commit comments

Comments
 (0)