Skip to content

Jkeene.aws msk #1

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 9 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
116 changes: 116 additions & 0 deletions aws_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,116 @@
#!/usr/bin/env python
import threading, time

from kafka import KafkaAdminClient, KafkaConsumer, KafkaProducer
from kafka.admin import NewTopic
import sys
from os import environ


# Connection settings pulled from the environment (None when unset).
BOOTSTRAP_SERVERS = environ.get("KAFKA_BROKERS")
AWS_ACCESS_KEY_ID = environ.get("KAFKA_AWS_ACCESS_ID")
AWS_SECRET_ACCESS_KEY = environ.get("KAFKA_AWS_SECRET_ID")
AWS_REGION = environ.get("KAFKA_AWS_REGION")


TOPIC_NAME = 'data.sandbox'
GROUP_NAME = 'data.sandbox'
SASL_MECHANISM = 'AWSMSKIAM'
# Fix: a dead store (`SASL_PROTOCOL = 'SSL'`) that was immediately
# overwritten has been removed; only this final value was ever used.
SASL_PROTOCOL = 'SASL_SSL'


class Producer(threading.Thread):
    """Thread that publishes two test messages per second to TOPIC_NAME.

    Runs until stop() is called, then closes the underlying KafkaProducer.
    """

    def __init__(self):
        super().__init__()
        self.stop_event = threading.Event()

    def stop(self):
        """Ask the run() loop to finish and release the producer."""
        self.stop_event.set()

    def run(self):
        # Authenticate against MSK with IAM credentials over SASL_SSL.
        producer = KafkaProducer(
            bootstrap_servers=BOOTSTRAP_SERVERS,
            sasl_aws_msk_iam_access_key_id=AWS_ACCESS_KEY_ID,
            sasl_aws_msk_iam_secret_access_key=AWS_SECRET_ACCESS_KEY,
            sasl_aws_msk_region=AWS_REGION,
            security_protocol=SASL_PROTOCOL,
            sasl_mechanism=SASL_MECHANISM,
        )

        while not self.stop_event.is_set():
            producer.send(TOPIC_NAME, b"test")
            producer.send(TOPIC_NAME, b"\xc2Hola, mundo!")
            time.sleep(1)

        producer.close()


class Consumer(threading.Thread):
    """Thread that reads messages from TOPIC_NAME and prints them.

    Polls until stop() is called, then closes the underlying KafkaConsumer.
    """

    def __init__(self):
        super().__init__()
        self.stop_event = threading.Event()

    def stop(self):
        """Ask the run() loop to finish and release the consumer."""
        self.stop_event.set()

    def run(self):
        # Same MSK IAM / SASL_SSL settings as the producer; the 1s
        # consumer timeout lets the outer loop re-check stop_event.
        consumer = KafkaConsumer(
            bootstrap_servers=BOOTSTRAP_SERVERS,
            auto_offset_reset='earliest',
            consumer_timeout_ms=1000,
            sasl_aws_msk_iam_access_key_id=AWS_ACCESS_KEY_ID,
            sasl_aws_msk_iam_secret_access_key=AWS_SECRET_ACCESS_KEY,
            sasl_aws_msk_region=AWS_REGION,
            security_protocol=SASL_PROTOCOL,
            group_id=GROUP_NAME,
            sasl_mechanism=SASL_MECHANISM,
        )
        consumer.subscribe([TOPIC_NAME])

        while not self.stop_event.is_set():
            for message in consumer:
                print(f"consumer: {message}")
                if self.stop_event.is_set():
                    break

        consumer.close()


def main():
    """Demo driver: build admin client, run a producer for ~10s, shut down.

    Topic creation and the consumer are intentionally left disabled
    (commented out) in this sandbox example.
    """
    try:
        admin = KafkaAdminClient(
            bootstrap_servers=BOOTSTRAP_SERVERS,
            sasl_aws_msk_iam_access_key_id=AWS_ACCESS_KEY_ID,
            sasl_aws_msk_iam_secret_access_key=AWS_SECRET_ACCESS_KEY,
            sasl_aws_msk_region=AWS_REGION,
            security_protocol=SASL_PROTOCOL,
            sasl_mechanism=SASL_MECHANISM,
        )

        topic = NewTopic(
            name=TOPIC_NAME,
            num_partitions=1,
            replication_factor=1,
        )
        #admin.create_topics([topic])
    except Exception as e:
        # Best-effort: report the failure but keep the demo running.
        print(str(e), file=sys.stderr)

    workers = [
        Producer(),
        #Consumer()
    ]

    for worker in workers:
        worker.start()

    # Let the producer publish for a while before tearing down.
    time.sleep(10)

    for worker in workers:
        worker.stop()

    for worker in workers:
        worker.join()


if __name__ == "__main__":
    main()
14 changes: 8 additions & 6 deletions example.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,9 @@

from kafka import KafkaAdminClient, KafkaConsumer, KafkaProducer
from kafka.admin import NewTopic
import sys

BOOTSTRAP_SERVERS = 'localhost:9092'

class Producer(threading.Thread):
def __init__(self):
Expand All @@ -14,7 +16,7 @@ def stop(self):
self.stop_event.set()

def run(self):
producer = KafkaProducer(bootstrap_servers='localhost:9092')
producer = KafkaProducer(bootstrap_servers=BOOTSTRAP_SERVERS)

while not self.stop_event.is_set():
producer.send('my-topic', b"test")
Expand All @@ -33,14 +35,14 @@ def stop(self):
self.stop_event.set()

def run(self):
consumer = KafkaConsumer(bootstrap_servers='localhost:9092',
consumer = KafkaConsumer(bootstrap_servers=BOOTSTRAP_SERVERS,
auto_offset_reset='earliest',
consumer_timeout_ms=1000)
consumer.subscribe(['my-topic'])

while not self.stop_event.is_set():
for message in consumer:
print(message)
print(f"consumer: {message}")
if self.stop_event.is_set():
break

Expand All @@ -50,14 +52,14 @@ def run(self):
def main():
# Create 'my-topic' Kafka topic
try:
admin = KafkaAdminClient(bootstrap_servers='localhost:9092')
admin = KafkaAdminClient(bootstrap_servers=BOOTSTRAP_SERVERS)

topic = NewTopic(name='my-topic',
num_partitions=1,
replication_factor=1)
admin.create_topics([topic])
except Exception:
pass
except Exception as e:
print(str(e), file=sys.stderr)

tasks = [
Producer(),
Expand Down
6 changes: 6 additions & 0 deletions kafka/admin/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,6 +146,9 @@ class KafkaAdminClient(object):
sasl mechanism handshake. Default: one of bootstrap servers
sasl_oauth_token_provider (AbstractTokenProvider): OAuthBearer token provider
instance. (See kafka.oauth.abstract). Default: None
sasl_aws_msk_iam_access_key_id (str): aws access key id for msk_iam auth. Default: None
sasl_aws_msk_iam_secret_access_key (str): aws secret access key for msk_iam auth. Default: None
sasl_aws_msk_region (str): aws region for msk_iam auth. Default: None
kafka_client (callable): Custom class / callable for creating KafkaClient instances

"""
Expand Down Expand Up @@ -182,6 +185,9 @@ class KafkaAdminClient(object):
'sasl_kerberos_service_name': 'kafka',
'sasl_kerberos_domain_name': None,
'sasl_oauth_token_provider': None,
'sasl_aws_msk_iam_access_key_id': None,
'sasl_aws_msk_iam_secret_access_key': None,
'sasl_aws_msk_region': None,

# metrics configs
'metric_reporters': [],
Expand Down
39 changes: 39 additions & 0 deletions kafka/aws_utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
from hashlib import sha256
from string import digits, ascii_letters

# Characters that pass through URI encoding unchanged: the URI
# "unreserved" set (RFC 3986): letters, digits, and _ - ~ .
ALPHA_NUMERIC_AND_SOME_MISC = "_-~." + digits + ascii_letters


def is_alpha_numeric_or_some_misc(arg: str) -> bool:
    """Return True if *arg* occurs within the unreserved-character string.

    NOTE(review): this is a substring test — intended for single
    characters; multi-character input matches only if it happens to be a
    contiguous run of the alphabet string.
    """
    return ALPHA_NUMERIC_AND_SOME_MISC.find(arg) != -1


def bin_to_hex(s: str) -> str:
    """Return the uppercase hex representation of *s*'s UTF-8 bytes."""
    return s.encode("utf-8").hex().upper()


def aws_uri_encode(arg: str, encode_slash: bool = True) -> str:
    """URI-encode *arg* following the AWS SigV4 canonical-request rules.

    Unreserved characters (letters, digits, ``_ - ~ .``) pass through
    unchanged.  ``/`` becomes ``%2F`` unless *encode_slash* is False.
    Every other character is percent-encoded as one ``%XX`` token per
    UTF-8 byte.

    Fixes two bugs in the previous version:
    * the unreserved check tested the whole input string (``arg``)
      instead of the current character, so classification was wrong for
      any multi-character input;
    * non-ASCII characters were emitted as a single ``%`` followed by
      all their hex bytes (e.g. ``%C3A9``) instead of one ``%XX`` per
      byte (``%C3%A9``) as SigV4 requires.
    """
    unreserved = "_-~." + digits + ascii_letters
    pieces = []

    for char in arg:
        if char in unreserved:
            pieces.append(char)
        elif char == '/':
            pieces.append('%2F' if encode_slash else char)
        else:
            # Percent-encode each UTF-8 byte separately.
            pieces.append(''.join('%{:02X}'.format(b) for b in char.encode('utf-8')))

    return ''.join(pieces)


def get_digester():
    """Return a fresh sha256 hash object for computing message digests."""
    return sha256()


8 changes: 7 additions & 1 deletion kafka/client_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,9 @@ class KafkaClient(object):
sasl mechanism handshake. Default: one of bootstrap servers
sasl_oauth_token_provider (AbstractTokenProvider): OAuthBearer token provider
instance. (See kafka.oauth.abstract). Default: None
sasl_aws_msk_iam_access_key_id (str): aws access key id for msk_iam auth. Default: None
sasl_aws_msk_iam_secret_access_key (str): aws secret access key for msk_iam auth. Default: None
sasl_aws_msk_region (str): aws region for msk_iam auth. Default: None
"""

DEFAULT_CONFIG = {
Expand Down Expand Up @@ -192,7 +195,10 @@ class KafkaClient(object):
'sasl_plain_password': None,
'sasl_kerberos_service_name': 'kafka',
'sasl_kerberos_domain_name': None,
'sasl_oauth_token_provider': None
'sasl_oauth_token_provider': None,
'sasl_aws_msk_iam_access_key_id': None,
'sasl_aws_msk_iam_secret_access_key': None,
'sasl_aws_msk_region': None,
}

def __init__(self, **configs):
Expand Down
Loading