diff --git a/Teste Kafka-1729006037150.json b/Teste Kafka-1729006037150.json
new file mode 100644
index 0000000..ed37c66
--- /dev/null
+++ b/Teste Kafka-1729006037150.json
@@ -0,0 +1,168 @@
+{
+ "__inputs": [
+ {
+ "name": "DS_KAFKA",
+ "label": "kafka",
+ "description": "",
+ "type": "datasource",
+ "pluginId": "hamedkarbasi93-kafka-datasource",
+ "pluginName": "Kafka"
+ }
+ ],
+ "__elements": {},
+ "__requires": [
+ {
+ "type": "grafana",
+ "id": "grafana",
+ "name": "Grafana",
+ "version": "11.2.2"
+ },
+ {
+ "type": "datasource",
+ "id": "hamedkarbasi93-kafka-datasource",
+ "name": "Kafka",
+ "version": "0.2.0"
+ },
+ {
+ "type": "panel",
+ "id": "timeseries",
+ "name": "Time series",
+ "version": ""
+ }
+ ],
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": {
+ "type": "grafana",
+ "uid": "-- Grafana --"
+ },
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "fiscalYearStartMonth": 0,
+ "graphTooltip": 0,
+ "id": null,
+ "links": [],
+ "panels": [
+ {
+ "datasource": {
+ "type": "hamedkarbasi93-kafka-datasource",
+ "uid": "${DS_KAFKA}"
+ },
+ "description": "Infelizmente o plugin do Kafka não é oficial e só consegue ler JSONs com estrutura básica de chave: valor. \n\nDemais encadeamentos serão ignorados.",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisBorderShow": false,
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "barWidthFactor": 0.6,
+ "drawStyle": "line",
+ "fillOpacity": 0,
+ "gradientMode": "none",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "insertNulls": false,
+ "lineInterpolation": "linear",
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "auto",
+ "spanNulls": false,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 12,
+ "x": 0,
+ "y": 0
+ },
+ "id": 1,
+ "options": {
+ "legend": {
+ "calcs": [],
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": true
+ },
+ "tooltip": {
+ "mode": "single",
+ "sort": "none"
+ }
+ },
+ "targets": [
+ {
+ "autoOffsetReset": "latest",
+ "datasource": {
+ "type": "hamedkarbasi93-kafka-datasource",
+ "uid": "${DS_KAFKA}"
+ },
+ "partition": 0,
+ "refId": "A",
+ "timestampMode": "now",
+ "topicName": "prometheus-events",
+ "withStreaming": true
+ }
+ ],
+ "title": "Teste Kafka",
+ "type": "timeseries"
+ }
+ ],
+ "schemaVersion": 39,
+ "tags": [],
+ "templating": {
+ "list": []
+ },
+ "time": {
+ "from": "now-6h",
+ "to": "now"
+ },
+ "timepicker": {},
+ "timezone": "browser",
+ "title": "Teste Kafka",
+ "uid": "fe0yj7ufd2xogf",
+ "version": 1,
+ "weekStart": ""
+}
\ No newline at end of file
diff --git a/apache_kafka_3.8.0.json b/apache_kafka_3.8.0.json
new file mode 100644
index 0000000..6e2ee16
--- /dev/null
+++ b/apache_kafka_3.8.0.json
@@ -0,0 +1,295 @@
+{
+ "Id": "2190a61276b4406249d1ddb4067dcbced4ded0cb283adce03e12a8864cfd5202",
+ "Created": "2024-10-15T12:52:37.961106028Z",
+ "Path": "/__cacert_entrypoint.sh",
+ "Args": [
+ "/etc/kafka/docker/run"
+ ],
+ "State": {
+ "Status": "running",
+ "Running": true,
+ "Paused": false,
+ "Restarting": false,
+ "OOMKilled": false,
+ "Dead": false,
+ "Pid": 11803,
+ "ExitCode": 0,
+ "Error": "",
+ "StartedAt": "2024-10-15T12:52:38.149484735Z",
+ "FinishedAt": "0001-01-01T00:00:00Z"
+ },
+ "Image": "sha256:b610bd8a193ab94359c15d0419831169d6ef8090ee1ef0cc00e37c5fa87a8061",
+ "ResolvConfPath": "/var/lib/docker/containers/2190a61276b4406249d1ddb4067dcbced4ded0cb283adce03e12a8864cfd5202/resolv.conf",
+ "HostnamePath": "/var/lib/docker/containers/2190a61276b4406249d1ddb4067dcbced4ded0cb283adce03e12a8864cfd5202/hostname",
+ "HostsPath": "/var/lib/docker/containers/2190a61276b4406249d1ddb4067dcbced4ded0cb283adce03e12a8864cfd5202/hosts",
+ "LogPath": "/var/lib/docker/containers/2190a61276b4406249d1ddb4067dcbced4ded0cb283adce03e12a8864cfd5202/2190a61276b4406249d1ddb4067dcbced4ded0cb283adce03e12a8864cfd5202-json.log",
+ "Name": "/kafka",
+ "RestartCount": 0,
+ "Driver": "overlay2",
+ "Platform": "linux",
+ "MountLabel": "",
+ "ProcessLabel": "",
+ "AppArmorProfile": "",
+ "ExecIDs": null,
+ "HostConfig": {
+ "Binds": null,
+ "ContainerIDFile": "",
+ "LogConfig": {
+ "Type": "json-file",
+ "Config": {}
+ },
+ "NetworkMode": "kafka-stack_default",
+ "PortBindings": {
+ "9092/tcp": [
+ {
+ "HostIp": "",
+ "HostPort": "9092"
+ }
+ ]
+ },
+ "RestartPolicy": {
+ "Name": "no",
+ "MaximumRetryCount": 0
+ },
+ "AutoRemove": false,
+ "VolumeDriver": "",
+ "VolumesFrom": null,
+ "ConsoleSize": [
+ 0,
+ 0
+ ],
+ "CapAdd": null,
+ "CapDrop": null,
+ "CgroupnsMode": "host",
+ "Dns": null,
+ "DnsOptions": null,
+ "DnsSearch": null,
+ "ExtraHosts": [],
+ "GroupAdd": null,
+ "IpcMode": "private",
+ "Cgroup": "",
+ "Links": null,
+ "OomScoreAdj": 0,
+ "PidMode": "",
+ "Privileged": false,
+ "PublishAllPorts": false,
+ "ReadonlyRootfs": false,
+ "SecurityOpt": null,
+ "UTSMode": "",
+ "UsernsMode": "",
+ "ShmSize": 67108864,
+ "Runtime": "runc",
+ "Isolation": "",
+ "CpuShares": 0,
+ "Memory": 0,
+ "NanoCpus": 0,
+ "CgroupParent": "",
+ "BlkioWeight": 0,
+ "BlkioWeightDevice": null,
+ "BlkioDeviceReadBps": null,
+ "BlkioDeviceWriteBps": null,
+ "BlkioDeviceReadIOps": null,
+ "BlkioDeviceWriteIOps": null,
+ "CpuPeriod": 0,
+ "CpuQuota": 0,
+ "CpuRealtimePeriod": 0,
+ "CpuRealtimeRuntime": 0,
+ "CpusetCpus": "",
+ "CpusetMems": "",
+ "Devices": null,
+ "DeviceCgroupRules": null,
+ "DeviceRequests": null,
+ "MemoryReservation": 0,
+ "MemorySwap": 0,
+ "MemorySwappiness": null,
+ "OomKillDisable": false,
+ "PidsLimit": null,
+ "Ulimits": null,
+ "CpuCount": 0,
+ "CpuPercent": 0,
+ "IOMaximumIOps": 0,
+ "IOMaximumBandwidth": 0,
+ "Mounts": [
+ {
+ "Type": "volume",
+ "Source": "e5fa1d06aa48e8a2344461829b37c903d79ca81d818546f7eb383e16d837d2d0",
+ "Target": "/etc/kafka/secrets"
+ },
+ {
+ "Type": "volume",
+ "Source": "cb77efaf73cedb933262e4ef33ea6074188d28a4b45bf9e88e6a388770c617f5",
+ "Target": "/mnt/shared/config"
+ },
+ {
+ "Type": "volume",
+ "Source": "9c8e656e3c3e136315bda911815eb129dde0aefaa0fac9ff22fe65d5cad41fe8",
+ "Target": "/var/lib/kafka/data"
+ }
+ ],
+ "MaskedPaths": [
+ "/proc/asound",
+ "/proc/acpi",
+ "/proc/kcore",
+ "/proc/keys",
+ "/proc/latency_stats",
+ "/proc/timer_list",
+ "/proc/timer_stats",
+ "/proc/sched_debug",
+ "/proc/scsi",
+ "/sys/firmware",
+ "/sys/devices/virtual/powercap"
+ ],
+ "ReadonlyPaths": [
+ "/proc/bus",
+ "/proc/fs",
+ "/proc/irq",
+ "/proc/sys",
+ "/proc/sysrq-trigger"
+ ]
+ },
+ "GraphDriver": {
+ "Data": {
+ "LowerDir": "/var/lib/docker/overlay2/b7a12cd3b79dab93acfcf59c37557439eae6ba378dbd5d4d1259221a3f9153c5-init/diff:/var/lib/docker/overlay2/b700669ad3084bce7e1515c819a48c8508c5fcbad0d4050c50c1176ce1ce8f65/diff:/var/lib/docker/overlay2/d118afb40093dc5bb41d0064ffffd7cd82ae3ced86c8d2b1fe8c70d3497e0f3a/diff:/var/lib/docker/overlay2/94e73fc25cca57530765fda06f246db4577c4ec9b34699e80f51af1c5eb7c297/diff:/var/lib/docker/overlay2/010546f2892bd3d11338facf8ea8c3e075399ba81b4eefce1d32652b48804e5f/diff:/var/lib/docker/overlay2/1cb1367950875439a93ac42cef99b1ab4f2bce8d11afae060036f4541d171179/diff:/var/lib/docker/overlay2/57cd54b8f15e7f7857157ce8f0da9466740a060f84f7edb723284c171ec61c86/diff:/var/lib/docker/overlay2/3f5d22da6bcca8987a9114f794b89c383758643e0184424017fb8c56eaca0c6c/diff:/var/lib/docker/overlay2/f36ddd1f63bc5905192d21640c2d944162fe7b21d17124d885f656834991b7f6/diff:/var/lib/docker/overlay2/bf2e92a52fda891739d9cdef4bb2e6ebc1c3224707403dd426f34a0cdebd598d/diff:/var/lib/docker/overlay2/18aba7d42afebb5b3147571eda03a2010ed5432c80110580c71e73494e74ca4b/diff",
+ "MergedDir": "/var/lib/docker/overlay2/b7a12cd3b79dab93acfcf59c37557439eae6ba378dbd5d4d1259221a3f9153c5/merged",
+ "UpperDir": "/var/lib/docker/overlay2/b7a12cd3b79dab93acfcf59c37557439eae6ba378dbd5d4d1259221a3f9153c5/diff",
+ "WorkDir": "/var/lib/docker/overlay2/b7a12cd3b79dab93acfcf59c37557439eae6ba378dbd5d4d1259221a3f9153c5/work"
+ },
+ "Name": "overlay2"
+ },
+ "Mounts": [
+ {
+ "Type": "volume",
+ "Name": "e5fa1d06aa48e8a2344461829b37c903d79ca81d818546f7eb383e16d837d2d0",
+ "Source": "/var/lib/docker/volumes/e5fa1d06aa48e8a2344461829b37c903d79ca81d818546f7eb383e16d837d2d0/_data",
+ "Destination": "/etc/kafka/secrets",
+ "Driver": "local",
+ "Mode": "z",
+ "RW": true,
+ "Propagation": ""
+ },
+ {
+ "Type": "volume",
+ "Name": "cb77efaf73cedb933262e4ef33ea6074188d28a4b45bf9e88e6a388770c617f5",
+ "Source": "/var/lib/docker/volumes/cb77efaf73cedb933262e4ef33ea6074188d28a4b45bf9e88e6a388770c617f5/_data",
+ "Destination": "/mnt/shared/config",
+ "Driver": "local",
+ "Mode": "z",
+ "RW": true,
+ "Propagation": ""
+ },
+ {
+ "Type": "volume",
+ "Name": "9c8e656e3c3e136315bda911815eb129dde0aefaa0fac9ff22fe65d5cad41fe8",
+ "Source": "/var/lib/docker/volumes/9c8e656e3c3e136315bda911815eb129dde0aefaa0fac9ff22fe65d5cad41fe8/_data",
+ "Destination": "/var/lib/kafka/data",
+ "Driver": "local",
+ "Mode": "z",
+ "RW": true,
+ "Propagation": ""
+ }
+ ],
+ "Config": {
+ "Hostname": "2190a61276b4",
+ "Domainname": "",
+ "User": "appuser",
+ "AttachStdin": false,
+ "AttachStdout": true,
+ "AttachStderr": true,
+ "ExposedPorts": {
+ "9092/tcp": {}
+ },
+ "Tty": false,
+ "OpenStdin": false,
+ "StdinOnce": false,
+ "Env": [
+ "PATH=/opt/java/openjdk/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
+ "JAVA_HOME=/opt/java/openjdk",
+ "LANG=en_US.UTF-8",
+ "LANGUAGE=en_US:en",
+ "LC_ALL=en_US.UTF-8",
+ "JAVA_VERSION=jdk-21.0.4+7"
+ ],
+ "Cmd": [
+ "/etc/kafka/docker/run"
+ ],
+ "Image": "apache/kafka:3.8.0",
+ "Volumes": {
+ "/etc/kafka/secrets": {},
+ "/mnt/shared/config": {},
+ "/var/lib/kafka/data": {}
+ },
+ "WorkingDir": "",
+ "Entrypoint": [
+ "/__cacert_entrypoint.sh"
+ ],
+ "OnBuild": null,
+ "Labels": {
+ "com.docker.compose.config-hash": "1270fc41a95eaa03a25d644620568b8188a55ac2745e3babceddef585dc74cd4",
+ "com.docker.compose.container-number": "1",
+ "com.docker.compose.depends_on": "",
+ "com.docker.compose.image": "sha256:b610bd8a193ab94359c15d0419831169d6ef8090ee1ef0cc00e37c5fa87a8061",
+ "com.docker.compose.oneoff": "False",
+ "com.docker.compose.project": "kafka-stack",
+ "com.docker.compose.project.config_files": "E:\\forge\\kafka-stack\\docker-compose.yml",
+ "com.docker.compose.project.working_dir": "E:\\forge\\kafka-stack",
+ "com.docker.compose.replace": "62389cb2faa223c2e10ba44f6626ca264258750b9cdb7ed2fc8852d5ad54268d",
+ "com.docker.compose.service": "kafka",
+ "com.docker.compose.version": "2.29.2",
+ "maintainer": "Apache Kafka",
+ "org.label-schema.build-date": "2024-07-23",
+ "org.label-schema.description": "Apache Kafka",
+ "org.label-schema.name": "kafka",
+ "org.label-schema.vcs-url": "https://github.com/apache/kafka"
+ }
+ },
+ "NetworkSettings": {
+ "Bridge": "",
+ "SandboxID": "ed0473f0a6e7f8e7255be7b1820ec9d97efd447e6582d0b41475b3c54133b165",
+ "SandboxKey": "/var/run/docker/netns/ed0473f0a6e7",
+ "Ports": {
+ "9092/tcp": [
+ {
+ "HostIp": "0.0.0.0",
+ "HostPort": "9092"
+ }
+ ]
+ },
+ "HairpinMode": false,
+ "LinkLocalIPv6Address": "",
+ "LinkLocalIPv6PrefixLen": 0,
+ "SecondaryIPAddresses": null,
+ "SecondaryIPv6Addresses": null,
+ "EndpointID": "",
+ "Gateway": "",
+ "GlobalIPv6Address": "",
+ "GlobalIPv6PrefixLen": 0,
+ "IPAddress": "",
+ "IPPrefixLen": 0,
+ "IPv6Gateway": "",
+ "MacAddress": "",
+ "Networks": {
+ "kafka-stack_default": {
+ "IPAMConfig": null,
+ "Links": null,
+ "Aliases": [
+ "kafka",
+ "kafka"
+ ],
+ "MacAddress": "02:42:ac:13:00:02",
+ "DriverOpts": null,
+ "NetworkID": "617446b562e2873e10689daa248f543b05157b5cc3fcce79d503656e4f57f926",
+ "EndpointID": "7492836cf3bf28b30d2b9de1be53abfa325141aae338ff790999e8cdd256c102",
+ "Gateway": "172.19.0.1",
+ "IPAddress": "172.19.0.2",
+ "IPPrefixLen": 16,
+ "IPv6Gateway": "",
+ "GlobalIPv6Address": "",
+ "GlobalIPv6PrefixLen": 0,
+ "DNSNames": [
+ "kafka",
+ "2190a61276b4"
+ ]
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/cheatsheet.sh b/cheatsheet.sh
new file mode 100644
index 0000000..38cb985
--- /dev/null
+++ b/cheatsheet.sh
@@ -0,0 +1,14 @@
+# kafka location on container
+/opt/kafka
+
+# create topic
+bin/kafka-topics.sh --create --topic topic_name --bootstrap-server localhost:9092
+
+# list topics
+bin/kafka-topics.sh --bootstrap-server localhost:9092 --list
+
+# produce message
+bin/kafka-console-producer.sh --topic topic_name --bootstrap-server localhost:9092
+
+# consume message
+bin/kafka-console-consumer.sh --topic prometheus-events --bootstrap-server localhost:9092 --from-beginning
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..3d59b16
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,58 @@
+---
+services:
+ zookeeper:
+ image: confluentinc/cp-zookeeper:7.4.4
+ container_name: zookeeper
+ environment:
+ ZOOKEEPER_CLIENT_PORT: 2181
+ ZOOKEEPER_TICK_TIME: 2000
+ ports:
+ - 22181:2181
+
+ kafka:
+ image: confluentinc/cp-kafka:7.4.4
+ container_name: kafka
+ depends_on:
+ - zookeeper
+ ports:
+ - 29092:29092
+ - 9092:9092
+ environment:
+ KAFKA_BROKER_ID: 1
+ KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://localhost:29092
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+ KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+
+ prometheus:
+ container_name: prometheus
+ image: prom/prometheus:v2.54.1
+ command:
+ - --web.enable-admin-api
+ - --config.file=/etc/prometheus/prometheus.yml
+ - --storage.tsdb.path=/prometheus
+ ports:
+ - 9090:9090
+ volumes:
+ - ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
+
+ prom-kafka:
+ image: telefonica/prometheus-kafka-adapter:1.9.1
+ container_name: prom-kafka
+ ports:
+ - 8080:8080
+ environment:
+ - KAFKA_BROKER_LIST=kafka:9092
+ - KAFKA_TOPIC=prometheus-events
+
+ grafana:
+ container_name: grafana
+ image: grafana/grafana:11.2.2
+ # user: $(id -u)
+ environment:
+ GF_INSTALL_PLUGINS: hamedkarbasi93-kafka-datasource
+ ports:
+ - "3000:3000"
+ volumes:
+ - ./grafana_data:/var/lib/grafana
\ No newline at end of file
diff --git a/grafana.ini b/grafana.ini
new file mode 100644
index 0000000..c45e0bc
--- /dev/null
+++ b/grafana.ini
@@ -0,0 +1,2 @@
+[plugins]
+allow_loading_unsigned_plugins=meln5674-mongodb-community
diff --git a/grafana_data/grafana.db b/grafana_data/grafana.db
new file mode 100644
index 0000000..b2a9e4e
Binary files /dev/null and b/grafana_data/grafana.db differ
diff --git a/grafana_data/plugins/hamedkarbasi93-kafka-datasource/CHANGELOG.md b/grafana_data/plugins/hamedkarbasi93-kafka-datasource/CHANGELOG.md
new file mode 100644
index 0000000..9d63e78
--- /dev/null
+++ b/grafana_data/plugins/hamedkarbasi93-kafka-datasource/CHANGELOG.md
@@ -0,0 +1,5 @@
+# Changelog
+
+## 1.0.0 (Unreleased)
+
+Initial release.
diff --git a/grafana_data/plugins/hamedkarbasi93-kafka-datasource/LICENSE b/grafana_data/plugins/hamedkarbasi93-kafka-datasource/LICENSE
new file mode 100644
index 0000000..8dada3e
--- /dev/null
+++ b/grafana_data/plugins/hamedkarbasi93-kafka-datasource/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/grafana_data/plugins/hamedkarbasi93-kafka-datasource/MANIFEST.txt b/grafana_data/plugins/hamedkarbasi93-kafka-datasource/MANIFEST.txt
new file mode 100644
index 0000000..e22b959
--- /dev/null
+++ b/grafana_data/plugins/hamedkarbasi93-kafka-datasource/MANIFEST.txt
@@ -0,0 +1,37 @@
+
+-----BEGIN PGP SIGNED MESSAGE-----
+Hash: SHA512
+
+{
+ "manifestVersion": "2.0.0",
+ "signatureType": "community",
+ "signedByOrg": "hamedkarbasi93",
+ "signedByOrgName": "hamedkarbasi93",
+ "plugin": "hamedkarbasi93-kafka-datasource",
+ "version": "0.2.0",
+ "time": 1658868046577,
+ "keyId": "7e4d0c6a708866e7",
+ "files": {
+ "CHANGELOG.md": "aaa78be4710ef41d56ddab1edbd180ef9f9adfea2f293109aae925dc33a1b9b3",
+ "module.js.map": "046aa3b839d76eb2cd78294ba7406a92039ce4cfffa78a0dc1364483a4cf5413",
+ "LICENSE": "b40930bbcf80744c86c46a12bc9da056641d722716c378f5659b9e555ef833e1",
+ "module.js": "1d02c3162ac34703cd35797c1ab01d2e0bff46bd8a069f57812d900fad8700f5",
+ "img/graph.gif": "bc2435427bdc894600c085f1182d4361b9f0b2a3b9ebcbd255210ddc68de6651",
+ "img/kafka_logo.svg": "6fa65c611b19a253f716f9a9811c342c92212cb3b2d74e4e358fa300faa7fb18",
+ "module.js.LICENSE.txt": "0d8f66cd4afb566cb5b7e1540c68f43b939d3eba12ace290f18abc4f4cb53ed0",
+ "plugin.json": "b5aa465fe184c14a9493a2d169d896f0d5b36a5a8eefcea3c24f4cfc895ba39a",
+ "gpx_kafka-datasource_linux_amd64": "a98939f318f1b2729b60d03971120b22295c46aa4775c9aebefce0988be75b8a",
+ "README.md": "3dc96ad1b75a53b2a50bebf189cc88383786b34d606d5b153aef7ea931d25104"
+ }
+}
+-----BEGIN PGP SIGNATURE-----
+Version: OpenPGP.js v4.10.10
+Comment: https://openpgpjs.org
+
+wrkEARMKAAYFAmLgUU4AIQkQfk0ManCIZucWIQTzOyW2kQdOhGNlcPN+TQxq
+cIhm53Y/AgkBH7wc6V52nSKXMgpw7fmZvWrPnYgOXEyz6jDt03sWF/wGllsp
+6kwWe8Gi4RFCE6+JuIVg4BOpsRxRb1aet5V8rh4CCQH5iR5WxLEiDoAu/UX+
+QAfCcgNKsSJP/chSqH5rEHjEm0bJhCrYMvb7b3CrlQ460VKvXLfb2gm2cjHW
+gwPX3DxyFw==
+=W2Ph
+-----END PGP SIGNATURE-----
diff --git a/grafana_data/plugins/hamedkarbasi93-kafka-datasource/README.md b/grafana_data/plugins/hamedkarbasi93-kafka-datasource/README.md
new file mode 100644
index 0000000..884f86f
--- /dev/null
+++ b/grafana_data/plugins/hamedkarbasi93-kafka-datasource/README.md
@@ -0,0 +1,131 @@
+# Kafka Datasource for Grafana
+[](LICENSE)
+[](https://github.com/hoptical/grafana-kafka-datasource/actions/workflows/ci.yml)
+[](https://github.com/hoptical/grafana-kafka-datasource/actions/workflows/release.yml)
+
+The Kafka data source plugin allows you to visualize streaming Kafka data from within Grafana.
+
+## Requirements
+
+- Apache Kafka v0.9+
+- Grafana v8.0+
+
+> Note: This is a backend plugin, so the Grafana server should have access to the Kafka broker.
+
+## Getting started
+
+### Installation via grafana-cli tool
+
+Use the grafana-cli tool to install the plugin from the command line:
+
+```bash
+grafana-cli plugins install hamedkarbasi93-kafka-datasource
+```
+
+The plugin will be installed into your grafana plugins directory; the default is `/var/lib/grafana/plugins`. [More information on the cli tool](https://grafana.com/docs/grafana/latest/administration/cli/#plugins-commands).
+
+### Installation via zip file
+
+Alternatively, you can manually download the [latest](https://github.com/hoptical/grafana-kafka-datasource/releases/latest) release .zip file and unpack it into your grafana plugins directory; the default is `/var/lib/grafana/plugins`.
+
+## Configure the data source
+
+[Add a data source](https://grafana.com/docs/grafana/latest/datasources/add-a-data-source/) by filling in the following fields:
+
+### Basic fields
+
+| Field | Description |
+| ----- | -------------------------------------------------- |
+| Name | A name for this particular AppDynamics data source |
+| Servers | The URL of the Kafka bootstrap servers separated by comma. E.g. `broker1:9092, broker2:9092` |
+
+### Query the Data source
+
+To query the Kafka topic, you have to configure the items below in the query editor.
+
+| Field | Description |
+| ----- | -------------------------------------------------- |
+| Topic | Topic Name |
+| Partition | Partition Number |
+| Auto offset reset | Starting offset to consume from: either the latest message or the last 100 messages. |
+| Timestamp Mode | Timestamp of the message value to visualize; it can be Now or Message Timestamp. |
+> **Note**: Make sure to enable the `streaming` toggle.
+
+
+
+## Known limitations
+
+- The plugin currently does not support any authorization and authentication method.
+- The plugin currently does not support TLS.
+- Plugin is based on [confluent-kafka-go](https://github.com/confluentinc/confluent-kafka-go), hence it only supports Linux-based operating systems as discussed in [#6](https://github.com/hoptical/grafana-kafka-datasource/issues/6). However, we're considering changing the base package to support all operating systems.
+
+This plugin supports topics publishing very simple JSON formatted messages. Note that only the following structure is supported as of now:
+
+```json
+{
+ "value1": 1.0,
+ "value2": 2,
+ "value3": 3.33,
+ ...
+}
+```
+
+We plan to support more complex JSON data structures, Protobuf and AVRO in the upcoming releases. Contributions are highly encouraged!
+## Compiling the data source by yourself
+
+A data source backend plugin consists of both frontend and backend components.
+
+### Frontend
+
+1. Install dependencies
+
+ ```bash
+ yarn install
+ ```
+
+2. Build plugin in development mode or run in watch mode
+
+ ```bash
+ yarn dev
+ ```
+
+ or
+
+ ```bash
+ yarn watch
+ ```
+
+3. Build plugin in production mode
+
+ ```bash
+ yarn build
+ ```
+
+### Backend
+
+1. Update [Grafana plugin SDK for Go](https://grafana.com/docs/grafana/latest/developers/plugins/backend/grafana-plugin-sdk-for-go/) dependency to the latest minor version:
+
+ ```bash
+ go get -u github.com/grafana/grafana-plugin-sdk-go
+ go mod tidy
+ ```
+
+2. Build backend plugin binaries for Linux:
+
+ ```bash
+ mage build:backend
+ ```
+
+## Contributing
+
+Thank you for considering contributing! If you find an issue or have a better way to do something, feel free to open an issue or a PR.
+
+## License
+
+This repository is open-sourced software licensed under the [Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0).
+
+## Learn more
+
+- [Build a data source backend plugin tutorial](https://grafana.com/tutorials/build-a-data-source-backend-plugin)
+
+- [Grafana plugin SDK for Go](https://grafana.com/docs/grafana/latest/developers/plugins/backend/grafana-plugin-sdk-for-go/)
diff --git a/grafana_data/plugins/hamedkarbasi93-kafka-datasource/gpx_kafka-datasource_linux_amd64 b/grafana_data/plugins/hamedkarbasi93-kafka-datasource/gpx_kafka-datasource_linux_amd64
new file mode 100644
index 0000000..47af12e
Binary files /dev/null and b/grafana_data/plugins/hamedkarbasi93-kafka-datasource/gpx_kafka-datasource_linux_amd64 differ
diff --git a/grafana_data/plugins/hamedkarbasi93-kafka-datasource/img/graph.gif b/grafana_data/plugins/hamedkarbasi93-kafka-datasource/img/graph.gif
new file mode 100644
index 0000000..1099150
Binary files /dev/null and b/grafana_data/plugins/hamedkarbasi93-kafka-datasource/img/graph.gif differ
diff --git a/grafana_data/plugins/hamedkarbasi93-kafka-datasource/img/kafka_logo.svg b/grafana_data/plugins/hamedkarbasi93-kafka-datasource/img/kafka_logo.svg
new file mode 100644
index 0000000..a685a30
--- /dev/null
+++ b/grafana_data/plugins/hamedkarbasi93-kafka-datasource/img/kafka_logo.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/grafana_data/plugins/hamedkarbasi93-kafka-datasource/module.js b/grafana_data/plugins/hamedkarbasi93-kafka-datasource/module.js
new file mode 100644
index 0000000..5414393
--- /dev/null
+++ b/grafana_data/plugins/hamedkarbasi93-kafka-datasource/module.js
@@ -0,0 +1,3 @@
+/*! For license information please see module.js.LICENSE.txt */
+define(["react","@grafana/ui","@grafana/data","@grafana/runtime","lodash"],(function(e,t,n,a,o){return function(e){var t={};function n(a){if(t[a])return t[a].exports;var o=t[a]={i:a,l:!1,exports:{}};return e[a].call(o.exports,o,o.exports,n),o.l=!0,o.exports}return n.m=e,n.c=t,n.d=function(e,t,a){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:a})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var a=Object.create(null);if(n.r(a),Object.defineProperty(a,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)n.d(a,o,function(t){return e[t]}.bind(null,o));return a},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="/",n(n.s=5)}([function(t,n){t.exports=e},function(e,n){e.exports=t},function(e,t){e.exports=n},function(e,t){e.exports=a},function(e,t){e.exports=o},function(e,t,n){"use strict";n.r(t);var a=n(2),o=function(e,t){return(o=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,t){e.__proto__=t}||function(e,t){for(var n in t)t.hasOwnProperty(n)&&(e[n]=t[n])})(e,t)};function r(e,t){function n(){this.constructor=e}o(e,t),e.prototype=null===t?Object.create(t):(n.prototype=t.prototype,new n)}var i=function(){return(i=Object.assign||function(e){for(var t,n=1,a=arguments.length;n= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? 
y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport function __createBinding(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}\r\n\r\nexport function __exportStar(m, exports) {\r\n for (var p in m) if (p !== \"default\" && !exports.hasOwnProperty(p)) exports[p] = m[p];\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? 
\"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n};\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\r\n result.default = mod;\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? 
mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, privateMap) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to get private field on non-instance\");\r\n }\r\n return privateMap.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, privateMap, value) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to set private field on non-instance\");\r\n }\r\n privateMap.set(receiver, value);\r\n return value;\r\n}\r\n","import { DataQuery, DataSourceJsonData } from '@grafana/data';\n\nexport enum AutoOffsetReset {\n EARLIEST = 'earliest',\n LATEST = 'latest',\n}\n\nexport enum TimestampMode {\n Now = 'now',\n Message = 'message',\n}\n\nexport type AutoOffsetResetInterface = {\n [key in AutoOffsetReset]: string;\n};\n\nexport type TimestampModeInterface = {\n [key in TimestampMode]: string;\n};\n\nexport interface KafkaDataSourceOptions extends DataSourceJsonData {\n bootstrapServers: string;\n}\n\nexport interface KafkaSecureJsonData {\n apiKey?: string;\n}\n\nexport interface KafkaQuery extends DataQuery {\n topicName: string;\n partition: number;\n withStreaming: boolean;\n autoOffsetReset: AutoOffsetReset;\n timestampMode: TimestampMode;\n}\n\nexport const defaultQuery: Partial = {\n partition: 0,\n withStreaming: true,\n autoOffsetReset: AutoOffsetReset.LATEST,\n timestampMode: TimestampMode.Now,\n};\n","import { DataSourceInstanceSettings } from '@grafana/data';\nimport { DataSourceWithBackend } from '@grafana/runtime';\nimport { KafkaDataSourceOptions, KafkaQuery } from './types';\n\nexport class DataSource extends DataSourceWithBackend {\n constructor(instanceSettings: DataSourceInstanceSettings) {\n super(instanceSettings);\n }\n}\n","import React, { ChangeEvent, PureComponent } from 'react';\nimport { LegacyForms } from '@grafana/ui';\nimport { DataSourcePluginOptionsEditorProps } from '@grafana/data';\nimport { KafkaDataSourceOptions, KafkaSecureJsonData } 
from './types';\n\nconst { SecretFormField, FormField } = LegacyForms;\n\ninterface Props extends DataSourcePluginOptionsEditorProps {}\n\ninterface State {}\n\nexport class ConfigEditor extends PureComponent {\n onAPIKeyChange = (event: ChangeEvent) => {\n const { onOptionsChange, options } = this.props;\n onOptionsChange({\n ...options,\n secureJsonData: {\n apiKey: event.target.value,\n },\n });\n };\n\n onResetAPIKey = () => {\n const { onOptionsChange, options } = this.props;\n onOptionsChange({\n ...options,\n secureJsonFields: {\n ...options.secureJsonFields,\n apiKey: false,\n },\n secureJsonData: {\n ...options.secureJsonData,\n apiKey: '',\n },\n });\n };\n\n onBootstrapServersChange = (event: ChangeEvent) => {\n const { onOptionsChange, options } = this.props;\n const jsonData = {\n ...options.jsonData,\n bootstrapServers: event.target.value,\n };\n onOptionsChange({ ...options, jsonData });\n };\n\n render() {\n const { options } = this.props;\n const { jsonData, secureJsonFields } = options;\n const secureJsonData = (options.secureJsonData || {}) as KafkaSecureJsonData;\n\n return (\n