#
# Copyright 2018 Confluent Inc.
#
# Licensed under the Confluent Community License (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at
#
# http://www.confluent.io/confluent-community-license
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# A simple example that copies from a topic to a Postgres database.
# The first few settings are required for all connectors:
# a name, the connector class to run, and the maximum number of tasks to create:
name=postgresql-sink
connector.class=io.confluent.connect.jdbc.JdbcSinkConnector
tasks.max=1
key.converter=org.apache.kafka.connect.storage.StringConverter
value.converter=io.confluent.connect.protobuf.ProtobufConverter
value.converter.schema.registry.url=http://localhost:8081
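# Record keys are read as plain UTF-8 strings, while record values are deserialized
# with the Protobuf converter, which looks up schemas in the Schema Registry at the URL above.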
# The topics to consume from - required for sink connectors like this one
topics=postgres-topic
# Configuration specific to the JDBC sink connector.
# We want to connect to the Postgres database 'opendc' running on localhost and auto-create tables.
connection.url=jdbc:postgresql://127.0.0.1:5432/opendc
connection.user=matt
connection.password=admin
auto.create=true
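# Optionally (shown commented out, as these are only illustrative and left at their defaults here),
# the connector can also evolve existing tables by adding missing columns, and the destination
# table name can be derived from the topic name via a format string.
#auto.evolve=true
#table.name.format=${topic}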
# Define when identifiers should be quoted in DDL and DML statements.
# The default is 'always' to maintain backward compatibility with prior versions.
# Set this to 'never' to avoid quoting fully-qualified or simple table and column names.
#quote.sql.identifiers=always
# Write records using plain INSERT statements.
insert.mode=insert
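# If upsert semantics are needed instead, insert.mode can be set to 'upsert', which also
# requires a primary key definition. The lines below are an illustrative sketch only;
# 'id' is a placeholder for whatever key field the records actually carry.
#insert.mode=upsert
#pk.mode=record_key
#pk.fields=id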