# Copyright 2018 Confluent Inc.
#
# Licensed under the Confluent Community License (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at
#
# http://www.confluent.io/confluent-community-license
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

# A simple example that copies from a topic to a PostgreSQL database.

# The first few settings are required for all connectors:
# a name, the connector class to run, and the maximum number of tasks to create:
name=postgresql-sink
connector.class=io.confluent.connect.jdbc.JdbcSinkConnector
tasks.max=1

# Record keys are plain strings; record values are Protobuf messages whose
# schemas are looked up in Schema Registry at the given URL.
key.converter=org.apache.kafka.connect.storage.StringConverter
value.converter=io.confluent.connect.protobuf.ProtobufConverter
value.converter.schema.registry.url=http://localhost:8081

# The topics to consume from - required for sink connectors like this one
topics=postgres-topic

# Configuration specific to the JDBC sink connector.
# We want to connect to the PostgreSQL database 'opendc' on localhost and auto-create tables.
connection.url=jdbc:postgresql://127.0.0.1:5432/opendc
connection.user=matt
connection.password=admin
auto.create=true

# Define when identifiers should be quoted in DDL and DML statements.
# The default is 'always' to maintain backward compatibility with prior versions.
# Set this to 'never' to avoid quoting fully-qualified or simple table and column names.
#quote.sql.identifiers=always

# Write records using plain INSERT statements ('upsert' and 'update' are the
# other supported modes).
insert.mode=insert
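
# A minimal sketch of how this file might be tried out, assuming a local
# PostgreSQL install and a standalone Connect worker; the worker properties
# path and this file's name are illustrative, not part of this repo:
#
#   createdb opendc
#   connect-standalone /etc/kafka/connect-standalone.properties postgresql-sink.properties
#
# The target database must exist before the connector starts; auto.create only
# creates tables (named after the topic by default), not the database itself.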