logstash-plugins/logstash-input-jdbc

LogStash::ConfigurationError", :message=>"Expected one of [ \\t\\r\\n], \"#\", \"input\", \"filter\", \"output\" at line 1, column 1 (byte 1)

ZoolYe opened this issue · 0 comments

logstash 7.10

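For context, this error comes from the config parser: at line 1, column 1 of whatever file Logstash loaded, it expected whitespace, a comment, or one of the top-level input / filter / output keywords and found something else. Purely as a point of comparison (not part of the reported setup), a minimal pipeline like the sketch below parses cleanly on 7.x:

input { stdin {} }
output { stdout { codec => json_lines } }
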
  • logstash.conf
input {
	stdin {}
	jdbc {
		type => "jdbc"
		 # Database connection URL
		jdbc_connection_string => "jdbc:mysql://xxxx:3306/xxx?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&useSSL=false"
		 # Database username and password
		jdbc_user => "xxxx"
		jdbc_password => "xxxx"
		 # Path to the MySQL connector jar
		jdbc_driver_library => "/usr/share/logstash/logstash-core/lib/jars/mysql-connector-java-5.1.41.jar"
		 # The name of the JDBC driver class for MySQL
		jdbc_driver_class => "com.mysql.jdbc.Driver"
		 # Number of connection retry attempts
		connection_retry_attempts => "3"
		 # Validate the database connection before use; defaults to false (disabled)
		jdbc_validate_connection => "true"
		 # Connection validation timeout; defaults to 3600 seconds
		jdbc_validation_timeout => "3600"
		 # Enable paged queries (disabled by default)
		jdbc_paging_enabled => "true"
		 # Rows per page (default 100000; lower this if rows have many columns and update frequently)
		jdbc_page_size => "500"
		 # statement is the query SQL; if the SQL is complex, put it in a file and point statement_filepath at it
		 # sql_last_value is a built-in variable holding the tracking_column value of the last row from the previous run (updatedTime in this config)
		 # statement_filepath => "mysql/jdbc.sql"
		statement => "SELECT
            i.id,
            i.item_name AS itemName,
            i.sell_counts AS sellCounts,
            ii.url AS imgUrl,
            tempSpec.price AS price,
            i.updated_time AS updatedTime 
        FROM
            items i
            LEFT JOIN items_img ii ON i.id = ii.item_id
            LEFT JOIN ( SELECT item_id, MIN( price_discount ) AS price FROM items_spec GROUP BY item_id ) tempSpec ON i.id = tempSpec.item_id 
        WHERE
            ii.is_main = 1
        AND i.updated_time >= :sql_last_value"
		 # Whether to convert column names to lowercase; defaults to true (set to false if you need to serialize/deserialize the data as-is)
		lowercase_column_names => false
		 # Value can be any of: fatal, error, warn, info, debug; defaults to info
		sql_log_level => warn
		 # Whether to record the last run; if true, the tracking_column value from the last run is saved to the file given by last_run_metadata_path
		record_last_run => true
		 # Set to true to track the value of a column from the query result; otherwise the timestamp of the last run is tracked
		use_column_value => true
		 # Column to track for incremental sync; must be a column returned by the query
		tracking_column => "updatedTime"
		 # Value can be any of: numeric, timestamp; default is "numeric"
		tracking_column_type => timestamp
		 # Where the value recorded by record_last_run is stored
		last_run_metadata_path => "/usr/share/logstash/pipeline/last_id.txt"
		 # Whether to clear the last_run_metadata_path record; must be false for incremental sync to work
		clean_run => false
		 # Sync schedule (cron format: minute hour day-of-month month day-of-week); "* * * * *" runs once per minute
		schedule => "* * * * *"
	}
}
filter {
	json {
		source => "message"
		remove_field => ["message"]
	}
	# convert changes field types; here the TotalMoney field is cast to float
	mutate {
		convert => {
			"TotalMoney" => "float"
		}
	}
}
output {
	elasticsearch {
		 # host => "192.168.1.1"
		 # port => "9200"
		 # Elasticsearch cluster addresses
		hosts => ["xxxxxx:9200"]
		 # Index name; must be lowercase
		index => "fooide-items"
		 # Unique document id (using the database primary key is recommended)
		document_id => "%{id}"
		template => "/usr/share/logstash/pipeline/logstash-ik.json"
		template_name => "myik"
		template_overwrite => "true"
		manage_template => false
	}
	stdout {
		codec => json_lines
	}
}
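
A note on the incremental-sync settings for readers skimming the config: record_last_run, use_column_value, and tracking_column make the jdbc input persist the last updatedTime it saw to last_run_metadata_path, and on each scheduled run :sql_last_value in the statement is replaced with that persisted value. Purely as an illustration (the timestamp below is hypothetical, not taken from this issue), the tail of the query on a follow-up run would effectively be:

WHERE
    ii.is_main = 1
AND i.updated_time >= '2021-03-01 12:00:00'  -- :sql_last_value replaced by the stored updatedTime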