Running multiple configuration files as logstash services using NSSM

478 Views Asked by At

I am using Logstash 7.4 and running it as a service using NSSM. I have one config file which ingests data into one Elasticsearch index (index_one) and another config file which ingests data into a different index (index_two). (Note: both config files are scheduled on different intervals and timings.) Can I set up both files as services with two different names, e.g. service_one for the config file ingesting data into index_one and service_two for the one ingesting into index_two? Is it good to do that, or is there a better way to achieve the same result?

Below are both config files. Config file 1 (#file: db.conf):

# Pipeline 1 (db.conf): scheduled incremental JDBC ingest from Oracle into Elasticsearch.
input { 
    jdbc { 
        jdbc_driver_library => ""
        jdbc_driver_class => "Java::oracle.jdbc.driver.OracleDriver" 
        jdbc_connection_string => "jdbc:oracle:thin:@nl0123vca0020.xyz-nl0123.abc.com:1521/pintu1"
        jdbc_user =>"usersys2"
        jdbc_password => "password"
        # NOTE(review): this SELECT list contains neither "version" (the
        # tracking_column below) nor "contentid" (used by the filter and as
        # document_id in the output). Confirm the statement was not abbreviated;
        # otherwise incremental tracking will not advance and every document
        # will get the literal id "%{contentid}".
        statement => "select name, id, address , col_1, col_2, col_3 from  demo_table_1"
        # Runs at minute 0 of every 2nd hour (cron syntax).
        schedule => "0 */2 * * *"
        # Persists the last tracked value so restarts resume incrementally.
        last_run_metadata_path => "E:/logstash-7.4.2/config/intouch_db_index_increment.txt"
        use_column_value => true
        tracking_column => "version"
    } 
}
filter {
    mutate {
        # Coerce contentid to a string so it is usable as a stable document id.
        convert => {
            "contentid" => "string"
        }
    }
}
output{
    elasticsearch {
        hosts => ["http://***.***.119.199:9200"]
        # NOTE(review): the question text says this file feeds "index_one", but
        # the config writes to "index_two" — verify which is intended.
        index => "index_two"
        document_id =>"%{contentid}"
        user => "elastic" 
        password => "passwordes" 
    }
}

Config file 2:

# Pipeline 2: daily full JDBC reload of the autosuggest data into Elasticsearch.
input {
    jdbc {
        jdbc_driver_library => ""
        jdbc_driver_class => "Java::oracle.jdbc.driver.OracleDriver"
        jdbc_connection_string => "jdbc:oracle:thin:@nl0123vca0020.xyz-nl0123.abc.com:1521/pintu1"
        jdbc_user =>"usersys2"
        jdbc_password => "password"
        # NOTE(review): "where rownum < 999999999999" is effectively no filter;
        # presumably it exists only to force a full-table read — confirm.
        statement  => "select autosuggestid,userid,ldapalias,email, decode(trim(firstname || ' ' || lastname),'', ldapalias, (firstname || ' ' || lastname)) FULLNAME,status as USERSTATUS from demo_autosuggest where rownum < 999999999999" 
        # Batch rows from the JDBC cursor 100k at a time.
        jdbc_fetch_size => "100000" 
        # Runs daily at 12:00 (cron syntax). No tracking_column here: this is a
        # full reload, not an incremental one like pipeline 1.
        schedule => "0 12 * * *"
    }
}
output{
    elasticsearch {
        hosts => ["http://***.***.119.199:9200"]
        index => "index_two"
        # autosuggestid is selected by the statement above and used as the
        # document id, so re-runs overwrite rather than duplicate.
        document_id =>"%{autosuggestid}"
        user => "elastic"
        password => "passwordes"
    }
}
1

There is 1 solution below.

0
On
This is how I configured a single Logstash config file to ingest data from two different SQL statements into multiple Elasticsearch indices.


    # Combined pipeline: two jdbc inputs in one config. Each input stamps a
    # "type" field on its events so the filter/output sections can route them
    # with conditionals.
    input { 
        jdbc { 
            # NOTE(review): this tag labels the demo_table_1 query (pipeline 1,
            # the db index), yet it is named "autosuggest" while the actual
            # autosuggest query below is tagged "dbindex" — the names appear
            # swapped; confirm against the downstream conditionals.
            type=>"autosuggest"
            jdbc_driver_library => ""
            jdbc_driver_class => "Java::oracle.jdbc.driver.OracleDriver" 
            jdbc_connection_string => "jdbc:oracle:thin:@nl0123vca0020.xyz-nl0123.abc.com:1521/pintu1"
            jdbc_user =>"usersys2"
            jdbc_password => "password"
            statement => "select name, id, address , col_1, col_2, col_3 from  demo_table_1"
            # Every 2 hours; incremental via "version" + last_run_metadata_path.
            schedule => "0 */2 * * *"
            last_run_metadata_path => "E:/logstash-7.4.2/config/intouch_db_index_increment.txt"
            use_column_value => true
            tracking_column => "version"
        } 
jdbc {
        # Tags events from the demo_autosuggest full-reload query.
        type=>"dbindex"
        jdbc_driver_library => ""
        jdbc_driver_class => "Java::oracle.jdbc.driver.OracleDriver"
        jdbc_connection_string => "jdbc:oracle:thin:@nl0123vca0020.xyz-nl0123.abc.com:1521/pintu1"
        jdbc_user =>"usersys2"
        jdbc_password => "password"
        statement  => "select autosuggestid,userid,ldapalias,email, decode(trim(firstname || ' ' || lastname),'', ldapalias, (firstname || ' ' || lastname)) FULLNAME,status as USERSTATUS from demo_autosuggest where rownum < 999999999999" 
        jdbc_fetch_size => "100000" 
        # Daily full reload at 12:00.
        schedule => "0 12 * * *"
    }
    }

    # Route the contentid string conversion only to events from the
    # demo_table_1 input (tagged type => "autosuggest" above). Two fixes vs.
    # the original snippet:
    #   1. The conditional body was missing its braces — `if EXPR` followed
    #      directly by `mutate {}` is a syntax error in Logstash.
    #   2. The condition tested [type] == "dbindex", but "contentid" is the
    #      field used by the "autosuggest"-typed pipeline (its output uses
    #      document_id => "%{contentid}", and the original db.conf carried
    #      this same filter).
    filter {
        if [type] == "autosuggest" {
            mutate {
                convert => {
                    "contentid" => "string"
                }
            }
        }
    }
    # Route each event to its Elasticsearch index based on the "type" tag
    # stamped by the jdbc inputs.
    output{
        if [type] == "autosuggest"
    {  
        elasticsearch {
            hosts => ["http://***.***.119.199:9200"]
            # NOTE(review): both branches write to "index_two", although the
            # question describes two distinct indices (index_one / index_two).
            # The differing document_id fields prevent id collisions, but the
            # two datasets will be mixed in one index — verify the index names.
            index => "index_two"
            document_id =>"%{contentid}"
            user => "elastic" 
            password => "passwordes" 
        }
    }
   if [type] == "dbindex" 
    {
   elasticsearch {
        hosts => ["http://***.***.119.199:9200"]
        # NOTE(review): same index as the branch above — see note there.
        index => "index_two"
        document_id =>"%{autosuggestid}"
        user => "elastic"
        password => "passwordes"
    }
    } 
    }