import .log file in elasticsearch and Kibana
up vote
0
down vote
favorite
I have this .log file with JSON and it looks like this
{"method":"GET","path":"/public/index","format":"html","controller":"public","action":"index","status":200,"duration":84.59,"view":33.7,"db":47.45,"ip":"127.0.0.1","route":"public#index","request_id":"4d7016832294bafa8f593453eed2adb1","source":"unknown","tags":["request"],"@timestamp":"2018-11-09T22:54:06Z","@version":"1"}
{"method":"GET","path":"/public/index","format":"html","controller":"public","action":"index","status":200,"duration":15.44,"view":13.85,"db":0.91,"ip":null,"route":null,"request_id":null,"source":"unknown","tags":["request"],"@timestamp":"2018-11-09T22:54:28Z","@version":"1"}
{"method":"GET","path":"/public/index","format":"html","controller":"public","action":"index","status":200,"duration":13.86,"view":12.47,"db":0.8,"ip":null,"route":null,"request_id":null,"source":"unknown","tags":["request"],"@timestamp":"2018-11-09T22:54:40Z","@version":"1"}
and I try to import this into Elasticsearch and Kibana.
I tried with this code in command line
curl -XPOST "http://localhost:9200/test/test" -H "Content-Type: application/json" -d @logfile.log
and I get this error
{"error":{"root_cause":[{"type":"mapper_parsing_exception","reason":"failed to parse"}],"type":"mapper_parsing_exception","reason":"failed to parse","caused_by":{"type":"illegal_argument_exception","reason":"Malformed content, found extra data after parsing: START_OBJECT"}},"status":400}
and when i look in Kibana I see this
What am I doing wrong?
json elasticsearch curl kibana logfile
add a comment |
up vote
0
down vote
favorite
I have this .log file with JSON and it looks like this
{"method":"GET","path":"/public/index","format":"html","controller":"public","action":"index","status":200,"duration":84.59,"view":33.7,"db":47.45,"ip":"127.0.0.1","route":"public#index","request_id":"4d7016832294bafa8f593453eed2adb1","source":"unknown","tags":["request"],"@timestamp":"2018-11-09T22:54:06Z","@version":"1"}
{"method":"GET","path":"/public/index","format":"html","controller":"public","action":"index","status":200,"duration":15.44,"view":13.85,"db":0.91,"ip":null,"route":null,"request_id":null,"source":"unknown","tags":["request"],"@timestamp":"2018-11-09T22:54:28Z","@version":"1"}
{"method":"GET","path":"/public/index","format":"html","controller":"public","action":"index","status":200,"duration":13.86,"view":12.47,"db":0.8,"ip":null,"route":null,"request_id":null,"source":"unknown","tags":["request"],"@timestamp":"2018-11-09T22:54:40Z","@version":"1"}
and i try to import this in elasticsearch and Kibana.
I tried with this code in command line
curl -XPOST "http://localhost:9200/test/test" -H "Content-Type: application/json" -d @logfile.log
and i get this error
{"error":{"root_cause":[{"type":"mapper_parsing_exception","reason":"failed to parse"}],"type":"mapper_parsing_exception","reason":"failed to parse","caused_by":{"type":"illegal_argument_exception","reason":"Malformed content, found extra data after parsing: START_OBJECT"}},"status":400}
and when i look in Kibana I see this
what i am doing wrong?
json elasticsearch curl kibana logfile
So, after a lot of research I figured out that I have multiple documents in the logfile, and not only one document. I should send only one document or use the bulk API. How can I change my file to only one document, or how do I change the file content to conform with the bulk format?
– mat's
Nov 11 at 5:28
add a comment |
up vote
0
down vote
favorite
up vote
0
down vote
favorite
I have this .log file with JSON and it looks like this
{"method":"GET","path":"/public/index","format":"html","controller":"public","action":"index","status":200,"duration":84.59,"view":33.7,"db":47.45,"ip":"127.0.0.1","route":"public#index","request_id":"4d7016832294bafa8f593453eed2adb1","source":"unknown","tags":["request"],"@timestamp":"2018-11-09T22:54:06Z","@version":"1"}
{"method":"GET","path":"/public/index","format":"html","controller":"public","action":"index","status":200,"duration":15.44,"view":13.85,"db":0.91,"ip":null,"route":null,"request_id":null,"source":"unknown","tags":["request"],"@timestamp":"2018-11-09T22:54:28Z","@version":"1"}
{"method":"GET","path":"/public/index","format":"html","controller":"public","action":"index","status":200,"duration":13.86,"view":12.47,"db":0.8,"ip":null,"route":null,"request_id":null,"source":"unknown","tags":["request"],"@timestamp":"2018-11-09T22:54:40Z","@version":"1"}
and i try to import this in elasticsearch and Kibana.
I tried with this code in command line
curl -XPOST "http://localhost:9200/test/test" -H "Content-Type: application/json" -d @logfile.log
and i get this error
{"error":{"root_cause":[{"type":"mapper_parsing_exception","reason":"failed to parse"}],"type":"mapper_parsing_exception","reason":"failed to parse","caused_by":{"type":"illegal_argument_exception","reason":"Malformed content, found extra data after parsing: START_OBJECT"}},"status":400}
and when i look in Kibana I see this
what i am doing wrong?
json elasticsearch curl kibana logfile
I have this .log file with JSON and it looks like this
{"method":"GET","path":"/public/index","format":"html","controller":"public","action":"index","status":200,"duration":84.59,"view":33.7,"db":47.45,"ip":"127.0.0.1","route":"public#index","request_id":"4d7016832294bafa8f593453eed2adb1","source":"unknown","tags":["request"],"@timestamp":"2018-11-09T22:54:06Z","@version":"1"}
{"method":"GET","path":"/public/index","format":"html","controller":"public","action":"index","status":200,"duration":15.44,"view":13.85,"db":0.91,"ip":null,"route":null,"request_id":null,"source":"unknown","tags":["request"],"@timestamp":"2018-11-09T22:54:28Z","@version":"1"}
{"method":"GET","path":"/public/index","format":"html","controller":"public","action":"index","status":200,"duration":13.86,"view":12.47,"db":0.8,"ip":null,"route":null,"request_id":null,"source":"unknown","tags":["request"],"@timestamp":"2018-11-09T22:54:40Z","@version":"1"}
and i try to import this in elasticsearch and Kibana.
I tried with this code in command line
curl -XPOST "http://localhost:9200/test/test" -H "Content-Type: application/json" -d @logfile.log
and i get this error
{"error":{"root_cause":[{"type":"mapper_parsing_exception","reason":"failed to parse"}],"type":"mapper_parsing_exception","reason":"failed to parse","caused_by":{"type":"illegal_argument_exception","reason":"Malformed content, found extra data after parsing: START_OBJECT"}},"status":400}
and when i look in Kibana I see this
what i am doing wrong?
json elasticsearch curl kibana logfile
json elasticsearch curl kibana logfile
edited Nov 11 at 5:09
asked Nov 11 at 3:56
mat's
156
156
so, after a lot of research i figure out that i have multiple documents in the logfile, and not only one document. I should send only one document or use the bulk API. how can i change my file to only one document, or how do i change the file content to conform with bulk format ?
– mat's
Nov 11 at 5:28
add a comment |
so, after a lot of research i figure out that i have multiple documents in the logfile, and not only one document. I should send only one document or use the bulk API. how can i change my file to only one document, or how do i change the file content to conform with bulk format ?
– mat's
Nov 11 at 5:28
so, after a lot of research i figure out that i have multiple documents in the logfile, and not only one document. I should send only one document or use the bulk API. how can i change my file to only one document, or how do i change the file content to conform with bulk format ?
– mat's
Nov 11 at 5:28
so, after a lot of research i figure out that i have multiple documents in the logfile, and not only one document. I should send only one document or use the bulk API. how can i change my file to only one document, or how do i change the file content to conform with bulk format ?
– mat's
Nov 11 at 5:28
add a comment |
1 Answer
1
active
oldest
votes
up vote
1
down vote
accepted
The bulk format is
action_and_meta_data\n
optional_source\n
action_and_meta_data\n
optional_source\n
....
You have all the optional_source
lines in your file already.
Just add a line containing
{ "index" : { "_index" : "YOUR-INDEX-NAME", "_type" : "_doc"} }
before each of your lines.
Then POST against the bulk api in ES https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html
thanks @ibexit, I really have a hard time here. How can i add this line to the file before each line? I use the gem logstasher
– mat's
Nov 11 at 19:09
hey @mat's. Let's see if I can help you. What OS are you working on? Are you familiar with the Linux shell? If so, try this: awk -v FS="" '{print "{ \"index\" : { \"_index\" : \"YOUR-INDEX-NAME\", \"_type\" : \"_doc\"} }\n" $0}' bulk_file_name > bulk_preprocessed
– ibexit
Nov 12 at 14:03
thanks. i found a way to add a line in the logstasher.rb (in initializer) and it works perfect.
– mat's
Nov 12 at 17:18
add a comment |
1 Answer
1
active
oldest
votes
1 Answer
1
active
oldest
votes
active
oldest
votes
active
oldest
votes
up vote
1
down vote
accepted
The bulk format is
action_and_meta_data\n
optional_source\n
action_and_meta_data\n
optional_source\n
....
You have all the optional_source
lines in your file already.
Just add a line containing
{ "index" : { "_index" : "YOUR-INDEX-NAME", "_type" : "_doc"} }
before each of your lines.
Then POST against the bulk api in ES https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html
thanks @ibexit, I really have a hard time here. How can i add this line to the file before each line? I use the gem logstasher
– mat's
Nov 11 at 19:09
hey @mat's. Let's see if I can help you. What OS are you working on? Are you familiar with the Linux shell? If so, try this: awk -v FS="" '{print "{ \"index\" : { \"_index\" : \"YOUR-INDEX-NAME\", \"_type\" : \"_doc\"} }\n" $0}' bulk_file_name > bulk_preprocessed
– ibexit
Nov 12 at 14:03
thanks. i found a way to add a line in the logstasher.rb (in initializer) and it works perfect.
– mat's
Nov 12 at 17:18
add a comment |
up vote
1
down vote
accepted
The bulk format is
action_and_meta_data\n
optional_source\n
action_and_meta_data\n
optional_source\n
....
You have all the optional_source
lines in your file already.
Just add a line containing
{ "index" : { "_index" : "YOUR-INDEX-NAME", "_type" : "_doc"} }
before each of your lines.
Then POST against the bulk api in ES https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html
thanks @ibexit, I really have a hard time here. How can i add this line to the file before each line? I use the gem logstasher
– mat's
Nov 11 at 19:09
hey @mat's. Let's see if I can help you. What OS are you working on? Are you familiar with the Linux shell? If so, try this: awk -v FS="" '{print "{ \"index\" : { \"_index\" : \"YOUR-INDEX-NAME\", \"_type\" : \"_doc\"} }\n" $0}' bulk_file_name > bulk_preprocessed
– ibexit
Nov 12 at 14:03
thanks. i found a way to add a line in the logstasher.rb (in initializer) and it works perfect.
– mat's
Nov 12 at 17:18
add a comment |
up vote
1
down vote
accepted
up vote
1
down vote
accepted
The bulk format is
action_and_meta_data\n
optional_source\n
action_and_meta_data\n
optional_source\n
....
You have all the optional_source
lines in your file already.
Just add a line containing
{ "index" : { "_index" : "YOUR-INDEX-NAME", "_type" : "_doc"} }
before each of your lines.
Then POST against the bulk api in ES https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html
The bulk format is
action_and_meta_data\n
optional_source\n
action_and_meta_data\n
optional_source\n
....
You have all the optional_source
lines in your file already.
Just add a line containing
{ "index" : { "_index" : "YOUR-INDEX-NAME", "_type" : "_doc"} }
before each of your lines.
Then POST against the bulk api in ES https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html
answered Nov 11 at 9:01
ibexit
637313
637313
thanks @ibexit, I really have a hard time here. How can i add this line to the file before each line? I use the gem logstasher
– mat's
Nov 11 at 19:09
hey @mat's. Let's see if I can help you. What OS are you working on? Are you familiar with the Linux shell? If so, try this: awk -v FS="" '{print "{ \"index\" : { \"_index\" : \"YOUR-INDEX-NAME\", \"_type\" : \"_doc\"} }\n" $0}' bulk_file_name > bulk_preprocessed
– ibexit
Nov 12 at 14:03
thanks. i found a way to add a line in the logstasher.rb (in initializer) and it works perfect.
– mat's
Nov 12 at 17:18
add a comment |
thanks @ibexit, I really have a hard time here. How can i add this line to the file before each line? I use the gem logstasher
– mat's
Nov 11 at 19:09
hey @mat's. Let's see if I can help you. What OS are you working on? Are you familiar with the Linux shell? If so, try this: awk -v FS="" '{print "{ \"index\" : { \"_index\" : \"YOUR-INDEX-NAME\", \"_type\" : \"_doc\"} }\n" $0}' bulk_file_name > bulk_preprocessed
– ibexit
Nov 12 at 14:03
thanks. i found a way to add a line in the logstasher.rb (in initializer) and it works perfect.
– mat's
Nov 12 at 17:18
thanks @ibexit, I really have a hard time here. How can i add this line to the file before each line? I use the gem logstasher
– mat's
Nov 11 at 19:09
thanks @ibexit, I really have a hard time here. How can i add this line to the file before each line? I use the gem logstasher
– mat's
Nov 11 at 19:09
hey @mat's. Let´s see if I can help you. What OS are you working on? Are you familiar with Linux shell? If so, try this:
awk -v FS="" '{print "{ \"index\" : { \"_index\" : \"YOUR-INDEX-NAME\", \"_type\" : \"_doc\"} }\n" $0}' bulk_file_name > bulk_preprocessed
– ibexit
Nov 12 at 14:03
hey @mat's. Let´s see if I can help you. What OS are you working on? Are you familiar with Linux shell? If so, try this:
awk -v FS="" '{print "{ \"index\" : { \"_index\" : \"YOUR-INDEX-NAME\", \"_type\" : \"_doc\"} }\n" $0}' bulk_file_name > bulk_preprocessed
– ibexit
Nov 12 at 14:03
thanks. i found a way to add a line in the logstasher.rb (in initializer) and it works perfect.
– mat's
Nov 12 at 17:18
thanks. i found a way to add a line in the logstasher.rb (in initializer) and it works perfect.
– mat's
Nov 12 at 17:18
add a comment |
Sign up or log in
StackExchange.ready(function () {
StackExchange.helpers.onClickDraftSave('#login-link');
});
Sign up using Google
Sign up using Facebook
Sign up using Email and Password
Post as a guest
Required, but never shown
StackExchange.ready(
function () {
StackExchange.openid.initPostLogin('.new-post-login', 'https%3a%2f%2fstackoverflow.com%2fquestions%2f53245714%2fimport-log-file-in-elasticsearch-and-kibana%23new-answer', 'question_page');
}
);
Post as a guest
Required, but never shown
Sign up or log in
StackExchange.ready(function () {
StackExchange.helpers.onClickDraftSave('#login-link');
});
Sign up using Google
Sign up using Facebook
Sign up using Email and Password
Post as a guest
Required, but never shown
Sign up or log in
StackExchange.ready(function () {
StackExchange.helpers.onClickDraftSave('#login-link');
});
Sign up using Google
Sign up using Facebook
Sign up using Email and Password
Post as a guest
Required, but never shown
Sign up or log in
StackExchange.ready(function () {
StackExchange.helpers.onClickDraftSave('#login-link');
});
Sign up using Google
Sign up using Facebook
Sign up using Email and Password
Sign up using Google
Sign up using Facebook
Sign up using Email and Password
Post as a guest
Required, but never shown
Required, but never shown
Required, but never shown
Required, but never shown
Required, but never shown
Required, but never shown
Required, but never shown
Required, but never shown
Required, but never shown
so, after a lot of research i figure out that i have multiple documents in the logfile, and not only one document. I should send only one document or use the bulk API. how can i change my file to only one document, or how do i change the file content to conform with bulk format ?
– mat's
Nov 11 at 5:28