ethereum-etl (ethereumetl) + ELK: Logstash and Kibana, Part 2
filter {
if [etltype] == "blocks" { #[fields][srctype]
csv {
columns => [
"number", "hash", "parent_hash", "nonce", "sha3_uncles", "logs_bloom", "transactions_root",
"state_root", "receipts_root", "miner", "difficulty", "total_difficulty", "size", "extra_data",
"gas_limit", "gas_used", "timestamp", "transaction_count"
]
separator => ","
remove_field => ["message"]
skip_empty_columns => true
skip_empty_rows => true
}
}else if [etltype] == "contracts" { #[fields][srctype]
csv {
columns => [
"address", "bytecode", "function_sighashes", "is_erc20", "is_erc721"
]
separator => ","
remove_field => ["message"]
skip_empty_columns => true
skip_empty_rows => true
}
}else if [etltype] == "logs" { #[fields][srctype]
csv {
columns => [
"log_index", "transaction_hash", "transaction_index", "block_hash", "block_number",
"address", "data", "topics"
]
separator => ","
remove_field => ["message"]
skip_empty_columns => true
skip_empty_rows => true
}
}else if [etltype] == "receipts" { #[fields][srctype]
csv {
columns => [
"transaction_hash", "transaction_index", "block_hash", "block_number", "cumulative_gas_used",
"gas_used", "contract_address", "root", "status"
]
separator => ","
remove_field => ["message"]
skip_empty_columns => true
skip_empty_rows => true
}
}else if [etltype] == "token_transfers" { #[fields][srctype]
csv {
columns => [
""
]
separator => ","
remove_field => ["message"]
skip_empty_columns => true
skip_empty_rows => true
}
}else if [etltype] == "tokens" { #[fields][srctype]
csv {
columns => [
""
]
separator => ","
remove_field => ["message"]
skip_empty_columns => true
skip_empty_rows => true
}
}else if [etltype] == "transactions" { #[fields][srctype]
csv {
columns => [
"hash", "nonce", "block_hash", "block_number", "transaction_index", "from_address",
"to_address", "value", "gas", "gas_price", "inputcontext"
]
separator => ","
remove_field => ["message"]
skip_empty_columns => true
skip_empty_rows => true
}
}
}
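The filter above only works if [etltype] is already set before the event reaches this pipeline; the #[fields][srctype] comments hint that in this setup it comes from the shipper (e.g. a Filebeat field). As a hedged alternative, here is a minimal sketch of a Logstash input that reads the ethereum-etl CSVs directly and tags each stream itself (the paths are placeholders, not taken from this setup; note the output index names above rely on Filebeat's @metadata, so a plain file input would also need different index names):
input {
  file {
    # Hypothetical path; point this at the CSVs exported by ethereum-etl.
    path => "/data/ethereum-etl/blocks_*.csv"
    start_position => "beginning"
    add_field => { "etltype" => "blocks" }
  }
  file {
    path => "/data/ethereum-etl/transactions_*.csv"
    start_position => "beginning"
    add_field => { "etltype" => "transactions" }
  }
}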
output {
if [etltype] == "blocks" {
elasticsearch {
hosts => "xxx.xxx.xxx.xxx:9200"
manage_template => false
index => "%{[@metadata][beat]}-%{[@metadata][version]}-blocks-%{+YYYY.MM.dd}"
document_id => "%{[hash]}"
}
}else if [etltype] == "logs" {
elasticsearch {
hosts => "xxx.xxx.xxx.xxx:9200"
manage_template => false
index => "%{[@metadata][beat]}-%{[@metadata][version]}-logs-%{+YYYY.MM.dd}"
}
}else if [etltype] == "transactions" {
elasticsearch {
hosts => "xxx.xxx.xxx.xxx:9200"
manage_template => false
index => "%{[@metadata][beat]}-%{[@metadata][version]}-transactions-%{+YYYY.MM.dd}"
document_id => "%{[hash]}"
}
}else if [etltype] == "contracts" {
elasticsearch {
hosts => "xxx.xxx.xxx.xxx:9200"
manage_template => false
index => "%{[@metadata][beat]}-%{[@metadata][version]}-contracts-%{+YYYY.MM.dd}"
document_id => "%{[address]}"
}
}else{
elasticsearch {
hosts => "xxx.xxx.xxx.xxx:9200"
manage_template => false
index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"
}
}
stdout { codec => rubydebug }
}
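The daily suffix %{+YYYY.MM.dd} in the index names is taken from @timestamp, which by default is the ingest time. Since the blocks CSV carries a Unix-epoch timestamp column, a small optional sketch (not part of the config above) can make block documents use the on-chain time instead:
filter {
  if [etltype] == "blocks" {
    date {
      # The blocks CSV "timestamp" column is Unix epoch seconds; using it as
      # @timestamp makes the daily index reflect block time, not import time.
      match => ["timestamp", "UNIX"]
    }
  }
}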
The transactions CSV fields are:
hash,nonce,block_hash,block_number,transaction_index,from_address,to_address,value,gas,gas_price,input
The "input" column must be renamed to something else, e.g. inputcontext, like this:
hash,nonce,block_hash,block_number,transaction_index,from_address,to_address,value,gas,gas_price,inputcontext
Without the rename the import fails, even though Logstash parses the event correctly. On some newer docker-compose ELK stacks the import happens to succeed anyway, but renaming the column is the simpler and more reliable fix.
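If you prefer not to edit the CSV header, an alternative sketch (untested here) is to keep "input" in the csv filter's columns and rename the field inside Logstash instead, presumably avoiding a clash with the "input" field that Filebeat adds to every event:
filter {
  if [etltype] == "transactions" {
    mutate {
      # Rename the parsed CSV column so the string value cannot collide with
      # Filebeat's own "input" object field in the Elasticsearch mapping.
      rename => { "input" => "inputcontext" }
    }
  }
}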
===============================
Wrong (does not work):
if [etltype] in ["blocks"]
Correct:
if [etltype] == "blocks"
The "in" form only works with a list of two or more values:
if [etltype] in ["blocks", "transactions", ...]
This form is OK.
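The multi-value "in" form can be used to collapse the output branches that share the same settings (everything that does not set a document_id). A sketch based on the output above:
output {
  # Sketch only: one branch for the types that do not need a document_id.
  if [etltype] in ["logs", "receipts", "tokens", "token_transfers"] {
    elasticsearch {
      hosts => "xxx.xxx.xxx.xxx:9200"
      manage_template => false
      index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{[etltype]}-%{+YYYY.MM.dd}"
    }
  }
}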