All output columns for the Logstash CSV filter (Ethereum ETL export schema).

  
filter {  
  # Parse CSV rows tagged srctype == "etl" into named event fields.
  # The column list is the union of all Ethereum-ETL output files
  # (blocks, transactions, contracts, logs, receipts) — presumably rows
  # from different files share this merged schema; skip_empty_columns
  # keeps each event limited to the columns its row actually fills.
  if [srctype] == "etl" { # srctype is expected to be set upstream (e.g. [fields][srctype] in the shipper)
    csv {
      columns => [
        "number", "hash",  "parent_hash",  "nonce",  "sha3_uncles",  "logs_bloom",  "transactions_root",
        "state_root",  "receipts_root",  "timestamp",  "extra_data",  "transaction_count",  "gas_limit",
        "size",  "total_difficulty",  "difficulty",  "miner",  "block_hash",  "block_number",
        "transaction_index",  "from_address",  "to_address",  "value",  "gas",  "gas_price",  "input",
        "address",  "bytecode",  "function_sighashes",  "is_erc20",  "is_erc721",  "log_index",
        "transaction_hash",  "data",  "topics",  "cumulative_gas_used",  "gas_used",  "contract_address",
        # FIX: was the single malformed name "root,status"; these are two separate columns.
        "root", "status"
      ]
      separator => ","
      remove_field => ["message"] # drop the raw CSV line once it has been parsed into fields
      #autodetect_column_names => true   #have problems
      #autogenerate_column_names => true #have problems
      skip_empty_columns => true # do not create fields for empty-valued columns
      skip_empty_rows => true    # silently drop blank input lines
    }
  }