filebeat to kafka
https://www.elastic.co/guide/en/beats/filebeat/current/kafka-output.html
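A minimal output.kafka section for filebeat.yml, roughly following that page; the broker address and topic name are placeholders, not values from this setup:

output.kafka:
  # Kafka brokers to publish to (placeholder address)
  hosts: ["192.168.99.100:9092"]
  # topic the ETL log lines go into (hypothetical name)
  topic: "ethereum-etl"
  required_acks: 1
  compression: gzip
  max_message_bytes: 1000000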
sink-connector to mysql
https://docs.confluent.io/current/connect/kafka-connect-jdbc/sink-connector/index.html
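And a sketch of the matching JDBC sink connector properties for MySQL; the connection details, topic, and connector name are placeholders, the keys come from the Confluent JDBC connector docs:

name=etl-mysql-sink
connector.class=io.confluent.connect.jdbc.JdbcSinkConnector
tasks.max=1
# topic written by Filebeat/Kafka (hypothetical name)
topics=ethereum-etl
# MySQL connection (placeholder host and credentials)
connection.url=jdbc:mysql://192.168.99.100:3306/etl
connection.user=etl
connection.password=changeme
auto.create=true
insert.mode=insert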
ethereum-etl overwrites its output files on every export run, so Filebeat keeps re-reading them and ELK ends up with duplicate records. To avoid this, export a fixed block range each run:
.env

STARTBLOCK=01205866
ENDBLOCK=01205888

startetl.sh

IP_PORT is go-ethereum-node1's external IP and port; here it is the docker-machine address. Be careful with the path of the file loaded by source.

#!/bin/bash
IP_PORT=192.168.99.100:18545
ETH_METHOD=eth_blockNumber
BLOCKNUMBER_JSON_HEX=$(curl -X POST -H "Content-Type: application/json" --data '{"jsonrpc":"2.0","method":"'$ETH_METHOD'","params":[],"id":1}' $IP_PORT | jq '.result' | tr -d '"')
BLOCKNUMBER_DEC=$(printf "%08d\n" $BLOCKNUMBER_JSON_HEX)
printf "\n===== Now Geth BlockNumber =====\n"
printf "HEX: %s\n" $BLOCKNUMBER_JSON_HEX
printf "DEC: %s\n" $BLOCKNUMBER_DEC
source .
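For reference, the same range can also be exported by hand with the ethereumetl CLI; a sketch based on the ethereum-etl README, reusing IP_PORT from the script above as the provider URI, with output paths chosen to match the compose volume below:

# sketch: manual export of the block range from .env
source .env
ethereumetl export_blocks_and_transactions \
  --start-block $STARTBLOCK --end-block $ENDBLOCK \
  --provider-uri http://192.168.99.100:18545 \
  --blocks-output output/blocks.csv \
  --transactions-output output/transactions.csv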
https://github.com/taskrabbit/elasticsearch-dump
Output elasticsearch -> json

docker run --rm -ti -u root -v /mnt/sda1/var/lib/docker/volumes/xxxxxxx/_data/data:/tmp taskrabbit/elasticsearch-dump --input=http://192.168.99.101:9200/filebeat-6.5.4-etl-logs --output=/tmp/etl-logs.json --type=mapping
docker run --rm -ti -u root -v /mnt/sda1/var/lib/docker/volumes/xxxxxxx/_data/data:/tmp taskrabbit/elasticsearch-dump --input=http://192.168.99.101:9200/filebeat-6.5.4-etl-transactions --output=/tmp/etl-transactions.json --type=mapping
docker run --rm -ti -u root -v /mnt/sda1/var/lib/docker/volumes/xxxxxxx/_data/data:/tmp taskrabbit/elasticsearch-dump --input=http://192.168.99.101:9200/filebeat-6.5.4-etl-receipts --output=/tmp/etl-receipts.json --type=mapping
docker run --rm -ti -u root -v /mnt/sda1/var/lib/docker/volumes/xxxxxxx/_data/data:/tmp taskrabbit/elasticsearch-dump --input=http://192.168.99.101:9200/filebeat-6.5.4-etl-contracts --output=/tmp/etl-contracts.json --type=mapping
docker run --rm -ti -u root -v /mnt/sda1/var/lib/docker/volumes/xxxxxxx/_data/data:/tmp taskrabbit/elasticsearch-dump --input=http://192.168.99.101:9200/filebeat-6.5.4-etl-blocks --output=/tmp/etl-blocks.json --type=mapping
docker run --rm -ti -u root -v /mnt/sda1/var/lib/docker/volumes/xxxxxxx/_data/data:/tmp taskrabbit/elasticsearch-dump --input=http://192.
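The same image also works in the other direction (loading a JSON dump back into Elasticsearch) by swapping input and output; a sketch for one index:

docker run --rm -ti -u root -v /mnt/sda1/var/lib/docker/volumes/xxxxxxx/_data/data:/tmp taskrabbit/elasticsearch-dump \
  --input=/tmp/etl-blocks.json \
  --output=http://192.168.99.101:9200/filebeat-6.5.4-etl-blocks \
  --type=mapping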
https://blog.csdn.net/qq_38486203/article/details/80817037
Search minedNumber
GET /filebeat-6.*-geth*/_search?q=geth_ip:xxx.xxx.xxx.xxx
{
  "_source": ["name", "minedNumber", "gethdate"],
  "sort": [
    { "gethdate": { "order": "desc" } }
  ],
  "from": 1,
  "size": 1
}

Get minedNumber
curl -XGET "http://xxx.xxx.xxx.xxx:9200/filebeat-6.*-geth*/_search?q=geth_ip:xxx.xxx.xxx.xxx" -H 'Content-Type: application/json' -d'
{
  "_source": ["name", "minedNumber", "gethdate"],
  "sort": [
    { "gethdate": { "order": "desc" } }
  ],
  "from": 1,
  "size": 1
}' | jq ".hits.hits[]._source.minedNumber"
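The minedNumber pulled out above can then seed the next export range. A hypothetical glue snippet (not part of the original scripts) that writes it back into .env as the next STARTBLOCK:

# hypothetical: reuse the last indexed minedNumber as the next STARTBLOCK
MINED=$(curl -s -XGET "http://xxx.xxx.xxx.xxx:9200/filebeat-6.*-geth*/_search?q=geth_ip:xxx.xxx.xxx.xxx" \
  -H 'Content-Type: application/json' \
  -d'{ "_source": ["minedNumber"], "sort": [ { "gethdate": { "order": "desc" } } ], "from": 1, "size": 1 }' \
  | jq -r ".hits.hits[]._source.minedNumber")
sed -i "s/^STARTBLOCK=.*/STARTBLOCK=$MINED/" .env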
docker-compose.yml
version: '3.3'
services:
  ethereum_etl:
    build:
      context: .
    env_file: .env
    volumes:
      - /var/log/hardblue/etl/:/ethereum-etl/output:rw
      #- /root/go/src/github.com/ethereum/go-ethereum/build/bin/data:/ethereum-etl/ipc
    #restart: unless-stopped
    networks:
      - etl
networks:
  etl:
    driver: bridge

.env

STARTBLOCK=00000000

DOCKERFILE
FROM python:3.6-alpine
MAINTAINER Eric Lim
ENV PROJECT_DIR=ethereum-etl
RUN apk add unzip
RUN wget https://github.com/blockchain-etl/ethereum-etl/archive/develop.zip \
    && unzip develop.zip && rm develop.zip
RUN mv ethereum-etl-develop /$PROJECT_DIR
WORKDIR /$PROJECT_DIR
RUN apk add --no-cache gcc musl-dev #for C libraries:
RUN pip install --upgrade pip && pip install -e /$PROJECT_DIR/
#CMD ["export_all", "-s", "01990000", "-e", "99999999", "-p", "http://xxx.
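Building and starting the exporter then goes through docker-compose as usual:

docker-compose build
docker-compose up -d ethereum_etl
docker-compose logs -f ethereum_etl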
https://lab.miguelmota.com/ethereum-input-data-decoder/example/
filter {
  if [etltype] == "blocks" { #[fields][srctype]
    csv {
      columns => [ "number", "hash", "parent_hash", "nonce", "sha3_uncles", "logs_bloom", "transactions_root", "state_root", "receipts_root", "miner", "difficulty", "total_difficulty", "size", "extra_data", "gas_limit", "gas_used", "timestamp", "transaction_count" ]
      separator => ","
      remove_field => ["message"]
      skip_empty_columns => true
      skip_empty_rows => true
    }
  } else if [etltype] == "contracts" { #[fields][srctype]
    csv {
      columns => [ "address", "bytecode", "function_sighashes", "is_erc20", "is_erc721" ]
      separator => ","
      remove_field => ["message"]
      skip_empty_columns => true
      skip_empty_rows => true
    }
  } else if [etltype] == "logs" { #[fields][srctype]
    csv {
      columns => [ "log_index", "transaction_hash", "transaction_index", "block_hash", "block_number", "address", "data", "topics" ]
      separator => ","
      remove_field => ["message"]
      skip_empty_columns => true
      skip_empty_rows => true
    }
  } else if [etltype] == "receipts" { #[fields][srctype]
    csv {
      columns => [ "transaction_hash", "transaction_index", "block_hash", "block_number", "cumulative_gas_used", "gas_used", "contract_address", "root", "status" ]
      separator => ","
      remove_field => ["message"]
      skip_empty_columns => true
      skip_empty_rows => true
    }
  } else if [etltype] == "token_transfers" { #[fields][srctype]
    csv {
      columns => [ "" ]
      separator => ","
      remove_field => ["message"]
      skip_empty_columns => true
      skip_empty_rows => true
    }
  } else if [etltype] == "tokens" { #[fields][srctype]
    csv {
      columns => [ "" ]
      separator => ","
      remove_field => ["message"]
      skip_empty_columns => true
      skip_empty_rows => true
    }
  } else if [etltype] == "transactions" { #[fields][srctype]
    csv {
      columns => [ "hash", "nonce", "block_hash", "block_number", "transaction_index", "from_address", "to_address", "value", "gas", "gas_price", "inputcontext" ]
      separator => ","
      remove_field => ["message"]
      skip_empty_columns => true
      skip_empty_rows => true
    }
  }
}

output {
  if [etltype] == "blocks" {
    elasticsearch {
      hosts => "xxx.
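The filter above switches on an etltype field, so that field has to be attached to each event upstream. One way is in filebeat.yml, with one input per CSV type; a sketch assuming log paths under the compose volume above (the exact paths and field name are placeholders from this write-up, not a fixed convention):

filebeat.inputs:
- type: log
  paths:
    - /var/log/hardblue/etl/blocks/*.csv
  # tag the event with the type the Logstash filter switches on
  fields:
    etltype: blocks
  fields_under_root: true
- type: log
  paths:
    - /var/log/hardblue/etl/transactions/*.csv
  fields:
    etltype: transactions
  fields_under_root: true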