Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- ----Create a database---
- $ influx
- > CREATE DATABASE "NAME"
- Use a database
- >use NAME OF DATABASE
- Drop Series
- > DROP SERIES FROM "NAME"
- Drop Measurement
- >DROP MEASUREMENT "NAME"
- See measurement
- >SELECT * FROM "NAME"
- See with timestamp
- >SELECT * FROM "h2o_feet" WHERE time > now() - 7d
- Delete measurement with time val
- >DELETE FROM "NAME" WHERE time < 'TIME'
- Insert manual Value with time
- >INSERT treasures,captain_id=pirate_king value=2 15556874561
- ^--measurement--^--tag-------------^-value---^--timestamp--
- Last values
- >select * from WVdata where time > now() - 1h ORDER BY DESC limit 10
- Field Types (list the field keys and their types of a measurement)
- >show field keys from "MEASUREMENT"
- Duplicate Measurements //-1st- create exact copy with name, -2nd- creates new measurement name
- >use firstdatabase
- >SELECT * INTO "database2"..:MEASUREMENT from "firstmeasurement" GROUP BY *
- or
- > SELECT * INTO "database2"..YOUR_NEW_MEASUREMENT from "firstmeasurement" GROUP BY *
- COUNT METRICS
- >select count(*) from "measurement"
- Check Size
- $ sudo du -sh /var/lib/influxdb/data/DATABASE
- Find difference between min/max value per day, in a given timezone
- SELECT spread(value) FROM testing WHERE ID='L1E' AND time > now() - 1d GROUP BY time(1d) tz('Europe/Athens')
- ---GET-----
- $influx -database 'csv_WVdata' -host '160.40.49.235' -username 'influxadmin' -password 'admin' -execute 'select * from WVdata limit 1'
- --------
- ---Show Databases--
- $ influx -execute 'SHOW DATABASES'
- ---Execute queries that do require a database specification, and change the timestamp precision:----
- $ influx -execute 'SELECT * FROM "SERIES" LIMIT 3' -database="DATABASE" -precision=rfc3339
- ---Specify the format of the server responses with -format---
- The default format is column:
- $ influx -format=column ------------- Change the format to csv:$ influx -format=csv --------- Change the format to json:$ influx -format=json -pretty
- ----Export data to Excel (CSV) file---
- $ sudo /usr/bin/influx -precision rfc3339 -database 'openhab_db' -host 'localhost' -username 'openhab' -password 'admin' -execute 'select * from InfluxLogging_Commands' -format 'csv' > /home/InfluxToExel/export.csv
- ^-RFC3339 format for timestamp ^--name of DB ^--your url to DB-- ^----DB Table------ ^---path to file--^--name of the file
- ----------------------with limit-----------------------------------------------------------------------
- $influx -database 'openhab_db' -host 'localhost' -username 'openhab' -password 'admin' -execute 'select * from L1P_Avrg limit 3' -format 'csv' > /home/InfluxToExel/export.csv
- influx -database 'csv_WattVolt' -host 'localhost' -username 'influxadmin' -password 'admin' -execute 'select * from WVdata' -format 'column' > /etc/telegraf/export.txt <-----------testing
- Note: you can skip the "sudo /usr/bin/" part and start the command directly with "influx"
- ----------------------------post it with curl----------------------------------[
- $curl -i -XPOST "http://influxdb_hostname:port/write?db=YOURDATABASENAME&precision=s" -u username:password --data-binary @YOURFILE.txt
- ||||
- -----example----------------------------------------------------------------------------------------------
- $ curl -i -XPOST "http://160.40.48.86:8086/write?db=import&precision=s" --data-binary @import.txt
- ------Export data to json with Get including, chunked(limit 1 result per line,limit timestamp last 1 day)---------------
- $curl -G 'http://localhost:8086/query' --data-urlencode "db=openhab_db" --data-urlencode "chunked=true" --data-urlencode "chunk_size=1" --data-urlencode "q=SELECT * FROM InfluxLogging_Commands where time > now() - 1d" > /home/InfluxToExel/exporttest2.json
- ------Export multiple data to json with Get including, chunked(limit 1 result per line,limit timestamp last 1 day)---------------
- $sudo curl -G 'http://localhost:8086/query' --data-urlencode "db=openhab_db" --data-urlencode "chunked=true" --data-urlencode "chunk_size=1" --data-urlencode "q=SELECT * FROM InfluxLogging_Commands where time > now() - 1d; SELECT * FROM L1P_Avrg where time > now() - 1d" > /home/InfluxToExel/exporttest2.json
- ------Chunking(split results per line)------------
- curl -G 'http://localhost:8086/query' --data-urlencode "db=deluge" --data-urlencode "chunked=true" --data-urlencode "chunk_size=20000" --data-urlencode "q=SELECT * FROM liters"
- ^---chunk set---- ^---how many per line----
- --------EXAMPLE QUERY----------------------------------------------------
curl -G 'http://160.40.49.235:8086/query' -u influxadmin:admin --data-urlencode "db=BESSRES" --data-urlencode "chunked=true" --data-urlencode "chunk_size=1" --data-urlencode 'q=SELECT "mx:Energy_preds" FROM "House_01" WHERE time > now() - 10d LIMIT 10'
- influx -precision=rfc3339 -database 'BESSRES' -host '160.40.49.235' -username 'influxadmin' -password 'admin' -execute 'select "mx:Energy" from House_01 WHERE time > now() - 10d ORDER BY DESC LIMIT 10'
- --------------------MULTI QUERY----------------------------------
- curl -G 'http://localhost:8086/query?pretty=true' -u influxadmin:admin --data-urlencode "db=openhab_db" --data-urlencode "q=SELECT * FROM L1A;SELECT * FROM L1A2;SELECT * FROM L1A3;SELECT * FROM L1P_Avrg;SELECT * FROM L1P2_Avrg;SELECT * FROM L1P3_Avrg;SELECT * FROM L1V;SELECT * FROM L1V2;SELECT * FROM L1V3;SELECT * FROM L1VA;SELECT * FROM L1VA2;SELECT * FROM L1VA3;SELECT * FROM L1E;SELECT * FROM L1E2;SELECT * FROM L1E3;SELECT * FROM L1E_Avrg;SELECT * FROM L1E2_Avrg;SELECT * FROM L1E3_Avrg" -H "Accept: application/csv" > /home/isaioglou/export.csv
- curl -G 'http://localhost:8086/query?pretty=true' -u influxadmin:admin --data-urlencode "db=3ph_gavazzi" --data-urlencode "q=SELECT value FROM L1A,L1C,L1E,L1E_Avrg,L1F,L1P,L1P_Avrg,L1V,Weather_Humidity,Weather_Temperature,System_Temperature_CPU,System_Temperature_GPU WHERE time > now() - 20d LIMIT 1" -H "Accept: application/csv" > /home/isaioglou/export.csv
- --------------------1ph_Gavazzi--------------------------------------------------------------------------------------------
- curl -G 'http://localhost:8086/query?pretty=true' -u influxadmin:admin --data-urlencode "db=3ph_gavazzi" --data-urlencode "q=SELECT value FROM L1A,L1C,L1E,L1E_Avrg,L1F,L1P,L1P_Avrg,L1V,Weather_Humidity,Weather_Temperature,System_Temperature_CPU,System_Temperature_GPU WHERE time > now() - 2d" -H "Accept: application/csv" > /home/isaioglou/"1phGavazzi_$(date +%F)".csv
- --------------------3ph_Gavazzi--------------------------------------------------------------------------------------------
- curl -G 'http://localhost:8086/query' --data-urlencode "db=openhab_db" --data-urlencode "q=SELECT value FROM L1A,L1A2,L1A3,L1P,L1P2,L1P3,L1P_Avrg,L1P2_Avrg,L1P3_Avrg,L1E,L1E2,L1E3,L1E_Avrg,L1E2_Avrg,L1E3_Avrg,Weather_Humidity,Weather_Temperature,System_Temperature_CPU,System_Temperature_GPU WHERE time > now() - 2d" -H "Accept: application/csv" > /home/isaioglou/"1phGavazzi_$(date +%F)".csv
- ---------------CURL MULTI EXPORT TO CSV FROM DATABASE---------------------------------------------------------------------------------------
- curl -G 'http://localhost:8086/query?pretty=true' -u influxadmin:admin --data-urlencode "db=trololo" --data-urlencode "q=SELECT * FROM napo;SELECT * FROM napo_v2" -H "Accept: application/csv" > /home/isaioglou/export.csv
- { DEL -H "Accept: application/csv" if you want json }
- You can use jq to convert the JSON output to CSV as follows, which also allows you to get RFC3339 formatted timestamps:
- jq -r "(.results[0].series[0].columns), (.results[0].series[0].values[]) | @csv"
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement