ci #1

Merged
3 commits from ci into main, 2024-02-15 17:44:21 +01:00
3 changed files with 56 additions and 36 deletions

.gitea/workflows/lint.yml Normal file

@@ -0,0 +1,23 @@
name: lint
run-name: lint is launched by ${{ github.actor }}
on: [push]
jobs:
  check:
    runs-on: ubuntu-latest
    name: lint
    steps:
      - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
      - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by Gitea!"
      - run: echo "🔎 The name of your branch is ${{ github.ref }} and your repository is ${{ github.repository }}."
      - name: Check out repository code
        uses: actions/checkout@v4
      - name: Create Python virtualenv
        run: |
          mkdir -p .cache
          python -m venv .
          source bin/activate
          XDG_CACHE_HOME=.cache pip install pylint-venv pylint
      - name: Lint
        run: XDG_CACHE_HOME=.cache ./bin/pylint -d E0401 *.py
      - run: echo "🍏 This job's status is ${{ job.status }}."

README.md

@@ -1,11 +1,11 @@
 # pacer2influxdb
-export Pacer data to InfluxDB
+export [Pacer](https://play.google.com/store/apps/details?id=cc.pacer.androidapp) data to [InfluxDB](https://www.influxdata.com/products/influxdb-overview/)
-* require Python 3, influxdb-python and sqlite-python
+* requires Python 3, `influxdb-python` and `sqlite-python`
 * original code from https://github.com/korjavin/sqlite2influxdb and https://github.com/influxdata/influxdb-python
-* sqlite database is /data/data/cc.pacer.androidapp/databases/MDData.db on an Android device, may need root privileges to get it
+* sqlite database is `/data/data/cc.pacer.androidapp/databases/MDData.db` on an Android device, may need root privileges to get it
 * collects steps, distance, active time and calories
 * for now InfluxDB database needs to be created first
 * incremental feed: just insert missing points
-* grafana dashboard export in json format is included, adjust datasource name
+* [Grafana](https://grafana.com/) dashboard export in json format is included, adjust datasource name
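
One bullet above notes that the InfluxDB database must be created before the first run. A minimal one-time setup sketch with `influxdb-python`; the connection values mirror the old `main()` defaults visible in the script diff below (localhost:8086, root/root, database `demo`), so adjust them to your setup:

# One-time setup: create the target InfluxDB 1.x database before the
# first export run. A sketch, not part of the repository.
from influxdb import InfluxDBClient

client = InfluxDBClient('localhost', 8086, 'root', 'root')
client.create_database('demo')  # idempotent on recent InfluxDB 1.x
client.close()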

pacer2influxdb.py

@@ -1,60 +1,59 @@
 #!/usr/bin/env python3
-from influxdb import InfluxDBClient
+"""Export Pacer data to InfluxDB"""
 import datetime
 import time
 import sqlite3 as lite
 import argparse
+from influxdb import InfluxDBClient
 
-def main(host='localhost', port=8086, user='root', password='root', dbname='demo', dbfile='demo.db'):
+def main(args):
     """Instantiate the connection to the InfluxDB client."""
-    db = InfluxDBClient(host, port, user, password, dbname)
-    measurements = db.get_list_measurements()
-    ts = 0
+    influx_db = InfluxDBClient(args.host, args.port, args.user, args.password, args.dbname)
+    measurements = influx_db.get_list_measurements()
+    time_stamp = 0
     lastdate = 0
     laststeps = 0
     if measurements != []:
-        lastentry = db.query('SELECT LAST("steps") FROM "steps"')
+        lastentry = influx_db.query('SELECT LAST("steps") FROM "steps"')
         points = lastentry.get_points('steps')
         lastdate = list(points)[0]['time']
-        ts = time.mktime(datetime.datetime.strptime(lastdate, '%Y-%m-%dT%H:%M:%SZ').timetuple())
-        if ts == datetime.datetime.now().timestamp() // 86400 * 86400 + time.timezone:
-            points = lastentry.get_points('steps')
-            laststeps = list(points)[0]['last']
+        time_stamp = time.mktime(datetime.datetime.strptime(lastdate, '%Y-%m-%dT%H:%M:%SZ')
+                                 .timetuple())
+        if time_stamp == datetime.datetime.now().timestamp() // 86400 * 86400 + time.timezone:
+            points = lastentry.get_points('steps')
+            laststeps = list(points)[0]['last']
     if args.verbose:
-        print("last entry is %s, ts is %s, number of steps is %s\n" % (lastdate,ts,laststeps))
-    con = lite.connect(dbfile)
+        print(f'last entry is {lastdate}, timestamp is {time_stamp}, \
+number of steps is {laststeps}\n')
+    con = lite.connect(args.dbfile)
     with con:
         cur = con.cursor()
-        cur.execute("SELECT recordedForDate,steps,distanceInMeters,activeTimeInSeconds,calories FROM dailyActivityLog WHERE recordedForDate >= %s AND steps > %s" % (ts,laststeps))
+        cur.execute(f'SELECT recordedForDate,steps,distanceInMeters,activeTimeInSeconds,calories \
+                      FROM dailyActivityLog \
+                      WHERE recordedForDate >= {time_stamp} AND steps > {laststeps}')
         while True:
             row = cur.fetchone()
-            if row == None:
+            if row is None:
                 break
             mytime = datetime.datetime.fromtimestamp(row[0]).strftime('%Y-%m-%dT%H:%M:%SZ')
-            data = [
-                    {"measurement":"steps",
+            data = [{"measurement":"steps",
                      "time":mytime,
                      "fields": {
                          "steps":row[1],
                          "distanceInMeters":row[2],
                          "activeTimeInSeconds":row[3],
                          "calories":row[4]
                      }
-                    }
-            ]
+                    }]
             if args.verbose:
-                print("writing data for %s" % (mytime))
-            db.write_points(data)
-    db.close()
+                print(f'writing data for {mytime}')
+            influx_db.write_points(data)
+    influx_db.close()
 
 def parse_args():
     """Parse the args from main."""
@@ -80,7 +79,5 @@ def parse_args():
                         help='sqlite (pacer) database name')
     return parser.parse_args()
 
 if __name__ == '__main__':
-    args = parse_args()
-    main(host=args.host, port=args.port, user=args.user, password=args.password, dbname=args.dbname, dbfile=args.dbfile)
+    main(parse_args())
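
A note on the incremental-feed check kept by this refactor: `time.mktime()` parses the stored UTC timestamp as if it were local time, and the right-hand side shifts today's UTC midnight by the same `time.timezone` offset, so the two skews cancel (DST edge cases aside) and the comparison effectively asks whether the last stored point carries today's date, in which case today's still-growing step count is re-read. A standalone sketch with a hypothetical timestamp:

# How the script decides whether the last InfluxDB point is from today.
# A sketch, not part of the repository.
import datetime
import time

last = '2024-02-15T00:00:00Z'  # hypothetical last point in InfluxDB
ts = time.mktime(datetime.datetime.strptime(last, '%Y-%m-%dT%H:%M:%SZ').timetuple())

utc_midnight_today = datetime.datetime.now().timestamp() // 86400 * 86400
print(ts == utc_midnight_today + time.timezone)  # True only on 2024-02-15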