ci #1
3 changed files with 56 additions and 36 deletions
.gitea/workflows/lint.yml (new file, 23 lines)
@@ -0,0 +1,23 @@
+name: lint
+run-name: lint is launched by ${{ github.actor }}
+on: [push]
+
+jobs:
+  check:
+    runs-on: ubuntu-latest
+    name: lint
+    steps:
+      - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
+      - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by Gitea!"
+      - run: echo "🔎 The name of your branch is ${{ github.ref }} and your repository is ${{ github.repository }}."
+      - name: Check out repository code
+        uses: actions/checkout@v4
+      - name: Create Python virtualenv
+        run: |
+          mkdir -p .cache
+          python -m venv .
+          source bin/activate
+          XDG_CACHE_HOME=.cache pip install pylint-venv pylint
+      - name: Lint
+        run: XDG_CACHE_HOME=.cache ./bin/pylint -d E0401 *.py
+      - run: echo "🍏 This job's status is ${{ job.status }}."
@@ -1,11 +1,11 @@
 # pacer2influxdb
 
-export Pacer data to InfluxDB
+export [Pacer](https://play.google.com/store/apps/details?id=cc.pacer.androidapp) data to [InfluxDB](https://www.influxdata.com/products/influxdb-overview/)
 
-* require Python 3, influxdb-python and sqlite-python
+* requires Python 3, `influxdb-python` and `sqlite-python`
 * original code from https://github.com/korjavin/sqlite2influxdb and https://github.com/influxdata/influxdb-python
-* sqlite database is /data/data/cc.pacer.androidapp/databases/MDData.db on an Android device, may need root privileges to get it
+* sqlite database is `/data/data/cc.pacer.androidapp/databases/MDData.db` on an Android device, may need root privileges to get it
 * collects steps, distance, active time and calories
 * for now InfluxDB database needs to be created first
 * incremental feed: just insert missing points
-* grafana dashboard export in json format is included, adjust datasource name
+* [Grafana](https://grafana.com/) dashboard export in json format is included, adjust datasource name
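The README notes that the InfluxDB database has to exist before the first export run. A minimal sketch of creating it with influxdb-python's create_database(); the connection values and the database name 'pacer' are placeholders, not taken from the repository (use whatever name the script will be pointed at):

#!/usr/bin/env python3
"""One-off helper: create the target InfluxDB database before the first run (illustrative sketch)."""
from influxdb import InfluxDBClient

client = InfluxDBClient('localhost', 8086, 'root', 'root')
# In InfluxDB 1.x, CREATE DATABASE on an existing database succeeds without error,
# so running this twice is harmless.
client.create_database('pacer')
client.close()

The same thing can be done from the influx CLI with a CREATE DATABASE statement.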
@@ -1,45 +1,47 @@
 #!/usr/bin/env python3
-from influxdb import InfluxDBClient
+"""Export Pacer data to InfluxDB"""
 
 import datetime
 import time
 import sqlite3 as lite
 import argparse
+from influxdb import InfluxDBClient
 
-def main(host='localhost', port=8086, user='root', password='root', dbname='demo', dbfile='demo.db'):
+def main(args):
     """Instantiate the connection to the InfluxDB client."""
-    db = InfluxDBClient(host, port, user, password, dbname)
-    measurements = db.get_list_measurements()
-    ts = 0
+    influx_db = InfluxDBClient(args.host, args.port, args.user, args.password, args.dbname)
+    measurements = influx_db.get_list_measurements()
+    time_stamp = 0
     lastdate = 0
     laststeps = 0
     if measurements != []:
-        lastentry = db.query('SELECT LAST("steps") FROM "steps"')
+        lastentry = influx_db.query('SELECT LAST("steps") FROM "steps"')
         points = lastentry.get_points('steps')
         lastdate = list(points)[0]['time']
-        ts = time.mktime(datetime.datetime.strptime(lastdate, '%Y-%m-%dT%H:%M:%SZ').timetuple())
-        if ts == datetime.datetime.now().timestamp() // 86400 * 86400 + time.timezone:
+        time_stamp = time.mktime(datetime.datetime.strptime(lastdate, '%Y-%m-%dT%H:%M:%SZ')
+                                 .timetuple())
+        if time_stamp == datetime.datetime.now().timestamp() // 86400 * 86400 + time.timezone:
             points = lastentry.get_points('steps')
             laststeps = list(points)[0]['last']
 
     if args.verbose:
-        print("last entry is %s, ts is %s, number of steps is %s\n" % (lastdate,ts,laststeps))
+        print(f'last entry is {lastdate}, timestamp is {time_stamp}, \
+number of steps is {laststeps}\n')
 
-    con = lite.connect(dbfile)
+    con = lite.connect(args.dbfile)
     with con:
         cur = con.cursor()
-        cur.execute("SELECT recordedForDate,steps,distanceInMeters,activeTimeInSeconds,calories FROM dailyActivityLog WHERE recordedForDate >= %s AND steps > %s" % (ts,laststeps))
+        cur.execute(f'SELECT recordedForDate,steps,distanceInMeters,activeTimeInSeconds,calories \
+                      FROM dailyActivityLog \
+                      WHERE recordedForDate >= {time_stamp} AND steps > {laststeps}')
         while True:
             row = cur.fetchone()
-            if row == None:
+            if row is None:
                 break
             mytime = datetime.datetime.fromtimestamp(row[0]).strftime('%Y-%m-%dT%H:%M:%SZ')
-            data = [
-                {"measurement":"steps",
+            data = [{"measurement":"steps",
                     "time":mytime,
                     "fields": {
                         "steps":row[1],
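A note on the incremental-feed check above: the expression datetime.datetime.now().timestamp() // 86400 * 86400 + time.timezone floors the current Unix epoch to a multiple of 86400 seconds and adds the local non-DST UTC offset, which the script treats as "midnight of today" in the same form Pacer stores in recordedForDate. If the last point already in InfluxDB carries that timestamp, its step count is kept, so the SQL query (recordedForDate >= time_stamp AND steps > laststeps) re-imports today's row only once the step count has grown. A small standalone sketch of the arithmetic, not part of the repository:

# Illustrative sketch of the day-rounding used by the script, not repository code.
import datetime
import time

now = datetime.datetime.now().timestamp()      # current Unix epoch, as a float
day_floor = now // 86400 * 86400               # floor to the most recent 00:00 UTC
midnight = day_floor + time.timezone           # apply the local non-DST offset west of UTC
# What the script treats as "today's midnight"; note it can land on the neighbouring
# day near the local date boundary and shifts by an hour while DST is in effect.
print(datetime.datetime.fromtimestamp(midnight))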
@@ -47,14 +49,11 @@ def main(host='localhost', port=8086, user='root', password='root', dbname='demo', dbfile='demo.db'):
                         "activeTimeInSeconds":row[3],
                         "calories":row[4]
                         }
-                }
-            ]
-
+            }]
             if args.verbose:
-                print("writing data for %s" % (mytime))
-            db.write_points(data)
-    db.close()
-
+                print(f'writing data for {mytime}')
+            influx_db.write_points(data)
+    influx_db.close()
 
 def parse_args():
     """Parse the args from main."""
@@ -80,7 +79,5 @@ def parse_args():
                         help='sqlite (pacer) database name')
     return parser.parse_args()
 
-
 if __name__ == '__main__':
-    args = parse_args()
-    main(host=args.host, port=args.port, user=args.user, password=args.password, dbname=args.dbname, dbfile=args.dbfile)
+    main(parse_args())
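The diff only shows the tail of parse_args(). For orientation, here is a plausible shape of that function, reconstructed from the attributes main() reads (host, port, user, password, dbname, dbfile, verbose) and from the defaults of the removed main() signature; the flag spellings, defaults and help texts other than the dbfile one are assumptions, not the repository's actual code:

# Illustrative reconstruction, NOT the repository's actual parse_args().
import argparse

def parse_args():
    """Parse the args from main."""
    parser = argparse.ArgumentParser(description='Export Pacer data to InfluxDB')
    parser.add_argument('--host', default='localhost', help='InfluxDB host')
    parser.add_argument('--port', type=int, default=8086, help='InfluxDB port')
    parser.add_argument('--user', default='root', help='InfluxDB user')
    parser.add_argument('--password', default='root', help='InfluxDB password')
    parser.add_argument('--dbname', default='demo', help='InfluxDB database name')
    parser.add_argument('--dbfile', default='demo.db',
                        help='sqlite (pacer) database name')
    parser.add_argument('--verbose', action='store_true', help='print progress')
    return parser.parse_args()

With that shape, a typical run would look like ./pacer2influxdb.py --dbname pacer --dbfile MDData.db --verbose, where the script filename is inferred from the repository name and is likewise an assumption.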