Add support for json queries for recordings

develop
Ensar Sarajčić 2018-10-30 20:16:27 +01:00
parent e7e75fc658
commit 9c7ab8d121
9 changed files with 198 additions and 7 deletions


@@ -18,6 +18,7 @@ def add_resources():
    from .resources.token import TokenResource, ValidateTokenResource
    from .resources.device import (DeviceResource,
                                   DeviceRecordingResource,
                                   DeviceRecordingQueryResource,
                                   DeviceListResource,
                                   DeviceTypeResource,
                                   DeviceTypeListResource,
@@ -42,6 +43,8 @@ def add_resources():
    api.add_resource(DeviceResource, '/v1/devices/<int:device_id>')
    api.add_resource(DeviceRecordingResource,
                     '/v1/devices/<int:device_id>/recordings')
    api.add_resource(DeviceRecordingQueryResource,
                     '/v1/devices/<int:device_id>/recordings/jsonql')
    api.add_resource(DeviceListResource, '/v1/devices')
    api.add_resource(DeviceTypeResource,
                     '/v1/devices/types/<int:device_type_id>')
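
A quick way to exercise the new route, sketched with requests; the base URL, device id and the bearer-token header are assumptions about the deployment, not something this change defines:

import requests

BASE_URL = 'http://localhost:5000'   # assumed local deployment
TOKEN = '<access-token>'             # whatever ProtectedResource expects

query = {
    "selections": {"record_value": "avg", "record_type": "count"},
    "filters": {"recorded_at": {"$gt": "2018-01-01"}},
    "groups": {"recorded_at": "month"}
}

response = requests.post(
    BASE_URL + '/v1/devices/1/recordings/jsonql',
    json=query,
    headers={'Authorization': 'Bearer ' + TOKEN})
print(response.status_code, response.text)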


@@ -31,7 +31,13 @@ class DeviceWithConfigurationSchema(DeviceSchema):
class RecordingsSchema(BaseResourceSchema):
    recorded_at = fields.DateTime()
    record_type = fields.Integer()
    record_value = fields.String()
    record_value = fields.Float()


class RecordingsQuerySchema(Schema):
    selections = fields.Raw()
    filters = fields.Raw()
    groups = fields.Raw()


class DeviceSecretSchema(BaseResourceSchema):
@@ -103,6 +109,17 @@ class DeviceRecordingResource(ProtectedResource):
        return RecordingsSchema().dump(created_recording), 201


class DeviceRecordingQueryResource(ProtectedResource):
    @use_args(RecordingsQuerySchema(), locations=('json',))
    @swag_from('swagger/create_device_recording_query_spec.yaml')
    def post(self, args, device_id):
        validate_device_ownership(device_id)
        try:
            return devices.run_custom_query(device_id, args), 200
        except ValueError as e:
            abort(400, message=str(e), status='error')


class DeviceListResource(ProtectedResource):
    @use_args(DeviceSchema(), locations=('json',))
    @swag_from('swagger/create_device_spec.yaml')


@@ -3,6 +3,7 @@ import hmac
import urllib.parse
from .models import Device, Recording, DeviceAssociation, DeviceType
from app.core import app
from app.jsonql import api as jsonql
# Private helpers
@@ -260,3 +261,30 @@ def create_recording(device_id, raw_json):
    recording = parse_raw_json_recording(device_id, raw_json)
    with app.app_context():
        recording.save()


def run_custom_query(device_id, request):
    """
    Runs a jsonql query (selections, filters, groups) over the recordings
    of the given device and returns the resulting rows.
    """
    if not Device.exists(id=device_id):
        raise ValueError("Device does not exist!")

    def recording_field_provider(name):
        # Maps jsonql field names onto Recording columns.
        if name == 'record_value':
            return Recording.record_value
        if name == 'record_type':
            return Recording.record_type
        if name == 'device_id':
            return Recording.device_id
        if name == 'recorded_at':
            return Recording.recorded_at
        if name == 'received_at':
            return Recording.received_at

    resulting_query = jsonql.run_query_on(Recording.query.with_entities(),
                                          recording_field_provider,
                                          **request)
    print("Resulting query: " + str(resulting_query))
    result = resulting_query.filter(Recording.device_id == device_id).all()
    print("RESULT: " + str(result))
    return result
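
For orientation, with a grouped average (selections={'record_value': 'avg'}, groups={'recorded_at': 'month'}) the helper above builds roughly the following SQLAlchemy query; this is a hand-expanded sketch of what run_query_on assembles, not code in this change:

from app.core import db
from .models import Recording

device_id = 1  # example device

monthly_avg = (Recording.query
               .with_entities(db.func.avg(Recording.record_value))
               .group_by(db.func.date_part('month', Recording.recorded_at))
               .filter(Recording.device_id == device_id))
rows = monthly_avg.all()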


@@ -16,14 +16,14 @@ class Recording(db.Model):
                            default=db.func.current_timestamp())
    device_id = db.Column(db.Integer, db.ForeignKey('devices.id'))
    record_type = db.Column(db.Integer, nullable=False)
    record_value = db.Column(db.String, nullable=False)
    record_value = db.Column(db.Float, nullable=False)
    raw_record = db.Column(JSON, nullable=True)

    def __init__(self, device_id, record_type,
                 record_value, recorded_at, raw_json):
        self.device_id = int(device_id)
        self.record_type = int(record_type)
        self.record_value = str(record_value)
        self.record_value = float(record_value)
        self.recorded_at = datetime.fromtimestamp(int(recorded_at))
        self.received_at = datetime.utcnow()
        self.raw_record = raw_json


app/jsonql/api.py 100644

@@ -0,0 +1,93 @@
from app.core import db

# Supported aggregate functions, filter operators and date-part groupings.
GROUPS = ['sum', 'avg', 'count']
FILTERS = ['$gt', '$lt', '$eq']
PERIODS = ['year', 'month', 'week', 'day']


def run_query_on(query_object, field_provider, **kwargs):
    selections, filters, groups = validate_selections(**kwargs)
    entities = []
    print('Starting with args: ' + str(kwargs))
    if selections is not None:
        for selection in selections.keys():
            entities.append(get_column(selections[selection],
                                       field_provider(selection)))
        print('New entities: ' + str(entities))
        query_object = query_object.with_entities(*entities)
    if filters is not None:
        for filtered_column in filters.keys():
            for actual_filter in filters[filtered_column].keys():
                query_object = query_object.filter(
                    get_filter(field_provider(filtered_column),
                               actual_filter,
                               filters[filtered_column][actual_filter]))
    if groups is not None:
        for group in groups.keys():
            query_object = query_object.group_by(
                get_group(field_provider(group), groups[group]))
    return query_object


def get_column(selection_name, field):
    # 'value' selects the raw column; everything else is an aggregate.
    if selection_name == 'value':
        return field
    if selection_name == 'sum':
        return db.func.sum(field)
    if selection_name == 'avg':
        return db.func.avg(field)
    if selection_name == 'count':
        return db.func.count(field)


def get_filter(filter_column, filter_key, filter_value):
    # Translates a Mongo-style operator into a SQLAlchemy comparison.
    if filter_key == '$gt':
        return filter_column > filter_value
    if filter_key == '$lt':
        return filter_column < filter_value
    if filter_key == '$eq':
        return filter_column == filter_value


def get_group(group_column, group_value):
    if group_value == 'value':
        return group_column
    if group_value in PERIODS:
        return db.func.date_part(group_value, group_column)
    # We now expect a date format
    return db.func.to_char(group_column, group_value)


def is_group(**kwargs):
    return kwargs.get('groups') is not None


def validate_selections(**kwargs):
    selections = kwargs.get('selections')
    filters = kwargs.get('filters')
    groups = kwargs.get('groups')
    if selections is None:
        raise ValueError("Missing selections!")
    if is_group(**kwargs):
        for key in selections.keys():
            if selections[key] not in GROUPS:
                raise ValueError("Can only use sum, avg and count "
                                 "when grouping!")
    if filters is not None:
        for key in filters.keys():
            for inner_key in filters[key].keys():
                if inner_key not in FILTERS:
                    raise ValueError("Unknown filter: " + str(inner_key))
    return selections, filters, groups
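
run_query_on is deliberately model-agnostic: the caller supplies a field provider that maps jsonql field names to columns. A minimal sketch against a made-up Measurement model (the model and its columns are invented purely for illustration):

from app.core import db
from app.jsonql import api as jsonql


class Measurement(db.Model):
    # Hypothetical table, defined only to show the jsonql contract.
    __tablename__ = 'measurements'
    id = db.Column(db.Integer, primary_key=True)
    value = db.Column(db.Float)
    taken_at = db.Column(db.DateTime)


def field_provider(name):
    # Translate jsonql field names into columns of the model.
    return {
        'value': Measurement.value,
        'taken_at': Measurement.taken_at,
    }.get(name)


query = jsonql.run_query_on(
    Measurement.query.with_entities(),
    field_provider,
    selections={'value': 'avg'},                  # aggregates, since we group
    filters={'taken_at': {'$gt': '2018-01-01'}},
    groups={'taken_at': 'month'})
monthly_averages = query.all()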


@@ -0,0 +1,16 @@
{
    "target": "Recording",
    "selections": {
        "record_value": "sum",
        "record_type": "count"
    },
    "filters": {
        "recorded_at": {
            "$gt": "2018-01-01"
        }
    },
    "groups": {
        "recorded_at": "month"
    }
}
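
Fed through get_column, get_filter and get_group, the example document above boils down to the following SQLAlchemy expressions; a sketch for orientation, with the Recording import path assumed rather than taken from this change:

from app.core import db
from app.db.models import Recording   # import path is an assumption

# "selections" -> the SELECT list
select_list = [db.func.sum(Recording.record_value),
               db.func.count(Recording.record_type)]
# "filters" -> the WHERE clause
where_clause = Recording.recorded_at > "2018-01-01"
# "groups" -> the GROUP BY clause
group_by = db.func.date_part('month', Recording.recorded_at)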


@@ -157,9 +157,9 @@ definitions:
      record_type:
        $ref: '#/definitions/id'
      record_value:
        type: string
        type: number
        format: float
        description: Value of the recording
        example: '25 degrees'
        example: 25.0
  RecordingCreation:
    type: object
@@ -173,9 +173,9 @@
      record_type:
        $ref: '#/definitions/id'
      record_value:
        type: string
        type: number
        format: float
        description: Value of the recording
        example: '25 degrees'
        example: 25.0
  DeviceType:
    type: object


@@ -0,0 +1,34 @@
"""Change recordings.record_value from String to Float

Revision ID: c09252c2c547
Revises: dad1f9b4eec2
Create Date: 2018-10-30 19:33:17.778301

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'c09252c2c547'
down_revision = 'dad1f9b4eec2'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('recordings', 'record_value',
                    existing_type=sa.String(),
                    type_=sa.Float(),
                    postgresql_using='record_value::double precision')
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('recordings', 'record_value',
                    existing_type=sa.Float(),
                    type_=sa.String(),
                    nullable=False,
                    # explicit cast back to text, mirroring the upgrade
                    postgresql_using='record_value::varchar')
    # ### end Alembic commands ###