Skip to main content

Query parameters

The queries support adding parameters to the query field. Like this:

query GET_MY_POINT{
points(where: {name:{_EQ:"my-special-point-name"}}) {
id
name
}
}

The available parameters vary between the fields, see below here for reference, and remember that you can also use auto-completion to help out when querying.

Where

The where input supports filtering on all the type fields, except metadata, using evaluations such as the following:

OperatorDescription
_EQEqual to
_NENot Equal to
_LTLess than
_LTELess than or equal to
_GTGreater than
_GTEGreater than or equal to

They can also be combined using logical operators such as the following:

OperatorDescription
_ANDAll evaluations are true
_ORAt least one evaluation is true
_NORNo evaluations are true

Examples

where: {createdAt: {_GT: "2020-01-01T12:00:00+0000"}}
where: {
_AND:[
{createdAt: {_GT: "2020-01-01T12:00:00+0000"}}
{createdAt: {_LT: "2020-01-02T12:00:00+0000"}}
]
}

Paginate

Paginate supports the following inputs

TypeDescription
first: xReturn the first X items in the filtered result
last: xReturn last X items in the filtered result
after: idReturn items after a given signal id
before: idReturn items before a given signal id

Examples

paginate: {first:10}
paginate: {last:10 after:"61b0aeac9f0b95a1c3199fec"}

Pagination is often used together with the pageInfo field to navigate in signals data, like this:

query SIGNALS_WITH_PAGINATION {
signals {
pageInfo {
startCursor
hasNextPage
hasPreviousPage
endCursor
}
edges {
node {
id
timestamp
createdAt
type
unit
pointId
data {
rawValue
numericValue
}
}
}
}
}
Response
{
"data": {
"signals": {
"pageInfo": {
"startCursor": "eyJpZCI6IjYxYWY3ZDljOWYwYjk1YTFjM2E0YTBiNiJ9",
"hasNextPage": true,
"hasPreviousPage": false,
"endCursor": "eyJpZCI6IjYxYWY3ZDljOWYwYjk1YTFjM2E0YTBjOSJ9"
},
"edges": "edges": [
{
"node": {
"id": "6195f5fdc1bbb47401b07fa2",
"timestamp": "2021-11-17T14:30:21.000000000+0000",
"createdAt": "2021-11-18T06:43:09.854000000+0000",
"type": "air temperature",
"unit": "CELSIUS_DEGREES",
"pointId": "6195f58364d3a66041c9548b",
"data": {
"numericValue": 22.200000762939453,
"rawValue": "22.2"
}
}
},
// ...
]
}
}
}

Paginate Examples

Sometimes you need quick access to a large amount of raw data. In those cases a quick way to get that would be to paginate through a query and save the result as a csv file, or further process it in pandas, danfo or a data pipeline.

Python

Requirements

requests (tested with v2.26.0)

pip install requests
# pagination.py
# set environment variables before running:
# TENANT_ID=my-tenant TENANT_KEY=abcdetc MAX_PAGINATIONS=100 FROM_TIME=2022-05-30T12:00 TO_TIME=2022-05-30T13:00 python pagination.py
import os
import requests
import csv
from datetime import datetime, timedelta, timezone

URL = "https://iot.dimensionfour.io/graph"
TENANT_ID = os.environ["TENANT_ID"]
TENANT_KEY = os.environ["TENANT_KEY"]
MAX_PAGINATIONS = int(os.getenv("MAX_PAGINATIONS", "10"))
FROM_TIME = os.getenv(
"FROM_TIME", (datetime.now(timezone.utc) - timedelta(days=1)).isoformat()
)
TO_TIME = os.getenv("TO_TIME", datetime.now(timezone.utc).isoformat())
FILENAME = "d4_export.csv"

print("Starting up!")
print(f"TENANT_ID: {TENANT_ID}")
print(f"MAX_PAGINATIONS: {MAX_PAGINATIONS}")
print(f"FROM_TIME: {FROM_TIME}")
print(f"TO_TIME: {TO_TIME}")


headers = {
"x-tenant-id": TENANT_ID,
"x-tenant-key": TENANT_KEY,
}

first_query = """query PAGINATED_SIGNALS(
$fromTime: Timestamp!
$toTime: Timestamp!
){
signals(
paginate: { first: 1 }
where: { _AND: [
{ timestamp : {_GT: $fromTime } }
{ timestamp : {_LT: $toTime } }
]
}
) {
pageInfo {
startCursor
hasNextPage
hasPreviousPage
endCursor
}
edges {
node {
id
type
unit
timestamp
data {
numericValue
}
}
}
}
}"""

paginated_query = """query PAGINATED_SIGNALS(
$cursor: Cursor!
$fromTime: Timestamp!
$toTime: Timestamp!
){
signals(
paginate: {
first: 100
after: $cursor
}
where: { _AND: [
{ timestamp : {_GT: $fromTime } }
{ timestamp : {_LT: $toTime } }
]
}
) {
pageInfo {
startCursor
hasNextPage
hasPreviousPage
endCursor
}
edges {
node {
id
type
unit
timestamp
data {
rawValue
numericValue
}
}
}
}
}"""


def query(query, variables):
body = {"query": query, "variables": variables}
res = requests.post(URL, json=body, headers=headers)
if res.status_code != 200:
print("ERROR, status code not 200...")
try:
result_json = res.json()
except Exception as e:
print("Json decode error: {e}")
if "errors" in result_json.keys():
print(f"Api Error: {result_json['errors']}, exiting...")
quit()
if len(result_json["data"]["signals"]["edges"]) <= 0:
print("No data found, exiting")
quit()
return result_json


def save_and_quit(rows_to_save):
if len(rows_to_save) > 0:
print(f"Saving to csv: {FILENAME}")
with open(FILENAME, "w+", newline="", encoding="utf-8") as new_file:
writer = csv.DictWriter(new_file, fieldnames=rows_to_save[0].keys())
writer.writeheader()
writer.writerows(rows_to_save)
else:
print("No rows to print, exiting")
quit()


result = query(first_query, {"fromTime": FROM_TIME, "toTime": TO_TIME})
cursor = result["data"]["signals"]["pageInfo"]["startCursor"]
has_next_page = result["data"]["signals"]["pageInfo"]["hasNextPage"]
print(f"First query result: {result}")

rows = []
for i in range(MAX_PAGINATIONS):
print(f"Pagination #: {i}")
if not has_next_page:
print("No more data available, stopping pagination")
save_and_quit(rows)
result = query(
paginated_query, {"fromTime": FROM_TIME, "toTime": TO_TIME, "cursor": cursor}
)

for node in result["data"]["signals"]["edges"]:
node_content = node["node"]
row = {
"id": node_content["id"],
"type": node_content["type"],
"unit": node_content["unit"],
"value": float(node_content["data"]["rawValue"]),
"time": node_content["timestamp"],
}
rows.append(row)
cursor = result["data"]["signals"]["pageInfo"]["endCursor"]
has_next_page = result["data"]["signals"]["pageInfo"]["hasNextPage"]

if i == MAX_PAGINATIONS - 1:
print("Max pagination reached, stopping!")
save_and_quit(rows)