Merge pull request #131 from lcmcninch/get-pages

Implement a means of getting more than 1000 data points
Brent Rubell 2022-03-10 14:53:02 -05:00 committed by GitHub
commit 06df42d534
3 changed files with 71 additions and 10 deletions

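A minimal usage sketch of the new behaviour (not part of the commit; the client instance, credentials, and feed key 'Test' are placeholders, following the examples added to the documentation below):

    from Adafruit_IO import Client

    aio = Client('io_username', 'io_key')  # placeholder credentials

    # Default: results are capped at the client's default page limit.
    recent = aio.data('Test')

    # Page through the feed until 2500 points are collected (or the feed runs out).
    many = aio.data('Test', max_results=2500)

    # Fetch every stored point: the client first asks the feed details endpoint
    # for the total count, then pages until it has them all.
    everything = aio.data('Test', max_results=None)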

@@ -23,6 +23,9 @@ from time import struct_time
 import json
 import platform
 import pkg_resources
+import re
+from urllib.parse import urlparse
+from urllib.parse import parse_qs
 # import logging
 import requests
@@ -30,6 +33,8 @@ import requests
 from .errors import RequestError, ThrottlingError
 from .model import Data, Feed, Group, Dashboard, Block, Layout
+DEFAULT_PAGE_LIMIT = 100
 # set outgoing version, pulled from setup.py
 version = pkg_resources.require("Adafruit_IO")[0].version
 default_headers = {
@@ -61,6 +66,9 @@ class Client(object):
         # constructing the path.
         self.base_url = base_url.rstrip('/')
+        # Store the last response of a get or post
+        self._last_response = None
     @staticmethod
     def to_red(data):
         """Hex color feed to red channel.
@@ -112,10 +120,12 @@ class Client(object):
     def _compose_url(self, path):
         return '{0}/api/{1}/{2}/{3}'.format(self.base_url, 'v2', self.username, path)
-    def _get(self, path):
+    def _get(self, path, params=None):
         response = requests.get(self._compose_url(path),
                                 headers=self._headers({'X-AIO-Key': self.key}),
-                                proxies=self.proxies)
+                                proxies=self.proxies,
+                                params=params)
+        self._last_response = response
         self._handle_error(response)
         return response.json()
@@ -125,6 +135,7 @@
                                                         'Content-Type': 'application/json'}),
                                  proxies=self.proxies,
                                  data=json.dumps(data))
+        self._last_response = response
         self._handle_error(response)
         return response.json()
@@ -133,6 +144,7 @@
                                    headers=self._headers({'X-AIO-Key': self.key,
                                                           'Content-Type': 'application/json'}),
                                    proxies=self.proxies)
+        self._last_response = response
         self._handle_error(response)
     # Data functionality.
@@ -242,17 +254,53 @@
         path = "feeds/{0}/data/previous".format(feed)
         return Data.from_dict(self._get(path))
-    def data(self, feed, data_id=None):
+    def data(self, feed, data_id=None, max_results=DEFAULT_PAGE_LIMIT):
         """Retrieve data from a feed. If data_id is not specified then all the data
         for the feed will be returned in an array.
         :param string feed: Name/Key/ID of Adafruit IO feed.
         :param string data_id: ID of the piece of data to delete.
+        :param int max_results: The maximum number of results to return. To
+            return all data, set to None.
         """
-        if data_id is None:
-            path = "feeds/{0}/data".format(feed)
-            return list(map(Data.from_dict, self._get(path)))
-        path = "feeds/{0}/data/{1}".format(feed, data_id)
-        return Data.from_dict(self._get(path))
+        if max_results is None:
+            res = self._get(f'feeds/{feed}/details')
+            max_results = res['details']['data']['count']
+        if data_id:
+            path = "feeds/{0}/data/{1}".format(feed, data_id)
+            return Data.from_dict(self._get(path))
+        params = {'limit': max_results} if max_results else None
+        data = []
+        path = "feeds/{0}/data".format(feed)
+        while len(data) < max_results:
+            data.extend(list(map(Data.from_dict, self._get(path,
+                                                           params=params))))
+            nlink = self.get_next_link()
+            if not nlink:
+                break
+            # Parse the link for the query parameters
+            params = parse_qs(urlparse(nlink).query)
+            if max_results:
+                params['limit'] = max_results - len(data)
+        return data
+    def get_next_link(self):
+        """Parse the `next` page URL in the pagination Link header.
+        This is necessary because of a bug in the API's implementation of the
+        link header. If that bug is fixed, the link would be accessible by
+        response.links['next']['url'] and this method would be broken.
+        :return: The url for the next page of data
+        :rtype: str
+        """
+        if not self._last_response:
+            return
+        link_header = self._last_response.headers['link']
+        res = re.search('rel="next", <(.+?)>', link_header)
+        if not res:
+            return
+        return res.groups()[0]
     def create_data(self, feed, data):
         """Create a new row of data in the specified feed.

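As an aside on the pagination mechanics above: the loop in data() asks get_next_link() for the URL of the next page and reuses its query string as the parameters of the following request. A standalone sketch of that parsing, with a fabricated Link header value (not actual API output) shaped the way the regex expects:

    import re
    from urllib.parse import urlparse, parse_qs

    # Fabricated example of a paginated Link header.
    link_header = ('rel="first", <https://io.adafruit.com/api/v2/user/feeds/Test/data?limit=100>; '
                   'rel="next", <https://io.adafruit.com/api/v2/user/feeds/Test/data?limit=100&after=12345>')

    # Same pattern used by get_next_link() above.
    match = re.search('rel="next", <(.+?)>', link_header)
    next_url = match.groups()[0] if match else None

    # data() feeds this query string back in as params for the next request.
    params = parse_qs(urlparse(next_url).query) if next_url else None
    print(next_url)  # https://io.adafruit.com/api/v2/user/feeds/Test/data?limit=100&after=12345
    print(params)    # {'limit': ['100'], 'after': ['12345']}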

@@ -33,6 +33,19 @@ You can get all of the data for a feed by using the ``data(feed)`` method. The r
     for d in data:
         print('Data value: {0}'.format(d.value))
+By default, the maximum number of data points returned is 1000. This limit can be changed by using the max_results parameter.
+.. code-block:: python
+    # Get less than the default number of data points
+    data = aio.data('Test', max_results=100)
+    # Get more than the default number of data points
+    data = aio.data('Test', max_results=2000)
+    # Get all of the points
+    data = aio.data('Test', max_results=None)
 You can also get a specific value by ID by using the ``feeds(feed, data_id)`` method. This will return a single piece of feed data with the provided data ID if it exists in the feed. The returned object will be an instance of the Data class.

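Relatedly, a short sketch of the single-value lookup mentioned at the end of the section above; in the client code in this commit that lookup goes through data(feed, data_id) (reusing the aio client from the first sketch; the ID is a placeholder):

    # Fetch one point by its ID and inspect it.
    point = aio.data('Test', '0EXAMPLE-DATA-ID')
    print(point.id, point.value)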

@@ -64,7 +64,7 @@ class TestClient(base.IOTestCase):
     def empty_feed(self, client, feed):
         # Remove all the data from a specified feed (but don't delete the feed).
-        data = client.data(feed)
+        data = client.data(feed, max_results=None)
         for d in data:
             client.delete(feed, d.id)
@@ -406,4 +406,4 @@ class TestClient(base.IOTestCase):
 if __name__ == "__main__":
-    unittest.main()
+    unittest.main()