changeset 5639:f576957cbb1f

Add support for prev/next/self links when returning paginated results. To do this: 1) change the "data" envelope from an array to a dict 2) move the "data" array to the "collection" property, which is an array of elements in the collection. 3) add an @links dict keyed by link relation: self, next, prev. Each relation is an array of dicts with uri and rel keys. In this case there is only one element, but there is nothing preventing a relation from having multiple URLs. So this follows the formatting needed for the general case. Relations are present only if it makes sense. So the first page has no prev and the last page has no next. 4) add @total_size with the number of elements selected if they were not paginated. Replicates data in the X-Count-Total header. Changed the index to start at 1. So the first page is page_index 1 and not page_index 0. (So I am no longer surprised when I set page_index to 1 and am missing a bunch of records 8-)). Also a small fixup: the JSON response now ends with a newline, so printing the data, or using curl, makes sure that anything printed after the JSON output (like shell prompts) is on a new line. Tests added for all cases.
author John Rouillard <rouilj@ieee.org>
date Sat, 09 Mar 2019 11:06:10 -0500
parents 7e3cceec3f4f
children a82c56a9c2a2
files CHANGES.txt roundup/rest.py test/rest_common.py
diffstat 3 files changed, 153 insertions(+), 40 deletions(-) [+]
line wrap: on
line diff
--- a/CHANGES.txt	Fri Mar 08 22:30:11 2019 -0500
+++ b/CHANGES.txt	Sat Mar 09 11:06:10 2019 -0500
@@ -61,6 +61,10 @@
    - If dict2xml.py is installed, the rest interface can produce an XML
      format response if the accept header is set to text/xml.
      (See: https://pypi.org/project/dict2xml/)
+   - When retrieving a collection, move the list of collection elements
+     to the collection property. Add an @links property with self, next
+     and prev links (where needed). Add @total_size with the size of the
+     entire collection (unpaginated). Pagination index starts at 1 not 0.
   (John Rouillard)
 - issue2550833: the export_csv web action now returns labels/names
   rather than id's. Replace calls to export_csv with the export_csv_id
--- a/roundup/rest.py	Fri Mar 08 22:30:11 2019 -0500
+++ b/roundup/rest.py	Sat Mar 09 11:06:10 2019 -0500
@@ -484,7 +484,7 @@
         filter_props = {}
         page = {
             'size': None,
-            'index': None
+            'index': 1   # setting just size starts at page 1
         }
         for form_field in input.value:
             key = form_field.name
@@ -506,21 +506,43 @@
             obj_list = class_obj.filter(None, filter_props)
 
         # extract result from data
-        result = [
+        result={}
+        result['collection'] = [
             {'id': item_id, 'link': class_path + item_id}
             for item_id in obj_list
             if self.db.security.hasPermission(
                 'View', self.db.getuid(), class_name, itemid=item_id
             )
         ]
+        result_len = len(result['collection'])
 
-        # pagination
-        if page['size'] is not None and page['index'] is not None:
-            page_start = max(page['index'] * page['size'], 0)
-            page_end = min(page_start + page['size'], len(result))
-            result = result[page_start:page_end]
+        # pagination - page_index from 1...N
+        if page['size'] is not None:
+            page_start = max((page['index']-1) * page['size'], 0)
+            page_end = min(page_start + page['size'], result_len)
+            result['collection'] = result['collection'][page_start:page_end]
+            result['@links'] = {}
+            for rel in ('next', 'prev', 'self'):
+                if rel == 'next':
+                    # if current index includes all data, continue
+                    if page['index']*page['size'] > result_len: continue
+                    index=page['index']+1
+                if rel == 'prev':
+                    if page['index'] <= 1: continue
+                    index=page['index']-1
+                if rel == 'self': index=page['index']
 
-        self.client.setHeader("X-Count-Total", str(len(result)))
+                result['@links'][rel] = []
+                result['@links'][rel].append({
+                    'rel': rel,
+                    'uri': "%s/%s?page_index=%s&"%(self.data_path,
+                                                   class_name,index) \
+                       + '&'.join([ "%s=%s"%(field.name,field.value) \
+                         for field in input.value \
+                           if field.name != "page_index"]) })
+
+        result['@total_size'] = result_len
+        self.client.setHeader("X-Count-Total", str(result_len))
         return 200, result
 
     @Routing.route("/data/<:class_name>/<:item_id>", 'GET')
@@ -1356,7 +1378,9 @@
             self.client.response_code = 406
             output = "Content type is not accepted by client"
 
-        return output
+        # Make JSON output end in a newline to
+        # separate it from following text in logs etc.
+        return output + "\n"
 
 
 class RoundupJSONEncoder(json.JSONEncoder):
--- a/test/rest_common.py	Fri Mar 08 22:30:11 2019 -0500
+++ b/test/rest_common.py	Sat Mar 09 11:06:10 2019 -0500
@@ -92,7 +92,13 @@
         # Retrieve all three users.
         results = self.server.get_collection('user', self.empty_form)
         self.assertEqual(self.dummy_client.response_code, 200)
-        self.assertEqual(len(results['data']), 3)
+        self.assertEqual(len(results['data']['collection']), 3)
+        self.assertEqual(results['data']['@total_size'], 3)
+        print self.dummy_client.additional_headers["X-Count-Total"]
+        self.assertEqual(
+            self.dummy_client.additional_headers["X-Count-Total"],
+            "3"
+        )
 
         # Obtain data for 'joe'.
         results = self.server.get_element('user', self.joeid, self.empty_form)
@@ -169,10 +175,13 @@
         ]
         results = self.server.get_collection('issue', form)
         self.assertEqual(self.dummy_client.response_code, 200)
-        self.assertIn(get_obj(base_path, issue_open_norm), results['data'])
-        self.assertIn(get_obj(base_path, issue_open_crit), results['data'])
+        self.assertIn(get_obj(base_path, issue_open_norm),
+                      results['data']['collection'])
+        self.assertIn(get_obj(base_path, issue_open_crit),
+                      results['data']['collection'])
         self.assertNotIn(
-            get_obj(base_path, issue_closed_norm), results['data']
+            get_obj(base_path, issue_closed_norm),
+            results['data']['collection']
         )
 
         # Retrieve all issue status=closed and priority=critical
@@ -183,10 +192,13 @@
         ]
         results = self.server.get_collection('issue', form)
         self.assertEqual(self.dummy_client.response_code, 200)
-        self.assertIn(get_obj(base_path, issue_closed_crit), results['data'])
-        self.assertNotIn(get_obj(base_path, issue_open_crit), results['data'])
+        self.assertIn(get_obj(base_path, issue_closed_crit),
+                      results['data']['collection'])
+        self.assertNotIn(get_obj(base_path, issue_open_crit),
+                         results['data']['collection'])
         self.assertNotIn(
-            get_obj(base_path, issue_closed_norm), results['data']
+            get_obj(base_path, issue_closed_norm),
+            results['data']['collection']
         )
 
         # Retrieve all issue status=closed and priority=normal,critical
@@ -197,39 +209,48 @@
         ]
         results = self.server.get_collection('issue', form)
         self.assertEqual(self.dummy_client.response_code, 200)
-        self.assertIn(get_obj(base_path, issue_closed_crit), results['data'])
-        self.assertIn(get_obj(base_path, issue_closed_norm), results['data'])
-        self.assertNotIn(get_obj(base_path, issue_open_crit), results['data'])
-        self.assertNotIn(get_obj(base_path, issue_open_norm), results['data'])
+        self.assertIn(get_obj(base_path, issue_closed_crit),
+                      results['data']['collection'])
+        self.assertIn(get_obj(base_path, issue_closed_norm),
+                      results['data']['collection'])
+        self.assertNotIn(get_obj(base_path, issue_open_crit),
+                         results['data']['collection'])
+        self.assertNotIn(get_obj(base_path, issue_open_norm),
+                         results['data']['collection'])
 
     def testPagination(self):
         """
-        Retrieve all three users
-        obtain data for 'joe'
+        Test pagination. page_size is required and is an integer
+        starting at 1. page_index is optional and is an integer
+        starting at 1. Verify that pagination links are present
+        when paging, and that @total_size and the X-Count-Total
+        header match the number of items.
         """
         # create sample data
-        for i in range(0, random.randint(5, 10)):
+        for i in range(0, random.randint(8,15)):
             self.db.issue.create(title='foo' + str(i))
 
         # Retrieving all the issues
         results = self.server.get_collection('issue', self.empty_form)
         self.assertEqual(self.dummy_client.response_code, 200)
-        total_length = len(results['data'])
-
-        # Pagination will be 70% of the total result
-        page_size = total_length * 70 // 100
-        page_zero_expected = page_size
-        page_one_expected = total_length - page_zero_expected
+        total_length = len(results['data']['collection'])
+        # Verify no pagination links if paging not used
+        self.assertFalse('@links' in results['data'])
+        self.assertEqual(results['data']['@total_size'], total_length)
+        self.assertEqual(
+            self.dummy_client.additional_headers["X-Count-Total"],
+            str(total_length)
+        )
 
-        # Retrieve page 0
-        form = cgi.FieldStorage()
-        form.list = [
-            cgi.MiniFieldStorage('page_size', page_size),
-            cgi.MiniFieldStorage('page_index', 0)
-        ]
-        results = self.server.get_collection('issue', form)
-        self.assertEqual(self.dummy_client.response_code, 200)
-        self.assertEqual(len(results['data']), page_zero_expected)
+
+        # Pagination will be 45% of the total result
+        # So 2 full pages and 1 partial page.
+        page_size = total_length * 45 // 100
+        page_one_expected = page_size
+        page_two_expected = page_size
+        page_three_expected = total_length - (2*page_one_expected)
+        base_url="http://tracker.example/cgi-bin/roundup.cgi/" \
+                 "bugs/rest/data/issue"
 
         # Retrieve page 1
         form = cgi.FieldStorage()
@@ -239,7 +260,18 @@
         ]
         results = self.server.get_collection('issue', form)
         self.assertEqual(self.dummy_client.response_code, 200)
-        self.assertEqual(len(results['data']), page_one_expected)
+        self.assertEqual(len(results['data']['collection']),
+                         page_one_expected)
+        self.assertTrue('@links' in results['data'])
+        self.assertTrue('self' in results['data']['@links'])
+        self.assertTrue('next' in results['data']['@links'])
+        self.assertFalse('prev' in results['data']['@links'])
+        self.assertEqual(results['data']['@links']['self'][0]['uri'],
+                         "%s?page_index=1&page_size=%s"%(base_url,page_size))
+        self.assertEqual(results['data']['@links']['next'][0]['uri'],
+                         "%s?page_index=2&page_size=%s"%(base_url,page_size))
+
+        page_one_results = results # save this for later
 
         # Retrieve page 2
         form = cgi.FieldStorage()
@@ -249,8 +281,61 @@
         ]
         results = self.server.get_collection('issue', form)
         self.assertEqual(self.dummy_client.response_code, 200)
-        self.assertEqual(len(results['data']), 0)
+        self.assertEqual(len(results['data']['collection']), page_two_expected)
+        self.assertTrue('@links' in results['data'])
+        self.assertTrue('self' in results['data']['@links'])
+        self.assertTrue('next' in results['data']['@links'])
+        self.assertTrue('prev' in results['data']['@links'])
+        self.assertEqual(results['data']['@links']['self'][0]['uri'],
+                         "http://tracker.example/cgi-bin/roundup.cgi/bugs/rest/data/issue?page_index=2&page_size=%s"%page_size)
+        self.assertEqual(results['data']['@links']['next'][0]['uri'],
+                         "http://tracker.example/cgi-bin/roundup.cgi/bugs/rest/data/issue?page_index=3&page_size=%s"%page_size)
+        self.assertEqual(results['data']['@links']['prev'][0]['uri'],
+                         "http://tracker.example/cgi-bin/roundup.cgi/bugs/rest/data/issue?page_index=1&page_size=%s"%page_size)
+        self.assertEqual(results['data']['@links']['self'][0]['rel'],
+                         'self')
+        self.assertEqual(results['data']['@links']['next'][0]['rel'],
+                         'next')
+        self.assertEqual(results['data']['@links']['prev'][0]['rel'],
+                         'prev')
 
+        # Retrieve page 3
+        form = cgi.FieldStorage()
+        form.list = [
+            cgi.MiniFieldStorage('page_size', page_size),
+            cgi.MiniFieldStorage('page_index', 3)
+        ]
+        results = self.server.get_collection('issue', form)
+        self.assertEqual(self.dummy_client.response_code, 200)
+        self.assertEqual(len(results['data']['collection']), page_three_expected)
+        self.assertTrue('@links' in results['data'])
+        self.assertTrue('self' in results['data']['@links'])
+        self.assertFalse('next' in results['data']['@links'])
+        self.assertTrue('prev' in results['data']['@links'])
+        self.assertEqual(results['data']['@links']['self'][0]['uri'],
+                         "http://tracker.example/cgi-bin/roundup.cgi/bugs/rest/data/issue?page_index=3&page_size=%s"%page_size)
+        self.assertEqual(results['data']['@links']['prev'][0]['uri'],
+                         "http://tracker.example/cgi-bin/roundup.cgi/bugs/rest/data/issue?page_index=2&page_size=%s"%page_size)
+
+        # Verify that page_index is optional
+        # Should start at page 1
+        form = cgi.FieldStorage()
+        form.list = [
+            cgi.MiniFieldStorage('page_size', page_size),
+        ]
+        results = self.server.get_collection('issue', form)
+        self.assertEqual(self.dummy_client.response_code, 200)
+        self.assertEqual(len(results['data']['collection']), page_size)
+        self.assertTrue('@links' in results['data'])
+        self.assertTrue('self' in results['data']['@links'])
+        self.assertTrue('next' in results['data']['@links'])
+        self.assertFalse('prev' in results['data']['@links'])
+        self.assertEqual(page_one_results, results)
+
+        # FIXME add tests for out of range once we decide what response
+        # is needed to:
+        #   page_size < 0
+        #   page_index < 0
 
     def testEtagProcessing(self):
         '''

Roundup Issue Tracker: http://roundup-tracker.org/