 # The full license is in the file LICENSE, distributed with this software.
 # -----------------------------------------------------------------------------

+from tornado import gen
 from tornado.web import HTTPError
+from json import dumps

 import qiita_db as qdb
 from .oauth2 import OauthBaseHandler, authenticate_oauth
@@ -44,7 +46,7 @@ def _get_analysis(a_id):

 class APIAnalysisMetadataHandler(OauthBaseHandler):
     @authenticate_oauth
-    def get(self, analysis_id):
+    async def get(self, analysis_id):
         """Retrieves the analysis metadata

         Parameters
@@ -56,15 +58,37 @@ def get(self, analysis_id):
         -------
         dict
             The contents of the analysis keyed by sample id
+
+        Notes
+        -----
+        This response needs to be broken into chunks because we were hitting
+        the maximum size of a response: 2G; based on: https://bit.ly/3CPvyjd
         """
+        chunk_len = 1024 * 1024 * 1  # 1 MiB
+
+        response = None
         with qdb.sql_connection.TRN:
             a = _get_analysis(analysis_id)
             mf_fp = qdb.util.get_filepath_information(
                 a.mapping_file)['fullpath']
-            response = None
             if mf_fp is not None:
                 df = qdb.metadata_template.util.load_template_to_dataframe(
                     mf_fp, index='#SampleID')
-                response = df.to_dict(orient='index')
+                response = dumps(df.to_dict(orient='index'))
+
+        if response is not None:
+            crange = range(chunk_len, len(response) + chunk_len, chunk_len)
+            for i, win in enumerate(crange):
+                # sending the chunk and flushing
+                chunk = response[i * chunk_len:win]
+                self.write(chunk)
+                await self.flush()
+
+                # cleaning the chunk and pausing the coroutine so other
+                # handlers can run; this is required/important based on the
+                # original implementation in https://bit.ly/3CPvyjd
+                del chunk
+                await gen.sleep(0.000000001)  # 1 nanosecond

-        self.write(response)
+        else:
+            self.write(None)
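
The loop above slices the JSON-serialized metadata into 1 MiB windows and writes and flushes each one so the handler never builds a response larger than Tornado's limit. A minimal standalone sketch of the same slicing pattern (plain Python, no Tornado; `payload` is a hypothetical stand-in for the string produced by `dumps(df.to_dict(orient='index'))`) showing that the window range covers the whole body exactly once:

```python
# Minimal sketch of the chunking pattern used in the handler above.
# Assumption: `payload` stands in for the serialized JSON response.
chunk_len = 1024 * 1024 * 1  # 1 MiB, same window size as the handler

payload = "x" * (2 * chunk_len + 123)  # not an exact multiple of chunk_len

pieces = []
crange = range(chunk_len, len(payload) + chunk_len, chunk_len)
for i, win in enumerate(crange):
    # the handler writes and flushes payload[i * chunk_len:win];
    # here the windows are collected so the coverage can be checked
    pieces.append(payload[i * chunk_len:win])

# the windows cover the payload exactly once, in order
assert "".join(pieces) == payload
```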