# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------

+from tornado import gen
from tornado.web import HTTPError
+from tornado.iostream import StreamClosedError
+from json import dumps

import qiita_db as qdb
from .oauth2 import OauthBaseHandler, authenticate_oauth
@@ -44,7 +47,7 @@ def _get_analysis(a_id):

class APIAnalysisMetadataHandler(OauthBaseHandler):
    @authenticate_oauth
-    def get(self, analysis_id):
+    async def get(self, analysis_id):
        """Retrieves the analysis metadata

        Parameters
@@ -56,15 +59,34 @@ def get(self, analysis_id):
        -------
        dict
            The contents of the analysis keyed by sample id
+
+        Notes
+        -----
+        This response needs to be broken into chunks because we were hitting
+        the max size of a response: 2G; based on: https://bit.ly/3CPvyjd
        """
+        chunk_len = 1024 * 1024 * 1  # 1 MiB
+
        with qdb.sql_connection.TRN:
            a = _get_analysis(analysis_id)
            mf_fp = qdb.util.get_filepath_information(
                a.mapping_file)['fullpath']
-            response = None
            if mf_fp is not None:
                df = qdb.metadata_template.util.load_template_to_dataframe(
                    mf_fp, index='#SampleID')
-                response = df.to_dict(orient='index')
+                response = dumps(df.to_dict(orient='index'))

-        self.write(response)
+                crange = range(chunk_len, len(response) + chunk_len, chunk_len)
+                for i, win in enumerate(crange):
+                    chunk = response[i * chunk_len:win]
+                    try:
+                        self.write(chunk)
+                        await self.flush()
+                    except StreamClosedError:
+                        break
+                    finally:
+                        del chunk
+                        # pause the coroutine so other handlers can run
+                        await gen.sleep(0.000000001)  # 1 nanosecond
+            else:
+                self.write(None)
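For reference, a minimal client-side sketch of consuming the chunked response above. The URL, analysis id, and token are placeholders (the actual endpoint path is not shown in this diff); since the handler flushes a single JSON document in 1 MiB pieces, the client can stream the body and parse it once the stream is exhausted:

```python
# Hypothetical client sketch -- URL, token, and analysis id are assumptions,
# not part of the diff above.
import json

import requests

ANALYSIS_ID = 1
URL = 'https://qiita.example.org/api/v1/analysis/%d/metadata' % ANALYSIS_ID
HEADERS = {'Authorization': 'Bearer <oauth2-token>'}

# stream=True avoids holding the whole (potentially multi-GB) body in one read
with requests.get(URL, headers=HEADERS, stream=True) as resp:
    resp.raise_for_status()
    # the handler writes one JSON document split into 1 MiB chunks, so the
    # pieces are simply concatenated before parsing
    body = b''.join(resp.iter_content(chunk_size=1024 * 1024))

metadata = json.loads(body)
print('%d samples received' % len(metadata))
```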