@@ -22,7 +22,7 @@
 from traitlets.traitlets import HasTraits
 
 from .unixsock import UnixResolver
-from .utils import call_with_asked_args
+from .utils import call_with_asked_args, mime_types_match
 from .websocket import WebSocketHandlerMixin, pingable_ws_connect
 
 
@@ -95,6 +95,15 @@ def get(self, *args):
         self.redirect(urlunparse(dest))
 
 
+COMMON_BINARY_MIME_TYPES = [
+    "image/*",
+    "audio/*",
+    "video/*",
+    "application/*",
+    "text/event-stream",
+]
+
+
 class ProxyHandler(WebSocketHandlerMixin, JupyterHandler):
     """
     A tornado request handler that proxies HTTP and websockets from
@@ -117,10 +126,41 @@ def __init__(self, *args, **kwargs):
             "rewrite_response",
             tuple(),
         )
+        self.progressive = kwargs.pop("progressive", None)
         self._requested_subprotocols = None
         self.update_last_activity = kwargs.pop("update_last_activity", True)
         super().__init__(*args, **kwargs)
 
+    @property
+    def progressive(self):
+        accept_header = self.request.headers.get("Accept")
+
+        if self._progressive is not None:
+            if callable(self._progressive):
+                return self._progressive(accept_header)
+            else:
+                return self._progressive
+
+        # Progressive and RewritableResponse are mutually exclusive
+        if self.rewrite_response:
+            return False
+
+        if accept_header is None:
+            return False
+
+        # If the client can accept multiple types, we will not make the request progressive
+        if "," in accept_header:
+            return False
+
+        return any(
+            mime_types_match(pattern, accept_header)
+            for pattern in COMMON_BINARY_MIME_TYPES
+        )
+
+    @progressive.setter
+    def progressive(self, value):
+        self._progressive = value
+
     # Support/use jupyter_server config arguments allow_origin and allow_origin_pat
     # to enable cross origin requests propagated by e.g. inverting proxies.
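The heuristic the new `progressive` property applies can be restated outside the handler. The sketch below is a minimal, self-contained restatement of that fallback logic; `should_proxy_progressively` is a hypothetical helper name, and `fnmatch` is used only as a stand-in for the `mime_types_match` helper imported from `.utils`, whose implementation is not shown in this diff.

```python
# Standalone sketch of the Accept-header heuristic above (illustration only).
from fnmatch import fnmatch

COMMON_BINARY_MIME_TYPES = [
    "image/*",
    "audio/*",
    "video/*",
    "application/*",
    "text/event-stream",
]


def should_proxy_progressively(accept_header, rewrite_response=()):
    """Mirror of the `progressive` property's fallback logic (hypothetical helper)."""
    if rewrite_response:
        # Response rewriting needs the whole buffered body, so never stream.
        return False
    if accept_header is None:
        # No Accept header: fall back to buffered proxying.
        return False
    if "," in accept_header:
        # Client accepts multiple types: treat as a regular page load and buffer it.
        return False
    # Single-type Accept header: stream if it matches a binary or SSE pattern.
    # fnmatch is only a stand-in for the real mime_types_match() from .utils.
    return any(fnmatch(accept_header, pattern) for pattern in COMMON_BINARY_MIME_TYPES)


print(should_proxy_progressively("video/mp4"))                         # True
print(should_proxy_progressively("text/event-stream"))                 # True
print(should_proxy_progressively("text/html,application/xhtml+xml"))   # False
```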
@@ -376,16 +416,16 @@ async def proxy(self, host, port, proxied_path):
             )
         else:
             client = httpclient.AsyncHTTPClient(force_instance=True)
-        # check if the request is stream request
-        accept_header = self.request.headers.get("Accept")
-        if accept_header == "text/event-stream":
+
+        if self.progressive:
             return await self._proxy_progressive(host, port, proxied_path, body, client)
         else:
             return await self._proxy_buffered(host, port, proxied_path, body, client)
 
     async def _proxy_progressive(self, host, port, proxied_path, body, client):
         # Proxy in progressive flush mode, whenever chunks are received. Potentially slower but get results quicker for voila
         # Set up handlers so we can progressively flush result
+        self.log.debug(f"Request to '{proxied_path}' will be proxied progressive")
 
         headers_raw = []
 
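The dispatch above replaces the old exact match on `Accept: text/event-stream` with the `progressive` property. As background on what "progressive flush mode" means at the Tornado level, here is a minimal, self-contained sketch of flushing chunks downstream as they arrive; it is not the handler's actual implementation, and the upstream URL is a placeholder.

```python
# Minimal sketch of progressive flushing with Tornado (illustration only).
from tornado import httpclient, web


class ProgressiveSketchHandler(web.RequestHandler):
    async def get(self):
        client = httpclient.AsyncHTTPClient()

        def streaming_callback(chunk):
            # Forward each upstream chunk downstream as soon as it arrives,
            # instead of buffering the whole response body first.
            self.write(chunk)
            self.flush()

        # "http://127.0.0.1:9999/stream" is a placeholder upstream URL.
        await client.fetch(
            "http://127.0.0.1:9999/stream",
            streaming_callback=streaming_callback,
        )
        self.finish()
```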
@@ -466,9 +506,10 @@ def streaming_callback(chunk):
             self.write(response.body)
 
     async def _proxy_buffered(self, host, port, proxied_path, body, client):
-        req = self._build_proxy_request(host, port, proxied_path, body)
+        self.log.debug(f"Request to '{proxied_path}' will be proxied buffered")
 
-        self.log.debug(f"Proxying request to {req.url}")
+        req = self._build_proxy_request(host, port, proxied_path, body)
+        self.log.debug(f"Proxy request URL: {req.url}")
 
         try:
             # Here, "response" is a tornado.httpclient.HTTPResponse object.