
Commit 63a4bb3

ganisback authored and yuvipanda committed
fix pre-commit
1 parent fad72ab commit 63a4bb3

File tree

3 files changed: +38 −26 lines

jupyter_server_proxy/handlers.py

Lines changed: 34 additions & 22 deletions
@@ -4,7 +4,8 @@
 Some original inspiration from https://github.com/senko/tornado-proxy
 """
 
-import os, json, re
+import os
+import re
 import socket
 from asyncio import Lock
 from copy import copy
@@ -307,7 +308,7 @@ def _build_proxy_request(self, host, port, proxied_path, body, **extra_opts):
             decompress_response=False,
             headers=headers,
             **self.proxy_request_options(),
-            **extra_opts,
+            **extra_opts,
         )
         return req
 
@@ -376,12 +377,12 @@ async def proxy(self, host, port, proxied_path):
         else:
             client = httpclient.AsyncHTTPClient(force_instance=True)
         # check if the request is stream request
-        accept_header = self.request.headers.get('Accept')
-        if accept_header == 'text/event-stream':
+        accept_header = self.request.headers.get("Accept")
+        if accept_header == "text/event-stream":
             return await self._proxy_progressive(host, port, proxied_path, body, client)
         else:
             return await self._proxy_buffered(host, port, proxied_path, body, client)
-
+
     async def _proxy_progressive(self, host, port, proxied_path, body, client):
         # Proxy in progressive flush mode, whenever chunks are received. Potentially slower but get results quicker for voila
         # Set up handlers so we can progressively flush result
@@ -390,15 +391,19 @@ async def _proxy_progressive(self, host, port, proxied_path, body, client):
 
         def dump_headers(headers_raw):
             for line in headers_raw:
-                r = re.match('^([a-zA-Z0-9\-_]+)\s*\:\s*([^\r\n]+)[\r\n]*$', line)
+                r = re.match("^([a-zA-Z0-9\\-_]+)\\s*\\:\\s*([^\r\n]+)[\r\n]*$", line)
                 if r:
-                    k,v = r.groups([1,2])
-                    if k not in ('Content-Length', 'Transfer-Encoding',
-                                 'Content-Encoding', 'Connection'):
+                    k, v = r.groups([1, 2])
+                    if k not in (
+                        "Content-Length",
+                        "Transfer-Encoding",
+                        "Content-Encoding",
+                        "Connection",
+                    ):
                         # some header appear multiple times, eg 'Set-Cookie'
-                        self.set_header(k,v)
+                        self.set_header(k, v)
                 else:
-                    r = re.match('^HTTP[^\s]* ([0-9]+)', line)
+                    r = re.match(r"^HTTP[^\s]* ([0-9]+)", line)
                     if r:
                         status_code = r.group(1)
                         self.set_status(int(status_code))
@@ -414,20 +419,27 @@ def streaming_callback(chunk):
            # record activity at start and end of requests
            self._record_activity()
            # Do this here, not in header_callback so we can be sure headers are out of the way first
-            dump_headers(headers_raw) # array will be empty if this was already called before
+            dump_headers(
+                headers_raw
+            )  # array will be empty if this was already called before
            self.write(chunk)
            self.flush()
 
         # Now make the request
 
-        req = self._build_proxy_request(host, port, proxied_path, body,
-                                        streaming_callback=streaming_callback,
-                                        header_callback=header_callback)
-
+        req = self._build_proxy_request(
+            host,
+            port,
+            proxied_path,
+            body,
+            streaming_callback=streaming_callback,
+            header_callback=header_callback,
+        )
+
         # no timeout for stream api
         req.request_timeout = 7200
         req.connect_timeout = 600
-
+
         try:
             response = await client.fetch(req, raise_error=False)
         except httpclient.HTTPError as err:
@@ -444,15 +456,16 @@ def streaming_callback(chunk):
             self.set_status(500)
             self.write(str(response.error))
         else:
-            self.set_status(response.code, response.reason) # Should already have been set
+            self.set_status(
+                response.code, response.reason
+            )  # Should already have been set
 
-            dump_headers(headers_raw) # Should already have been emptied
+            dump_headers(headers_raw)  # Should already have been emptied
 
-            if response.body: # Likewise, should already be chunked out and flushed
+            if response.body:  # Likewise, should already be chunked out and flushed
                 self.write(response.body)
 
     async def _proxy_buffered(self, host, port, proxied_path, body, client):
-
         req = self._build_proxy_request(host, port, proxied_path, body)
 
         self.log.debug(f"Proxying request to {req.url}")
@@ -535,7 +548,6 @@ def rewrite_pe(rewritable_response: RewritableResponse):
         if rewritten_response.body:
             self.write(rewritten_response.body)
 
-
     async def proxy_open(self, host, port, proxied_path=""):
         """
         Called when a client opens a websocket connection.
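
For context, the dump_headers helper in the hunks above uses two regular expressions: one to split a raw header line into a name/value pair, and one to pull the numeric status code out of the HTTP status line. Below is a minimal standalone sketch of that matching logic; the two patterns are taken from the diff, while the sample lines are hypothetical and only for illustration.

import re

# Hypothetical raw header block, one line at a time, roughly as tornado's
# header_callback would deliver it (sample data for illustration only).
raw_lines = [
    "HTTP/1.1 200 OK\r\n",
    "Content-Type: text/event-stream\r\n",
    "Transfer-Encoding: chunked\r\n",
]

for line in raw_lines:
    # Header pattern from the diff: "Name: value" with optional trailing CR/LF.
    m = re.match(r"^([a-zA-Z0-9\-_]+)\s*\:\s*([^\r\n]+)[\r\n]*$", line)
    if m:
        name, value = m.group(1), m.group(2)
        print("header:", name, "=", value)
    else:
        # Status-line pattern from the diff: extract the numeric status code.
        m = re.match(r"^HTTP[^\s]* ([0-9]+)", line)
        if m:
            print("status:", int(m.group(1)))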

tests/resources/jupyter_server_config.py

Lines changed: 3 additions & 3 deletions
@@ -42,10 +42,10 @@ def cats_only(response, path):
         response.code = 403
         response.body = b"dogs not allowed"
 
+
 def my_env():
-    return {
-        "MYVAR": "String with escaped {{var}}"
-    }
+    return {"MYVAR": "String with escaped {{var}}"}
+
 
 c.ServerProxy.servers = {
     "python-http": {

tests/test_proxies.py

Lines changed: 1 addition & 1 deletion
@@ -375,7 +375,7 @@ def streaming_cb(data):
     )
     assert times_called == limit
     assert all([0.45 < t < 3.0 for t in stream_read_intervals])
-    assert stream_data == [b'data: 0\n\n', b'data: 1\n\n', b'data: 2\n\n']
+    assert stream_data == [b"data: 0\n\n", b"data: 1\n\n", b"data: 2\n\n"]
 
 
 async def test_server_proxy_websocket_messages(
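
The asserted byte strings follow the Server-Sent Events wire format that the "text/event-stream" branch in handlers.py streams through progressively: each event is a "data: ..." line terminated by a blank line. A minimal sketch of unpacking those same chunks (the unpacking helper is illustrative only and not part of the test suite):

# Each asserted chunk is one SSE event: b"data: <payload>\n\n". Stripping the
# framing recovers the payloads the upstream server emitted.
stream_data = [b"data: 0\n\n", b"data: 1\n\n", b"data: 2\n\n"]

payloads = [
    chunk.rstrip(b"\n").removeprefix(b"data: ")  # drop trailing newlines and the "data: " prefix
    for chunk in stream_data
]
assert payloads == [b"0", b"1", b"2"]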

0 commit comments
