Skip to content

Commit f43a42d

Browse files
committed
add download file feature
1 parent d6f4816 commit f43a42d

File tree

6 files changed

+196
-146
lines changed

6 files changed

+196
-146
lines changed

jupyterlab_s3_browser/_version.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,7 @@ def _fetchVersion():
1717
pass
1818

1919
raise FileNotFoundError( # noqa: F821
20-
"Could not find package.json under dir {}".format(HERE)
21-
)
20+
"Could not find package.json under dir {}".format(HERE))
2221

2322

2423
__version__ = _fetchVersion()

jupyterlab_s3_browser/handlers.py

Lines changed: 81 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -4,19 +4,20 @@
44
import base64
55
import json
66
import logging
7-
from pathlib import Path
87

98
import boto3
10-
import s3fs
119
import tornado
1210
from botocore.exceptions import NoCredentialsError
1311
from jupyter_server.base.handlers import APIHandler
1412
from jupyter_server.utils import url_path_join
13+
from pathlib import Path
14+
15+
import s3fs
16+
import boto3
1517

1618

1719
class DirectoryNotEmptyException(Exception):
1820
"""Raise for attempted deletions of non-empty directories"""
19-
2021
pass
2122

2223

@@ -48,7 +49,7 @@ def create_s3_resource(config):
4849
)
4950

5051
else:
51-
return boto3.resource("s3")
52+
return boto3.resource('s3')
5253

5354

5455
def _test_aws_s3_role_access():
@@ -57,10 +58,11 @@ def _test_aws_s3_role_access():
5758
"""
5859
test = boto3.resource("s3")
5960
all_buckets = test.buckets.all()
60-
result = [
61-
{"name": bucket.name + "/", "path": bucket.name + "/", "type": "directory"}
62-
for bucket in all_buckets
63-
]
61+
result = [{
62+
"name": bucket.name + "/",
63+
"path": bucket.name + "/",
64+
"type": "directory"
65+
} for bucket in all_buckets]
6466
return result
6567

6668

@@ -79,8 +81,7 @@ def has_aws_s3_role_access():
7981
access_key_id = line.split("=", 1)[1]
8082
# aws keys reliably start with AKIA for long-term or ASIA for short-term
8183
if not access_key_id.startswith(
82-
"AKIA"
83-
) and not access_key_id.startswith("ASIA"):
84+
"AKIA") and not access_key_id.startswith("ASIA"):
8485
# if any keys are not valid AWS keys, don't try to authenticate
8586
logging.info(
8687
"Found invalid AWS aws_access_key_id in ~/.aws/credentials file, "
@@ -111,12 +112,11 @@ def test_s3_credentials(endpoint_url, client_id, client_secret, session_token):
111112
aws_session_token=session_token,
112113
)
113114
all_buckets = test.buckets.all()
114-
logging.debug(
115-
[
116-
{"name": bucket.name + "/", "path": bucket.name + "/", "type": "directory"}
117-
for bucket in all_buckets
118-
]
119-
)
115+
logging.debug([{
116+
"name": bucket.name + "/",
117+
"path": bucket.name + "/",
118+
"type": "directory"
119+
} for bucket in all_buckets])
120120

121121

122122
class AuthHandler(APIHandler): # pylint: disable=abstract-method
@@ -177,7 +177,8 @@ def post(self, path=""):
177177
client_secret = req["client_secret"]
178178
session_token = req["session_token"]
179179

180-
test_s3_credentials(endpoint_url, client_id, client_secret, session_token)
180+
test_s3_credentials(endpoint_url, client_id, client_secret,
181+
session_token)
181182

182183
self.config.endpoint_url = endpoint_url
183184
self.config.client_id = client_id
@@ -202,7 +203,51 @@ def convertS3FStoJupyterFormat(result):
202203
}
203204

204205

205-
class S3Handler(APIHandler):
206+
class FilesHandler(APIHandler):
207+
"""
208+
Handles requests for getting files (e.g. for downloading)
209+
"""
210+
211+
@property
212+
def config(self):
213+
return self.settings["s3_config"]
214+
215+
@tornado.web.authenticated
216+
def get(self, path=""):
217+
"""
218+
Takes a path and returns lists of files/objects
219+
and directories/prefixes based on the path.
220+
"""
221+
path = path.removeprefix("/")
222+
223+
try:
224+
if not self.s3fs:
225+
self.s3fs = create_s3fs(self.config)
226+
227+
self.s3fs.invalidate_cache()
228+
229+
with self.s3fs.open(path, "rb") as f:
230+
result = f.read()
231+
232+
except S3ResourceNotFoundException as e:
233+
result = json.dumps({
234+
"error":
235+
404,
236+
"message":
237+
"The requested resource could not be found.",
238+
})
239+
except Exception as e:
240+
logging.error("Exception encountered during GET {}: {}".format(
241+
path, e))
242+
result = json.dumps({"error": 500, "message": str(e)})
243+
244+
self.finish(result)
245+
246+
s3fs = None
247+
s3_resource = None
248+
249+
250+
class ContentsHandler(APIHandler):
206251
"""
207252
Handles requests for getting S3 objects
208253
"""
@@ -230,18 +275,18 @@ def get(self, path=""):
230275
self.s3fs.invalidate_cache()
231276

232277
if (path and not path.endswith("/")) and (
233-
"X-Custom-S3-Is-Dir" not in self.request.headers
278+
"X-Custom-S3-Is-Dir" not in self.request.headers
234279
): # TODO: replace with function
235280
with self.s3fs.open(path, "rb") as f:
236281
result = {
237282
"path": path,
238283
"type": "file",
239-
"content": base64.encodebytes(f.read()).decode("ascii"),
284+
"content":
285+
base64.encodebytes(f.read()).decode("ascii"),
240286
}
241287
else:
242288
raw_result = list(
243-
map(convertS3FStoJupyterFormat, self.s3fs.listdir(path))
244-
)
289+
map(convertS3FStoJupyterFormat, self.s3fs.listdir(path)))
245290
result = list(filter(lambda x: x["name"] != "", raw_result))
246291

247292
except S3ResourceNotFoundException as e:
@@ -250,7 +295,8 @@ def get(self, path=""):
250295
"message": "The requested resource could not be found.",
251296
}
252297
except Exception as e:
253-
logging.error("Exception encountered during GET {}: {}".format(path, e))
298+
logging.error("Exception encountered during GET {}: {}".format(
299+
path, e))
254300
result = {"error": 500, "message": str(e)}
255301

256302
self.finish(json.dumps(result))
@@ -283,7 +329,8 @@ def put(self, path=""):
283329
result = {
284330
"path": path,
285331
"type": "file",
286-
"content": base64.encodebytes(f.read()).decode("ascii"),
332+
"content":
333+
base64.encodebytes(f.read()).decode("ascii"),
287334
}
288335
elif "X-Custom-S3-Move-Src" in self.request.headers:
289336
source = self.request.headers["X-Custom-S3-Move-Src"]
@@ -295,7 +342,8 @@ def put(self, path=""):
295342
result = {
296343
"path": path,
297344
"type": "file",
298-
"content": base64.encodebytes(f.read()).decode("ascii"),
345+
"content":
346+
base64.encodebytes(f.read()).decode("ascii"),
299347
}
300348
elif "X-Custom-S3-Is-Dir" in self.request.headers:
301349
path = path.lower()
@@ -351,14 +399,12 @@ def delete(self, path=""):
351399
objects_matching_prefix = self.s3fs.listdir(path + "/")
352400
is_directory = (len(objects_matching_prefix) > 1) or (
353401
(len(objects_matching_prefix) == 1)
354-
and objects_matching_prefix[0]["Key"] != path
355-
)
402+
and objects_matching_prefix[0]['Key'] != path)
356403

357404
if is_directory:
358405
if (len(objects_matching_prefix) > 1) or (
359406
(len(objects_matching_prefix) == 1)
360-
and objects_matching_prefix[0]["Key"] != path + "/"
361-
):
407+
and objects_matching_prefix[0]['Key'] != path + "/"):
362408
raise DirectoryNotEmptyException()
363409
else:
364410
# for some reason s3fs.rm doesn't work reliably
@@ -393,7 +439,11 @@ def setup_handlers(web_app):
393439

394440
base_url = web_app.settings["base_url"]
395441
handlers = [
396-
(url_path_join(base_url, "jupyterlab_s3_browser", "auth(.*)"), AuthHandler),
397-
(url_path_join(base_url, "jupyterlab_s3_browser", "files(.*)"), S3Handler),
442+
(url_path_join(base_url, "jupyterlab_s3_browser",
443+
"auth(.*)"), AuthHandler),
444+
(url_path_join(base_url, "jupyterlab_s3_browser",
445+
"contents(.*)"), ContentsHandler),
446+
(url_path_join(base_url, "jupyterlab_s3_browser",
447+
"files(.*)"), FilesHandler),
398448
]
399449
web_app.add_handlers(host_pattern, handlers)

jupyterlab_s3_browser/tests/test_get_s3.py

Lines changed: 13 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,11 @@ def test_get_single_bucket():
1212
s3.create_bucket(Bucket=bucket_name)
1313

1414
result = jupyterlab_s3_browser.get_s3_objects_from_path(s3, "/")
15-
assert result == [{"name": bucket_name, "type": "directory", "path": bucket_name}]
15+
assert result == [{
16+
"name": bucket_name,
17+
"type": "directory",
18+
"path": bucket_name
19+
}]
1620

1721

1822
@mock_s3
@@ -24,10 +28,11 @@ def test_get_multiple_buckets():
2428
s3.create_bucket(Bucket=bucket_name)
2529

2630
result = jupyterlab_s3_browser.get_s3_objects_from_path(s3, "/")
27-
expected_result = [
28-
{"name": bucket_name, "type": "directory", "path": bucket_name}
29-
for bucket_name in bucket_names
30-
]
31+
expected_result = [{
32+
"name": bucket_name,
33+
"type": "directory",
34+
"path": bucket_name
35+
} for bucket_name in bucket_names]
3136
assert result == expected_result
3237

3338

@@ -60,6 +65,6 @@ def test_get_files_inside_bucket():
6065
},
6166
]
6267
print(result)
63-
assert sorted(result, key=lambda i: i["name"]) == sorted(
64-
expected_result, key=lambda i: i["name"]
65-
)
68+
assert sorted(result,
69+
key=lambda i: i["name"]) == sorted(expected_result,
70+
key=lambda i: i["name"])

setup.py

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,8 @@
3737

3838
data_files_spec = [
3939
("share/jupyter/labextensions/%s" % labext_name, str(lab_path), "**"),
40-
("share/jupyter/labextensions/%s" % labext_name, str(HERE), "install.json"),
40+
("share/jupyter/labextensions/%s" % labext_name, str(HERE),
41+
"install.json"),
4142
(
4243
"etc/jupyter/jupyter_server_config.d",
4344
"jupyter-config/jupyter_server_config.d",
@@ -50,10 +51,9 @@
5051
),
5152
]
5253

53-
54-
cmdclass = create_cmdclass(
55-
"jsdeps", package_data_spec=package_data_spec, data_files_spec=data_files_spec
56-
)
54+
cmdclass = create_cmdclass("jsdeps",
55+
package_data_spec=package_data_spec,
56+
data_files_spec=data_files_spec)
5757

5858
js_command = combine_commands(
5959
install_npm(HERE, build_cmd="build:prod", npm=["jlpm"]),
@@ -99,7 +99,9 @@
9999
"singleton-decorator",
100100
"jupyterlab>=2.0.0",
101101
],
102-
extras_require={"dev": ["jupyter_packaging~=0.7.9", "pytest", "moto", "coverage"]},
102+
extras_require={
103+
"dev": ["jupyter_packaging~=0.7.9", "pytest", "moto", "coverage"]
104+
},
103105
)
104106

105107
if __name__ == "__main__":

0 commit comments

Comments
 (0)