diff --git a/.flake8 b/.flake8 new file mode 100644 index 00000000000..0c0ab5ad1ad --- /dev/null +++ b/.flake8 @@ -0,0 +1,4 @@ +[flake8] +ignore = E111, E114 +exclude = .git +max-complexity = 10 diff --git a/.style.yapf b/.style.yapf index 9668b4b10a2..d570325f521 100644 --- a/.style.yapf +++ b/.style.yapf @@ -1,4 +1,5 @@ [style] based_on_style = google -indent_width: 2 -continuation_indent_width: 2 +indent_width = 2 +continuation_indent_width = 4 +spaces_before_comment = 2 diff --git a/bootstrap/Makefile b/bootstrap/Makefile index e5a300cc7a3..84a7b169103 100755 --- a/bootstrap/Makefile +++ b/bootstrap/Makefile @@ -109,7 +109,7 @@ test-known-platforms-generate: test-known-platforms-init # By default we disable loading plugins and link dockerfordesktop into the kfctl binary (static) static: @ex pkg/apis/apps/group.go len(data): - return False + # if our test file is smaller than the reference it surely fails! + if len(ref) > len(data): + return False - # trim our file to the same number of lines as the reference file - data = data[:len(ref)] + # trim our file to the same number of lines as the reference file + data = data[:len(ref)] - p = regexs["year"] - for d in data: - if p.search(d): - return False + p = regexs["year"] + for d in data: + if p.search(d): + return False - # Replace all occurrences of the regex "2016|2015|2014" with "YEAR" - p = regexs["date"] - for i, d in enumerate(data): - (data[i], found) = p.subn('YEAR', d) - if found != 0: - break + # Replace all occurrences of the regex "2016|2015|2014" with "YEAR" + p = regexs["date"] + for i, d in enumerate(data): + (data[i], found) = p.subn('YEAR', d) + if found != 0: + break - # if we don't match the reference at this point, fail - if ref != data: - return False + # if we don't match the reference at this point, fail + if ref != data: + return False + + return True - return True def file_extension(filename): - return os.path.splitext(filename)[1].split(".")[-1].lower() + return os.path.splitext(filename)[1].split(".")[-1].lower() + skipped_dirs = ['Godeps', 'vendor', 'third_party', '_gopath', '_output', '.git'] + + def normalize_files(files): - newfiles = [] - for pathname in files: - if any(x in pathname for x in skipped_dirs): - continue - newfiles.append(pathname) - for i, pathname in enumerate(newfiles): - if not os.path.isabs(pathname): - newfiles[i] = os.path.join(rootdir, pathname) - return newfiles + newfiles = [] + for pathname in files: + if any(x in pathname for x in skipped_dirs): + continue + newfiles.append(pathname) + for i, pathname in enumerate(newfiles): + if not os.path.isabs(pathname): + newfiles[i] = os.path.join(rootdir, pathname) + return newfiles + def get_files(extensions): - files = [] - if len(args.filenames) > 0: - files = args.filenames - else: - for root, dirs, walkfiles in os.walk(rootdir): - # don't visit certain dirs. This is just a performance improvement - # as we would prune these later in normalize_files(). 
But doing it - # cuts down the amount of filesystem walking we do and cuts down - # the size of the file list - for d in skipped_dirs: - if d in dirs: - dirs.remove(d) - - for name in walkfiles: - pathname = os.path.join(root, name) - files.append(pathname) - - files = normalize_files(files) - outfiles = [] - for pathname in files: - extension = file_extension(pathname) - if extension in extensions: - outfiles.append(pathname) - return outfiles + files = [] + if len(args.filenames) > 0: + files = args.filenames + else: + for root, dirs, walkfiles in os.walk(rootdir): + # don't visit certain dirs. This is just a performance improvement as we + # would prune these later in normalize_files(). But doing it cuts down the + # amount of filesystem walking we do and cuts down the size of the file + # list + for d in skipped_dirs: + if d in dirs: + dirs.remove(d) + + for name in walkfiles: + pathname = os.path.join(root, name) + files.append(pathname) + + files = normalize_files(files) + outfiles = [] + for pathname in files: + extension = file_extension(pathname) + if extension in extensions: + outfiles.append(pathname) + return outfiles + def get_regexs(): - regexs = {} - # Search for "YEAR" which exists in the boilerplate, but shouldn't in the real thing - regexs["year"] = re.compile( 'YEAR' ) - # dates can be 2014, 2015 or 2016, company holder names can be anything - regexs["date"] = re.compile( '(2014|2015|2016|2017|2018|2019|2020)' ) - # strip // +build \n\n build constraints - regexs["go_build_constraints"] = re.compile(r"^(// \+build.*\n)+\n", re.MULTILINE) - # strip #!.* from shell scripts - regexs["shebang"] = re.compile(r"^(#!.*\n)\n*", re.MULTILINE) - return regexs + regexs = {} + # Search for "YEAR" which exists in the boilerplate, but shouldn't in the + # real thing + regexs["year"] = re.compile('YEAR') + # dates can be 2014, 2015 or 2016, company holder names can be anything + regexs["date"] = re.compile('(2014|2015|2016|2017|2018|2019|2020)') + # strip // +build \n\n build constraints + regexs["go_build_constraints"] = re.compile(r"^(// \+build.*\n)+\n", + re.MULTILINE) + # strip #!.* from shell scripts + regexs["shebang"] = re.compile(r"^(#!.*\n)\n*", re.MULTILINE) + return regexs + def main(): - regexs = get_regexs() - refs = get_refs() - filenames = get_files(refs.keys()) + regexs = get_regexs() + refs = get_refs() + filenames = get_files(refs.keys()) + + for filename in filenames: + if not file_passes(filename, refs, regexs): + print(filename, file=sys.stdout) - for filename in filenames: - if not file_passes(filename, refs, regexs): - print(filename, file=sys.stdout) if __name__ == "__main__": sys.exit(main()) diff --git a/components/build_image.py b/components/build_image.py index fd4d805dab1..18346f52b1f 100644 --- a/components/build_image.py +++ b/components/build_image.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """Script to build images. For example, @@ -14,7 +15,10 @@ import yaml -def run(command, cwd=None, env=None, polling_interval=datetime.timedelta(seconds=1)): +def run(command, + cwd=None, + env=None, + polling_interval=datetime.timedelta(seconds=1)): """Run a subprocess. Copied from kubeflow/test so it's easier to run locally. TODO(lunkai): refactor to dedup. 
@@ -35,7 +39,11 @@ def run(command, cwd=None, env=None, polling_interval=datetime.timedelta(seconds logging.info("Running: Environment:\n%s", "\n".join(lines)) process = subprocess.Popen( - command, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + command, + cwd=cwd, + env=env, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) logging.info("Subprocess output:\n") output = [] @@ -53,12 +61,13 @@ def run(command, cwd=None, env=None, polling_interval=datetime.timedelta(seconds logging.info(line.strip()) if process.returncode != 0: - raise subprocess.CalledProcessError(process.returncode, - "cmd: {0} exited with code {1}".format( - " ".join(command), process.returncode), "\n".join(output)) + raise subprocess.CalledProcessError( + process.returncode, "cmd: {0} exited with code {1}".format( + " ".join(command), process.returncode), "\n".join(output)) return "\n".join(output) + def wait_for_docker_daemon(timeout=60): """Waiting for docker daemon to be ready. This is needed in DinD scenario.""" start_time = time.time() @@ -75,6 +84,7 @@ def wait_for_docker_daemon(timeout=60): # TODO(lunkai): use TimeoutError when we use py3. raise RuntimeError + def get_build_args(config): """ Make the list of params for docker build from config. @@ -86,13 +96,16 @@ def get_build_args(config): config_list = [key + "=" + val for key, val in config.items()] return list(chain.from_iterable([["--build-arg", x] for x in config_list])) + def get_config(context_dir, version): """Returns a dict of configuration from the version-config file.""" - config_file = os.path.join(context_dir, "versions", version, "version-config.json") + config_file = os.path.join(context_dir, "versions", version, + "version-config.json") with open(config_file) as f: config = yaml.load(f) return config + def build_tf_serving(args): wait_for_docker_daemon() dir_path = os.path.dirname(os.path.realpath(__file__)) @@ -101,19 +114,23 @@ def build_tf_serving(args): config = get_config(context_dir, version) build_args = get_build_args(config) - image_name = "{}/tensorflow-serving-{}:{}".format(args.registry, version, args.tag) + image_name = "{}/tensorflow-serving-{}:{}".format(args.registry, version, + args.tag) - command = list(chain( - ["docker", "build", "--pull"], - build_args, - ["-t", image_name, "-f", "Dockerfile.{}".format(args.platform), "."] - )) + command = list( + chain( + ["docker", "build", "--pull"], build_args, + ["-t", image_name, "-f", "Dockerfile.{}".format(args.platform), "."])) run(command, cwd=context_dir) if args.push_gcr: - run(["gcloud", "auth", "activate-service-account", "--key-file", os.environ['GOOGLE_APPLICATION_CREDENTIALS']]) + run([ + "gcloud", "auth", "activate-service-account", "--key-file", + os.environ['GOOGLE_APPLICATION_CREDENTIALS'] + ]) run(["gcloud", "docker", "--", "push", image_name]) + def build_tf_notebook(args): wait_for_docker_daemon() dir_path = os.path.dirname(os.path.realpath(__file__)) @@ -125,47 +142,35 @@ def build_tf_notebook(args): image_name = "{}/tensorflow-{}-notebook-{}:{}".format( args.registry, args.tf_version, args.platform, args.tag) - command = list(chain( - ["docker", "build", "--pull"], - build_args, - ["-t", image_name, "-f", "Dockerfile", "."] - )) + command = list( + chain(["docker", "build", "--pull"], build_args, + ["-t", image_name, "-f", "Dockerfile", "."])) run(command, cwd=context_dir) if args.push_gcr: - run(["gcloud", "auth", "activate-service-account", "--key-file", os.environ['GOOGLE_APPLICATION_CREDENTIALS']]) + run([ + "gcloud", "auth", 
"activate-service-account", "--key-file", + os.environ['GOOGLE_APPLICATION_CREDENTIALS'] + ]) run(["gcloud", "docker", "--", "push", image_name]) + def main(): parser = argparse.ArgumentParser() subparsers = parser.add_subparsers() parser.add_argument( - "--registry", - default="gcr.io/kubeflow-images-public", - help="The registry of the image" - ) - parser.add_argument( - "--tag", - default="latest", - help="The image tag" - ) - parser.add_argument( - "--tf_version", - default="1.6", - help="Tensorflow version" - ) - parser.add_argument( - "--platform", - default="cpu", - help="cpu or gpu" - ) + "--registry", + default="gcr.io/kubeflow-images-public", + help="The registry of the image") + parser.add_argument("--tag", default="latest", help="The image tag") + parser.add_argument("--tf_version", default="1.6", help="Tensorflow version") + parser.add_argument("--platform", default="cpu", help="cpu or gpu") parser.add_argument( - "--push_gcr", - action='store_true', - default=False, - help="Whether to push the image after building." - ) + "--push_gcr", + action='store_true', + default=False, + help="Whether to push the image after building.") parser_tf_serving = subparsers.add_parser("tf_serving") parser_tf_serving.set_defaults(func=build_tf_serving) @@ -176,11 +181,13 @@ def main(): args = parser.parse_args() args.func(args) + if __name__ == "__main__": logging.basicConfig( - level=logging.INFO, - format=('%(levelname)s|%(asctime)s' - '|%(pathname)s|%(lineno)d| %(message)s'), - datefmt='%Y-%m-%dT%H:%M:%S',) + level=logging.INFO, + format=('%(levelname)s|%(asctime)s' + '|%(pathname)s|%(lineno)d| %(message)s'), + datefmt='%Y-%m-%dT%H:%M:%S', + ) logging.getLogger().setLevel(logging.INFO) main() diff --git a/components/centraldashboard/.gitignore b/components/centraldashboard/.gitignore index 0cfc89f24ad..c346b13427c 100644 --- a/components/centraldashboard/.gitignore +++ b/components/centraldashboard/.gitignore @@ -1,2 +1,2 @@ bower_components/ -node_modules/ \ No newline at end of file +node_modules/ diff --git a/components/echo-server/main.py b/components/echo-server/main.py index b17a0d55ebf..42a17804d6c 100644 --- a/components/echo-server/main.py +++ b/components/echo-server/main.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -11,8 +13,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -"""Google Cloud Endpoints sample application. +""" +Google Cloud Endpoints sample application. Demonstrates how to create a simple echo API as well as how to deal with various authentication methods. @@ -26,73 +28,75 @@ from flask_cors import cross_origin from six.moves import http_client - app = Flask(__name__) def _base64_decode(encoded_str): - # Add paddings manually if necessary. - num_missed_paddings = 4 - len(encoded_str) % 4 - if num_missed_paddings != 4: - encoded_str += b'=' * num_missed_paddings - return base64.b64decode(encoded_str).decode('utf-8') + # Add paddings manually if necessary. 
+ num_missed_paddings = 4 - len(encoded_str) % 4 + if num_missed_paddings != 4: + encoded_str += b'=' * num_missed_paddings + return base64.b64decode(encoded_str).decode('utf-8') @app.route('/echo', methods=['POST']) def echo(): - """Simple echo service.""" - message = request.get_json().get('message', '') - return jsonify({'message': message}) + """Simple echo service.""" + message = request.get_json().get('message', '') + return jsonify({'message': message}) + @app.route('/') @app.route('/headers') def headers(): - return jsonify({'headers': request.headers.to_list()}) + return jsonify({'headers': request.headers.to_list()}) + def auth_info(): - """Retrieves the authenication information from Google Cloud Endpoints.""" - encoded_info = request.headers.get('X-Endpoint-API-UserInfo', None) + """Retrieves the authenication information from Google Cloud Endpoints.""" + encoded_info = request.headers.get('X-Endpoint-API-UserInfo', None) - if encoded_info: - info_json = _base64_decode(encoded_info) - user_info = json.loads(info_json) - else: - user_info = {'id': 'anonymous'} + if encoded_info: + info_json = _base64_decode(encoded_info) + user_info = json.loads(info_json) + else: + user_info = {'id': 'anonymous'} - return jsonify(user_info) + return jsonify(user_info) @app.route('/auth/info/googlejwt', methods=['GET']) def auth_info_google_jwt(): - """Auth info with Google signed JWT.""" - return auth_info() + """Auth info with Google signed JWT.""" + return auth_info() @app.route('/auth/info/googleidtoken', methods=['GET']) def auth_info_google_id_token(): - """Auth info with Google ID token.""" - return auth_info() + """Auth info with Google ID token.""" + return auth_info() @app.route('/auth/info/firebase', methods=['GET']) @cross_origin(send_wildcard=True) def auth_info_firebase(): - """Auth info with Firebase auth.""" - return auth_info() + """Auth info with Firebase auth.""" + return auth_info() @app.errorhandler(http_client.INTERNAL_SERVER_ERROR) def unexpected_error(e): - """Handle exceptions by returning swagger-compliant json.""" - logging.exception('An error occurred while processing the request.') - response = jsonify({ - 'code': http_client.INTERNAL_SERVER_ERROR, - 'message': 'Exception: {}'.format(e)}) - response.status_code = http_client.INTERNAL_SERVER_ERROR - return response + """Handle exceptions by returning swagger-compliant json.""" + logging.exception('An error occurred while processing the request.') + response = jsonify({ + 'code': http_client.INTERNAL_SERVER_ERROR, + 'message': 'Exception: {}'.format(e) + }) + response.status_code = http_client.INTERNAL_SERVER_ERROR + return response if __name__ == '__main__': - # This is used when running locally. Gunicorn is used to run the - # application on Google App Engine. See entrypoint in app.yaml. - app.run(host='127.0.0.1', port=8080, debug=True) + # This is used when running locally. Gunicorn is used to run the application + # on Google App Engine. See entrypoint in app.yaml. + app.run(host='127.0.0.1', port=8080, debug=True) diff --git a/components/echo-server/main_test.py b/components/echo-server/main_test.py index 6ca6b5093d3..d700fa4fd9b 100644 --- a/components/echo-server/main_test.py +++ b/components/echo-server/main_test.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2016 Google Inc. All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,7 +17,6 @@ import base64 import json import os - import pytest import main @@ -23,60 +24,50 @@ @pytest.fixture def client(monkeypatch): - monkeypatch.chdir(os.path.dirname(main.__file__)) - main.app.testing = True - client = main.app.test_client() - return client + monkeypatch.chdir(os.path.dirname(main.__file__)) + main.app.testing = True + client = main.app.test_client() + return client def test_echo(client): - r = client.post( - '/echo', - data='{"message": "Hello"}', - headers={ - 'Content-Type': 'application/json' - }) + r = client.post( + '/echo', + data='{"message": "Hello"}', + headers={'Content-Type': 'application/json'}) + + assert r.status_code == 200 + data = json.loads(r.data.decode('utf-8')) + assert data['message'] == 'Hello' + + +def test_auth_info(client): + endpoints = [ + '/auth/info/googlejwt', '/auth/info/googleidtoken', '/auth/info/firebase' + ] + + encoded_info = base64.b64encode(json.dumps({'id': '123'}).encode('utf-8')) + + for endpoint in endpoints: + r = client.get(endpoint, headers={'Content-Type': 'application/json'}) assert r.status_code == 200 data = json.loads(r.data.decode('utf-8')) - assert data['message'] == 'Hello' + assert data['id'] == 'anonymous' + r = client.get( + endpoint, + headers={ + 'Content-Type': 'application/json', + 'X-Endpoint-API-UserInfo': encoded_info + }) -def test_auth_info(client): - endpoints = [ - '/auth/info/googlejwt', - '/auth/info/googleidtoken', - '/auth/info/firebase'] - - encoded_info = base64.b64encode(json.dumps({ - 'id': '123' - }).encode('utf-8')) - - for endpoint in endpoints: - r = client.get( - endpoint, - headers={ - 'Content-Type': 'application/json' - }) - - assert r.status_code == 200 - data = json.loads(r.data.decode('utf-8')) - assert data['id'] == 'anonymous' - - r = client.get( - endpoint, - headers={ - 'Content-Type': 'application/json', - 'X-Endpoint-API-UserInfo': encoded_info - }) - - assert r.status_code == 200 - data = json.loads(r.data.decode('utf-8')) - assert data['id'] == '123' + assert r.status_code == 200 + data = json.loads(r.data.decode('utf-8')) + assert data['id'] == '123' def test_cors(client): - r = client.options( - '/auth/info/firebase', headers={'Origin': 'example.com'}) - assert r.status_code == 200 - assert r.headers['Access-Control-Allow-Origin'] == '*' + r = client.options('/auth/info/firebase', headers={'Origin': 'example.com'}) + assert r.status_code == 200 + assert r.headers['Access-Control-Allow-Origin'] == '*' diff --git a/components/https-redirect/main.py b/components/https-redirect/main.py index 05bfd4d677b..d2b5ffacf9c 100644 --- a/components/https-redirect/main.py +++ b/components/https-redirect/main.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -11,9 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -"""A simple flask app to redirect all requests to https. 
-""" +"""A simple flask app to redirect all requests to https.""" import logging @@ -21,10 +21,12 @@ app = Flask(__name__) + @app.route('/healthz') def health_check(): return jsonify({'isHealthy': True}) + @app.route('/') @app.route('/') def all_handler(path=None): @@ -36,8 +38,8 @@ def all_handler(path=None): response = redirect(new_url) - # For "/" we return a 200 (ok) and not a 302 (redirect) because on GKE - # we want to be able to use this to redirect http://mydomain.com/ to + # For "/" we return a 200 (ok) and not a 302 (redirect) because on GKE we want + # to be able to use this to redirect http://mydomain.com/ to # https://mydomain.com/. However, the Ingress sets up the GCP loadbalancer # health check requires that a 200 be served on "/". So if we return a 302 # the backend will be considered unhealthy. @@ -45,11 +47,13 @@ def all_handler(path=None): response.status_code = 200 return response + if __name__ == '__main__': logging.basicConfig( level=logging.INFO, format=('%(levelname)s|%(asctime)s' '|%(pathname)s|%(lineno)d| %(message)s'), - datefmt='%Y-%m-%dT%H:%M:%S',) + datefmt='%Y-%m-%dT%H:%M:%S', + ) logging.getLogger().setLevel(logging.INFO) app.run(host='127.0.0.1', port=8080, debug=False) diff --git a/components/https-redirect/main_test.py b/components/https-redirect/main_test.py index f8c2e24e090..d9b543dc55f 100644 --- a/components/https-redirect/main_test.py +++ b/components/https-redirect/main_test.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,51 +15,47 @@ # limitations under the License. import unittest + import main + class TestRedirect(unittest.TestCase): + def test_non_empty_path(self): main.app.testing = True client = main.app.test_client() endpoint = '/hello/world' - r = client.get( - endpoint, - headers={ - 'Content-Type': 'application/json' - }) + r = client.get(endpoint, headers={'Content-Type': 'application/json'}) self.assertEqual(302, r.status_code) - self.assertEqual(r.data, - '\nRedirecting...\n

Redirecting...\nYou should be redirected automatically to target URL: https://localhost/hello/world. If not click the link.')
+    self.assertEqual( +        r.data, +        '\nRedirecting...\nRedirecting...\nYou should be redirected automatically to target URL: https://localhost/hello/world. If not click the link.'  # noqa: E501 +    )
def test_empty_path(self): main.app.testing = True client = main.app.test_client() endpoint = '/' - r = client.get( - endpoint, - headers={ - 'Content-Type': 'application/json' - }) + r = client.get(endpoint, headers={'Content-Type': 'application/json'}) self.assertEqual(200, r.status_code)
- self.assertEqual(r.data, - '\nRedirecting...\nRedirecting...\nYou should be redirected automatically to target URL: https://localhost/. If not click the link.')
+    self.assertEqual( +        r.data, +        '\nRedirecting...\nRedirecting...\n
You should be redirected automatically to target URL: https://localhost/. If not click the link.' # noqa: E501 + ) def test_health_check(self): main.app.testing = True client = main.app.test_client() endpoint = '/healthz' - r = client.get( - endpoint, - headers={ - 'Content-Type': 'application/json' - }) + r = client.get(endpoint, headers={'Content-Type': 'application/json'}) self.assertEqual(200, r.status_code) + if __name__ == "__main__": unittest.main() diff --git a/components/jupyter-web-app/Makefile b/components/jupyter-web-app/Makefile index bc81209f72b..77164648c9d 100644 --- a/components/jupyter-web-app/Makefile +++ b/components/jupyter-web-app/Makefile @@ -35,4 +35,4 @@ push: build push-latest: push gcloud container images add-tag --quiet $(IMG):$(TAG) $(IMG):latest --verbosity=info - echo created $(IMG):latest \ No newline at end of file + echo created $(IMG):latest diff --git a/components/jupyter-web-app/README.md b/components/jupyter-web-app/README.md index fccef770787..3d411dc2d4f 100644 --- a/components/jupyter-web-app/README.md +++ b/components/jupyter-web-app/README.md @@ -3,19 +3,19 @@ - allow Users to create, connect to and delete Notebooks by specifying custom resources ## Design -The new Jupyter UI uses [Python Flask](http://flask.pocoo.org/) for the backend and HTML/jQuery/Material Design Lite for the frontend. A privileged `ServiceAccount` along with proper `RBAC` resources are associated with the Pod hosting the Flask server. In this manner, the `jupyter-web-app` Pod is allowed to manage Notebook CRs and PVCs in the `kubeflow` namespace. +The new Jupyter UI uses [Python Flask](http://flask.pocoo.org/) for the backend and HTML/jQuery/Material Design Lite for the frontend. A privileged `ServiceAccount` along with proper `RBAC` resources are associated with the Pod hosting the Flask server. In this manner, the `jupyter-web-app` Pod is allowed to manage Notebook CRs and PVCs in the `kubeflow` namespace. Please note that as soon as the Profile Controller supports automatic creation of read/write ServiceAccounts for each `Profile`, the new Jupyter UI will be updated to use the respective JWTs and perform all K8s API requests via [K8s Impersonation](https://kubernetes.io/docs/reference/access-authn-authz/authentication/#user-impersonation). This will ultimately provide isolation of resources between Users and avoid any possible conflicts. For more information about User authentication and e2e workflow see the [Jupyter design doc](http://bit.ly/kf_jupyter_design_doc) ## User Interaction -As soon as the User points his/her browser to http:///jupyter/ he/she will be directed to the new Jupyter UI. By default the UI will try to list the Notebooks in the namespace of its `ServiceAccount`, currently `kubeflow`, as mentioned above. If something goes wrong the UI will notify the user with an appropriate message containing info about the API call error. +As soon as the User points his/her browser to http:///jupyter/ he/she will be directed to the new Jupyter UI. By default the UI will try to list the Notebooks in the namespace of its `ServiceAccount`, currently `kubeflow`, as mentioned above. If something goes wrong the UI will notify the user with an appropriate message containing info about the API call error. -From the Noteooks table he/she can either click the `+` button to create a new Notebook or perform `Delete`/`Connect` actions to an existing one. The UI only performs requests regarding the Notebook CRs to the K8s API server. 
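For reference, the Notebook custom resource behind these requests is the dict assembled by create_notebook_template() in kubeflow/jupyter/utils.py further down in this diff. A minimal sketch, with illustrative name, namespace, image and size values standing in for the spawner-form inputs (none of these concrete values come from the patch itself):

# Sketch only: field values are placeholders for what the spawner form supplies.
from kubernetes import client, config

config.load_kube_config()  # server.py falls back to this when run outside the cluster

notebook = {
    "apiVersion": "kubeflow.org/v1alpha1",
    "kind": "Notebook",
    "metadata": {
        "name": "my-notebook",             # body["nm"] in routes.py
        "namespace": "kubeflow",           # body["ns"]
        "labels": {"app": "my-notebook"},
    },
    "spec": {
        "template": {
            "spec": {
                "containers": [{
                    "name": "my-notebook",
                    # Illustrative image; the default the build script in this diff would tag.
                    "image": "gcr.io/kubeflow-images-public/tensorflow-1.6-notebook-cpu:latest",
                    "resources": {"requests": {"cpu": "0.5", "memory": "1.0Gi"}},
                    "volumeMounts": [{"name": "volume-my-notebook",
                                      "mountPath": "/home/jovyan"}],
                }],
                "ttlSecondsAfterFinished": 300,
                "volumes": [{"name": "volume-my-notebook",
                             "persistentVolumeClaim": {"claimName": "my-workspace"}}],
            }
        }
    },
}

# The web app submits it through the CustomObjectsApi, as create_notebook() in server.py does:
client.CustomObjectsApi().create_namespaced_custom_object(
    "kubeflow.org", "v1alpha1", notebook["metadata"]["namespace"], "notebooks", notebook)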
The management of all child resources(`Service`, `Deployment`) is performed by the Notebook CR Controller. +From the Noteooks table he/she can either click the `+` button to create a new Notebook or perform `Delete`/`Connect` actions to an existing one. The UI only performs requests regarding the Notebook CRs to the K8s API server. The management of all child resources(`Service`, `Deployment`) is performed by the Notebook CR Controller. By pressing the `+` button to create a Notebook the user is redirected to a form that allows him to configure the `PodTemplateSpec` params of the new Notebook. The User can specify the following options regarding his/her Jupyter Notebook: `name`, `namespace`, `cpu`, `memory`, `workspace volume`, `data volumes`, `extra resources`. Notably, he/she can create new Volumes from scratch (type set to `New`) or mount existing ones (type set to `Existing`). By clicking the `SPAWN` button, a new Notebook with the aforementioned options will be created in the `kubeflow` namespace. If some option is not specified, a default value will be used. **NOTE:** Please wait for the Notebook Pod to be successfully created and reach Ready state before trying to connect to it. Otherwise, Ambassador won't be able to route traffic to the correct endpoint and will fail -with "upstream connect error or disconnect/reset before headers". \ No newline at end of file +with "upstream connect error or disconnect/reset before headers". diff --git a/components/jupyter-web-app/default/kubeflow/jupyter/routes.py b/components/jupyter-web-app/default/kubeflow/jupyter/routes.py index dd0fc4d7509..355c7123121 100644 --- a/components/jupyter-web-app/default/kubeflow/jupyter/routes.py +++ b/components/jupyter-web-app/default/kubeflow/jupyter/routes.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import json from flask import jsonify, render_template, request from kubernetes.client.rest import ApiException @@ -19,160 +20,162 @@ # Helper function for getting the prefix of the webapp def prefix(): - if request.headers.get("x-forwarded-prefix"): - return request.headers.get("x-forwarded-prefix") - else: - return "" + if request.headers.get("x-forwarded-prefix"): + return request.headers.get("x-forwarded-prefix") + else: + return "" @app.route("/post-notebook", methods=['POST']) def post_notebook_route(): - data = {"success": True, "log": ""} - body = request.form - - # Template - notebook = create_notebook_template() - notebook_cont = notebook["spec"]['template']['spec']['containers'][0] - - # Set Name and Namespace - set_notebook_names(notebook, body) - - # Set Image - set_notebook_image(notebook, body) - - # CPU/RAM - set_notebook_cpu_ram(notebook, body) - - # Workspacae Volume - if body["ws_type"] == "New": - pvc = create_pvc_template() - pvc['metadata']['name'] = body['ws_name'] - pvc['metadata']['namespace'] = body['ns'] - pvc['spec']['accessModes'].append(body['ws_access_modes']) - pvc['spec']['resources']['requests']['storage'] = \ - body['ws_size'] + 'Gi' - - try: - create_pvc(pvc) - except ApiException as e: - data["success"] = False - data["log"] = parse_error(e) - return jsonify(data) - - # Create the Workspace Volume in the Pod - if body["ws_type"] != "None": - add_notebook_volume(notebook, - "volume-" + body["nm"], - body["ws_name"], - "/home/jovyan",) - - # Add the Data Volumes - counter = 1 - while ("vol_name" + str(counter)) in body: - i = str(counter) - vol_nm = 'data-volume-' + i - pvc_nm = body['vol_name' + i] - mnt = body['vol_mount_path' + i] - - # Create a PVC if its a new Data Volume - 
if body["vol_type" + i] == "New": - size = body['vol_size' + i] + 'Gi' - mode = body['vol_access_modes' + i] - pvc = create_pvc_template() - - pvc['metadata']['name'] = pvc_nm - pvc['metadata']['namespace'] = body['ns'] - pvc['spec']['accessModes'].append(mode) - pvc['spec']['resources']['requests']['storage'] = size - - try: - create_pvc(pvc) - except ApiException as e: - data["success"] = False - data["log"] = parse_error(e) - return jsonify(data) - - add_notebook_volume(notebook, vol_nm, pvc_nm, mnt) - counter += 1 - - # Add Extra Resources - try: - extra = json.loads(body["extraResources"]) - except Exception as e: - data["success"] = False - data["log"] = parse_error(e) - return jsonify(data) + data = {"success": True, "log": ""} + body = request.form + + # Template + notebook = create_notebook_template() + notebook_cont = notebook["spec"]['template']['spec']['containers'][0] + + # Set Name and Namespace + set_notebook_names(notebook, body) + + # Set Image + set_notebook_image(notebook, body) - notebook_cont['resources']['limits'] = extra + # CPU/RAM + set_notebook_cpu_ram(notebook, body) + + # Workspacae Volume + if body["ws_type"] == "New": + pvc = create_pvc_template() + pvc['metadata']['name'] = body['ws_name'] + pvc['metadata']['namespace'] = body['ns'] + pvc['spec']['accessModes'].append(body['ws_access_modes']) + pvc['spec']['resources']['requests']['storage'] = \ + body['ws_size'] + 'Gi' - # If all the parameters are given, then we try to create the notebook - # return try: - create_notebook(notebook) + create_pvc(pvc) except ApiException as e: + data["success"] = False + data["log"] = parse_error(e) + return jsonify(data) + + # Create the Workspace Volume in the Pod + if body["ws_type"] != "None": + add_notebook_volume( + notebook, + "volume-" + body["nm"], + body["ws_name"], + "/home/jovyan", + ) + + # Add the Data Volumes + counter = 1 + while ("vol_name" + str(counter)) in body: + i = str(counter) + vol_nm = 'data-volume-' + i + pvc_nm = body['vol_name' + i] + mnt = body['vol_mount_path' + i] + + # Create a PVC if its a new Data Volume + if body["vol_type" + i] == "New": + size = body['vol_size' + i] + 'Gi' + mode = body['vol_access_modes' + i] + pvc = create_pvc_template() + + pvc['metadata']['name'] = pvc_nm + pvc['metadata']['namespace'] = body['ns'] + pvc['spec']['accessModes'].append(mode) + pvc['spec']['resources']['requests']['storage'] = size + + try: + create_pvc(pvc) + except ApiException as e: data["success"] = False data["log"] = parse_error(e) return jsonify(data) + add_notebook_volume(notebook, vol_nm, pvc_nm, mnt) + counter += 1 + + # Add Extra Resources + try: + extra = json.loads(body["extraResources"]) + except Exception as e: + data["success"] = False + data["log"] = parse_error(e) + return jsonify(data) + + notebook_cont['resources']['limits'] = extra + + # If all the parameters are given, then we try to create the notebook + # return + try: + create_notebook(notebook) + except ApiException as e: + data["success"] = False + data["log"] = parse_error(e) return jsonify(data) + return jsonify(data) + @app.route("/add-notebook", methods=['GET']) def add_notebook_route(): - # A default value for the namespace to add the notebook - if request.args.get("namespace"): - ns = request.args.get("namespace") - else: - ns = "kubeflow" + # A default value for the namespace to add the notebook + if request.args.get("namespace"): + ns = request.args.get("namespace") + else: + ns = "kubeflow" - form_defaults = spawner_ui_config("notebook") - return 
render_template('add_notebook.html', prefix=prefix(), ns=ns, - form_defaults=form_defaults) + form_defaults = spawner_ui_config("notebook") + return render_template( + 'add_notebook.html', prefix=prefix(), ns=ns, form_defaults=form_defaults) @app.route("/delete-notebook", methods=['GET', 'POST']) def del_notebook_route(): - nb = request.args.get("notebook") - ns = request.args.get("namespace") + nb = request.args.get("notebook") + ns = request.args.get("namespace") - # try to delete the notebook - data = {"success": True, "log": ""} - try: - delete_notebook(nb, ns) - except ApiException as e: - data["success"] = False - data["log"] = parse_error(e) + # try to delete the notebook + data = {"success": True, "log": ""} + try: + delete_notebook(nb, ns) + except ApiException as e: + data["success"] = False + data["log"] = parse_error(e) - return jsonify(data) + return jsonify(data) @app.route("/list-notebooks") def list_notebooks_route(): - ns = request.args.get("namespace") + ns = request.args.get("namespace") - # Get the list of Notebooks in the given Namespace - data = {"notebooks": [], "success": True} - try: - data['notebooks'] = get_notebooks(ns) - except ApiException as e: - data['notebooks'] = [] - data['success'] = False - data["log"] = parse_error(e) + # Get the list of Notebooks in the given Namespace + data = {"notebooks": [], "success": True} + try: + data['notebooks'] = get_notebooks(ns) + except ApiException as e: + data['notebooks'] = [] + data['success'] = False + data["log"] = parse_error(e) - return jsonify(data) + return jsonify(data) @app.route("/") @app.route("/home") @app.route("/notebooks") def notebooks_route(): - base_ns = "kubeflow" + base_ns = "kubeflow" - # Get the namespaces the token can see - try: - nmsps = get_namespaces() - except ApiException: - nmsps = [base_ns] + # Get the namespaces the token can see + try: + nmsps = get_namespaces() + except ApiException: + nmsps = [base_ns] - return render_template('notebooks.html', prefix=prefix(), - title='Notebooks', namespaces=nmsps) + return render_template( + 'notebooks.html', prefix=prefix(), title='Notebooks', namespaces=nmsps) diff --git a/components/jupyter-web-app/default/kubeflow/jupyter/server.py b/components/jupyter-web-app/default/kubeflow/jupyter/server.py index 8e293cad4c8..23809ff0b59 100755 --- a/components/jupyter-web-app/default/kubeflow/jupyter/server.py +++ b/components/jupyter-web-app/default/kubeflow/jupyter/server.py @@ -1,14 +1,14 @@ +# -*- coding: utf-8 -*- import json from kubernetes import client, config from kubernetes.config import ConfigException - try: - # Load configuration inside the Pod - config.load_incluster_config() + # Load configuration inside the Pod + config.load_incluster_config() except ConfigException: - # Load configuration for testing - config.load_kube_config() + # Load configuration for testing + config.load_kube_config() # Create the Apis v1_core = client.CoreV1Api() @@ -16,49 +16,49 @@ def parse_error(e): - try: - err = json.loads(e.body)['message'] - except json.JSONDecodeError: - err = str(e) - except KeyError: - err = str(e) + try: + err = json.loads(e.body)['message'] + except json.JSONDecodeError: + err = str(e) + except KeyError: + err = str(e) - return err + return err def get_secret(nm, ns): - return v1_core.read_namespaced_secret(nm, ns) + return v1_core.read_namespaced_secret(nm, ns) def get_namespaces(): - nmsps = v1_core.list_namespace() - return [ns.metadata.name for ns in nmsps.items] + nmsps = v1_core.list_namespace() + return [ns.metadata.name for ns in 
nmsps.items] def get_notebooks(ns): - custom_api = client.CustomObjectsApi() + custom_api = client.CustomObjectsApi() - notebooks = \ - custom_api.list_namespaced_custom_object("kubeflow.org", "v1alpha1", - ns, "notebooks") - return [nb['metadata']['name'] for nb in notebooks['items']] + notebooks = \ + custom_api.list_namespaced_custom_object("kubeflow.org", "v1alpha1", + ns, "notebooks") + return [nb['metadata']['name'] for nb in notebooks['items']] def delete_notebook(nb, ns): - body = client.V1DeleteOptions() + body = client.V1DeleteOptions() - return \ - custom_api.delete_namespaced_custom_object("kubeflow.org", "v1alpha1", - ns, "notebooks", nb, body) + return \ + custom_api.delete_namespaced_custom_object("kubeflow.org", "v1alpha1", + ns, "notebooks", nb, body) def create_notebook(body): - ns = body['metadata']['namespace'] - return \ - custom_api.create_namespaced_custom_object("kubeflow.org", "v1alpha1", - ns, "notebooks", body) + ns = body['metadata']['namespace'] + return \ + custom_api.create_namespaced_custom_object("kubeflow.org", "v1alpha1", + ns, "notebooks", body) def create_pvc(body): - ns = body['metadata']['namespace'] - return v1_core.create_namespaced_persistent_volume_claim(ns, body) + ns = body['metadata']['namespace'] + return v1_core.create_namespaced_persistent_volume_claim(ns, body) diff --git a/components/jupyter-web-app/default/kubeflow/jupyter/static/css/main.css b/components/jupyter-web-app/default/kubeflow/jupyter/static/css/main.css index 7620246892c..c19848f6123 100644 --- a/components/jupyter-web-app/default/kubeflow/jupyter/static/css/main.css +++ b/components/jupyter-web-app/default/kubeflow/jupyter/static/css/main.css @@ -29,7 +29,7 @@ top: 10px; } -.mdl-textfield__label{ +.mdl-textfield__label{ margin-bottom:2px; } @@ -44,7 +44,7 @@ .alert-danger { color: #a94442; background-color: #f2dede; - border-color: #ebccd1; + border-color: #ebccd1; } .alert-success { @@ -113,4 +113,4 @@ .mdl-card { overflow: visible; z-index: 100; -} \ No newline at end of file +} diff --git a/components/jupyter-web-app/default/kubeflow/jupyter/static/css/notebooks.css b/components/jupyter-web-app/default/kubeflow/jupyter/static/css/notebooks.css index 2fc5954ef87..b1a7474d2e0 100644 --- a/components/jupyter-web-app/default/kubeflow/jupyter/static/css/notebooks.css +++ b/components/jupyter-web-app/default/kubeflow/jupyter/static/css/notebooks.css @@ -16,4 +16,4 @@ body { padding: 20px; background: #fafafa; position: relative; -} \ No newline at end of file +} diff --git a/components/jupyter-web-app/default/kubeflow/jupyter/static/js/add_notebook.js b/components/jupyter-web-app/default/kubeflow/jupyter/static/js/add_notebook.js index f98ed6183e6..61c6acd8610 100644 --- a/components/jupyter-web-app/default/kubeflow/jupyter/static/js/add_notebook.js +++ b/components/jupyter-web-app/default/kubeflow/jupyter/static/js/add_notebook.js @@ -1,11 +1,11 @@ $(window).on('load', function () { - // Fix the required inputs: + // Fix the required inputs: // https://github.com/google/material-design-lite/issues/1502#issuecomment-211702642 $('[data-required=true]').each(function() { $(this).attr('required', true); }); - + // Configure Image input elements setImageType() @@ -85,7 +85,7 @@ function setWorkspaceEventListeners() { $("#error-msg-vol").fadeOut("fast", function() { $(this).hide() }) - } + } else if (this.value == 'New') { unsetAttributes(workspaceName, 'readonly data-toggle data-placement title'); unsetAttributes(workspaceSize, 'readonly data-toggle data-placement title'); @@ -95,12 
+95,12 @@ function setWorkspaceEventListeners() { document.querySelector("#size-input-div").MaterialTextfield.enable() document.querySelector("#access-sel-div").MaterialTextfield.enable() document.querySelector("#mountpath-input-div").MaterialTextfield.enable() - + // Hide the warning from None option $("#error-msg-vol").fadeOut("fast", function() { $(this).hide() }) - } + } else if (this.value == "None") { var attrs = { 'readonly': true, @@ -222,7 +222,7 @@ function setDefaultFormValues() { $('#ws_type').attr({ 'readonly': defaultWorkspace.type.readOnly || defaultWorkspaceReadOnly, 'immutable': defaultWorkspace.type.readOnly || defaultWorkspaceReadOnly, - }); + }); if ($('#ws_type').attr('readonly')) { $('#ws_type').on('mousedown', function(e) { @@ -684,7 +684,7 @@ function removeVolume(id) { 'onclick': 'removeVolume(' + i + ')' }); - // The next two are for disabling the Size and Mount Textfields + // The next two are for disabling the Size and Mount Textfields volumeElement.find('#vol_size_textfield' + (i+1)).attr({ id: "vol_size_textfield" + i }) @@ -734,10 +734,10 @@ function postNotebook(form, data) { × Error: ` - + const $e = $("#error-msgs").html(innerHTML) $('.danger-log', $e).text(res.log) - + window.scrollTo(0, 0); } }) diff --git a/components/jupyter-web-app/default/kubeflow/jupyter/static/js/notebooks.js b/components/jupyter-web-app/default/kubeflow/jupyter/static/js/notebooks.js index 512fb39f36a..ba1146b54eb 100644 --- a/components/jupyter-web-app/default/kubeflow/jupyter/static/js/notebooks.js +++ b/components/jupyter-web-app/default/kubeflow/jupyter/static/js/notebooks.js @@ -148,4 +148,4 @@ function updateNotebooksInNamespace(ns) { function searchOut() { $("#ns-select").text("") -} \ No newline at end of file +} diff --git a/components/jupyter-web-app/default/kubeflow/jupyter/templates/add_notebook.html b/components/jupyter-web-app/default/kubeflow/jupyter/templates/add_notebook.html index 00973048786..bbad267ac13 100644 --- a/components/jupyter-web-app/default/kubeflow/jupyter/templates/add_notebook.html +++ b/components/jupyter-web-app/default/kubeflow/jupyter/templates/add_notebook.html @@ -37,15 +37,15 @@

[add_notebook.html hunks omitted: whitespace-only reflow of the spawner form markup (Name, Image, custom image, CPU, Memory, Workspace Volume and Data Volumes sections); the HTML content is not recoverable from this copy of the diff.]
-{% endblock content %} \ No newline at end of file +{% endblock content %} diff --git a/components/jupyter-web-app/default/kubeflow/jupyter/templates/notebooks.html b/components/jupyter-web-app/default/kubeflow/jupyter/templates/notebooks.html index 4dd880bc1c9..fbff3bbe881 100644 --- a/components/jupyter-web-app/default/kubeflow/jupyter/templates/notebooks.html +++ b/components/jupyter-web-app/default/kubeflow/jupyter/templates/notebooks.html @@ -10,7 +10,7 @@
[notebooks.html hunks omitted: whitespace-only changes around the Notebooks table markup; the HTML content is not recoverable from this copy of the diff.]
- diff --git a/components/jupyter-web-app/default/kubeflow/jupyter/utils.py b/components/jupyter-web-app/default/kubeflow/jupyter/utils.py index da8d32231f9..70df113e0e9 100755 --- a/components/jupyter-web-app/default/kubeflow/jupyter/utils.py +++ b/components/jupyter-web-app/default/kubeflow/jupyter/utils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import yaml CONFIG = "/etc/config/spawner_ui_config.yaml" @@ -5,125 +6,115 @@ # Functions for handling the JWT token def load_file(filepath): - with open(filepath, 'r') as f: - file_data = f.read().replace('\n', '') + with open(filepath, 'r') as f: + file_data = f.read().replace('\n', '') - return file_data + return file_data # Functions for handling the config from which we load the default values # for the form when adding a Notebook def spawner_ui_config(username): - c = None - try: - with open(CONFIG, 'r') as f: - c = f.read().format(username=username) - except IOError: - print('Error opening Spawner UI config file') - - try: - if yaml.safe_load(c) is None: - # YAML exists but is empty - return {} - else: - # YAML exists and is not empty - return yaml.safe_load(c)["spawnerFormDefaults"] - except yaml.YAMLError: - return None + c = None + try: + with open(CONFIG, 'r') as f: + c = f.read().format(username=username) + except IOError: + print('Error opening Spawner UI config file') + + try: + if yaml.safe_load(c) is None: + # YAML exists but is empty + return {} + else: + # YAML exists and is not empty + return yaml.safe_load(c)["spawnerFormDefaults"] + except yaml.YAMLError: + return None # Helper functions for the /post-notebook route. def create_notebook_template(): - notebook = { - "apiVersion": "kubeflow.org/v1alpha1", - "kind": "Notebook", - "metadata": { - "name": "", - "namespace": "", - "labels": { - "app": "" - } - }, - "spec": { - "template": { - "spec": { - "containers": [ - { - "name": "", - "volumeMounts": [], - } - ], - "ttlSecondsAfterFinished": 300, - "volumes": [], - } - } - } - } - return notebook + notebook = { + "apiVersion": "kubeflow.org/v1alpha1", + "kind": "Notebook", + "metadata": { + "name": "", + "namespace": "", + "labels": { + "app": "" + } + }, + "spec": { + "template": { + "spec": { + "containers": [{ + "name": "", + "volumeMounts": [], + }], + "ttlSecondsAfterFinished": 300, + "volumes": [], + } + } + } + } + return notebook def create_pvc_template(): - pvc = { - "apiVersion": "v1", - "kind": "PersistentVolumeClaim", - "metadata": { - "name": "", - "namespace": "", - }, - "spec": { - "accessModes": [], - "resources": { - "requests": { - "storage": "" - } - }, - } - } - return pvc + pvc = { + "apiVersion": "v1", + "kind": "PersistentVolumeClaim", + "metadata": { + "name": "", + "namespace": "", + }, + "spec": { + "accessModes": [], + "resources": { + "requests": { + "storage": "" + } + }, + } + } + return pvc def set_notebook_names(nb, body): - nb['metadata']['name'] = body["nm"] - nb['metadata']['labels']['app'] = body["nm"] - nb['spec']['template']['spec']['containers'][0]['name'] = body["nm"] - nb['metadata']['namespace'] = body["ns"] + nb['metadata']['name'] = body["nm"] + nb['metadata']['labels']['app'] = body["nm"] + nb['spec']['template']['spec']['containers'][0]['name'] = body["nm"] + nb['metadata']['namespace'] = body["ns"] def set_notebook_image(nb, body): - if body["imageType"] == "standard": - image = body["standardImages"] - else: - image = body["customImage"] - nb["spec"]['template']['spec']['containers'][0]['image'] = image + if body["imageType"] == "standard": + image = body["standardImages"] + 
else: + image = body["customImage"] + nb["spec"]['template']['spec']['containers'][0]['image'] = image def set_notebook_cpu_ram(nb, body): - notebook_cont = nb["spec"]['template']['spec']['containers'][0] + notebook_cont = nb["spec"]['template']['spec']['containers'][0] - notebook_cont['resources'] = { - 'requests': { - 'cpu': body['cpu'], - 'memory': body['memory'] - } - } + notebook_cont['resources'] = { + 'requests': { + 'cpu': body['cpu'], + 'memory': body['memory'] + } + } def add_notebook_volume(nb, vol, claim, mnt_path): - # Create the volume in the Pod - notebook_spec = nb["spec"]['template']['spec'] - notebook_cont = nb["spec"]['template']['spec']['containers'][0] - - volume = { - "name": vol, - "persistentVolumeClaim": { - "claimName": claim - } - } - notebook_spec['volumes'].append(volume) - - # Container volumeMounts - mnt = { - "mountPath": mnt_path, - "name": vol - } - notebook_cont["volumeMounts"].append(mnt) + # Create the volume in the Pod + notebook_spec = nb["spec"]['template']['spec'] + notebook_cont = nb["spec"]['template']['spec']['containers'][0] + + volume = {"name": vol, "persistentVolumeClaim": {"claimName": claim}} + notebook_spec['volumes'].append(volume) + + # Container volumeMounts + mnt = {"mountPath": mnt_path, "name": vol} + notebook_cont["volumeMounts"].append(mnt) diff --git a/components/jupyter-web-app/default/run.py b/components/jupyter-web-app/default/run.py index b47b0410aec..1ab05caf11d 100755 --- a/components/jupyter-web-app/default/run.py +++ b/components/jupyter-web-app/default/run.py @@ -1,5 +1,6 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- from kubeflow.jupyter import app if __name__ == '__main__': - app.run(debug=True, host="0.0.0.0") + app.run(debug=True, host="0.0.0.0") diff --git a/components/jupyter-web-app/rok/kubeflow/rokui/routes.py b/components/jupyter-web-app/rok/kubeflow/rokui/routes.py index b29606cd21c..8077fb4fce7 100644 --- a/components/jupyter-web-app/rok/kubeflow/rokui/routes.py +++ b/components/jupyter-web-app/rok/kubeflow/rokui/routes.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import json from flask import jsonify, render_template, request from kubernetes.client.rest import ApiException @@ -19,166 +20,174 @@ # Helper function for getting the prefix of the webapp def prefix(): - if request.headers.get("x-forwarded-prefix"): - return request.headers.get("x-forwarded-prefix") - else: - return "" + if request.headers.get("x-forwarded-prefix"): + return request.headers.get("x-forwarded-prefix") + else: + return "" @app.route("/post-notebook", methods=['POST']) def post_notebook_route(): - data = {"success": True, "log": ""} - body = request.form - - # Template - notebook = create_notebook_template() - notebook_cont = notebook["spec"]['template']['spec']['containers'][0] - - # Set Name and Namespace - set_notebook_names(notebook, body) - - # Set Image - set_notebook_image(notebook, body) - - # CPU/RAM - set_notebook_cpu_ram(notebook, body) - - # Workspacae Volume - if body["ws_type"] == "New": - pvc = create_pvc_template() - pvc['metadata']['name'] = body['ws_name'] - pvc['metadata']['namespace'] = body['ns'] - pvc['spec']['accessModes'].append(body['ws_access_modes']) - pvc['spec']['resources']['requests']['storage'] = \ - body['ws_size'] + 'Gi' - - try: - create_pvc(pvc) - except ApiException as e: - data["success"] = False - data["log"] = parse_error(e) - return jsonify(data) - - # Create the Workspace Volume in the Pod - if body["ws_type"] != "None": - add_notebook_volume(notebook, - "volume-" + body["nm"], - 
body["ws_name"], - "/home/jovyan",) - - # Add the Data Volumes - counter = 1 - while ("vol_name" + str(counter)) in body: - i = str(counter) - vol_nm = 'data-volume-' + i - pvc_nm = body['vol_name' + i] - mnt = body['vol_mount_path' + i] - - # Create a PVC if its a new Data Volume - if body["vol_type" + i] == "New": - size = body['vol_size' + i] + 'Gi' - mode = body['vol_access_modes' + i] - pvc = create_pvc_template() - - pvc['metadata']['name'] = pvc_nm - pvc['metadata']['namespace'] = body['ns'] - pvc['spec']['accessModes'].append(mode) - pvc['spec']['resources']['requests']['storage'] = size - - try: - create_pvc(pvc) - except ApiException as e: - data["success"] = False - data["log"] = parse_error(e) - return jsonify(data) - - add_notebook_volume(notebook, vol_nm, pvc_nm, mnt) - counter += 1 - - # Add Extra Resources - try: - extra = json.loads(body["extraResources"]) - except Exception as e: - data["success"] = False - data["log"] = parse_error(e) - return jsonify(data) + data = {"success": True, "log": ""} + body = request.form + + # Template + notebook = create_notebook_template() + notebook_cont = notebook["spec"]['template']['spec']['containers'][0] + + # Set Name and Namespace + set_notebook_names(notebook, body) + + # Set Image + set_notebook_image(notebook, body) - notebook_cont['resources']['limits'] = extra + # CPU/RAM + set_notebook_cpu_ram(notebook, body) + + # Workspacae Volume + if body["ws_type"] == "New": + pvc = create_pvc_template() + pvc['metadata']['name'] = body['ws_name'] + pvc['metadata']['namespace'] = body['ns'] + pvc['spec']['accessModes'].append(body['ws_access_modes']) + pvc['spec']['resources']['requests']['storage'] = \ + body['ws_size'] + 'Gi' - # If all the parameters are given, then we try to create the notebook - # return try: - create_notebook(notebook) + create_pvc(pvc) except ApiException as e: + data["success"] = False + data["log"] = parse_error(e) + return jsonify(data) + + # Create the Workspace Volume in the Pod + if body["ws_type"] != "None": + add_notebook_volume( + notebook, + "volume-" + body["nm"], + body["ws_name"], + "/home/jovyan", + ) + + # Add the Data Volumes + counter = 1 + while ("vol_name" + str(counter)) in body: + i = str(counter) + vol_nm = 'data-volume-' + i + pvc_nm = body['vol_name' + i] + mnt = body['vol_mount_path' + i] + + # Create a PVC if its a new Data Volume + if body["vol_type" + i] == "New": + size = body['vol_size' + i] + 'Gi' + mode = body['vol_access_modes' + i] + pvc = create_pvc_template() + + pvc['metadata']['name'] = pvc_nm + pvc['metadata']['namespace'] = body['ns'] + pvc['spec']['accessModes'].append(mode) + pvc['spec']['resources']['requests']['storage'] = size + + try: + create_pvc(pvc) + except ApiException as e: data["success"] = False data["log"] = parse_error(e) return jsonify(data) + add_notebook_volume(notebook, vol_nm, pvc_nm, mnt) + counter += 1 + + # Add Extra Resources + try: + extra = json.loads(body["extraResources"]) + except Exception as e: + data["success"] = False + data["log"] = parse_error(e) return jsonify(data) + notebook_cont['resources']['limits'] = extra + + # If all the parameters are given, then we try to create the notebook + # return + try: + create_notebook(notebook) + except ApiException as e: + data["success"] = False + data["log"] = parse_error(e) + return jsonify(data) + + return jsonify(data) + @app.route("/add-notebook", methods=['GET']) def add_notebook_route(): - # A default value for the namespace to add the notebook - if request.args.get("namespace"): - ns = 
request.args.get("namespace") - else: - ns = "kubeflow" + # A default value for the namespace to add the notebook + if request.args.get("namespace"): + ns = request.args.get("namespace") + else: + ns = "kubeflow" - # Load the Rok Token - rok_token = {} + # Load the Rok Token + rok_token = {} - form_defaults = spawner_ui_config("notebook") - return render_template('add_notebook.html', prefix=prefix(), ns=ns, - form_defaults=form_defaults, - username="webapp", - rok_token=rok_token) + form_defaults = spawner_ui_config("notebook") + return render_template( + 'add_notebook.html', + prefix=prefix(), + ns=ns, + form_defaults=form_defaults, + username="webapp", + rok_token=rok_token) @app.route("/delete-notebook", methods=['GET', 'POST']) def del_notebook_route(): - nb = request.args.get("notebook") - ns = request.args.get("namespace") + nb = request.args.get("notebook") + ns = request.args.get("namespace") - # try to delete the notebook - data = {"success": True, "log": ""} - try: - delete_notebook(nb, ns) - except ApiException as e: - data["success"] = False - data["log"] = parse_error(e) + # try to delete the notebook + data = {"success": True, "log": ""} + try: + delete_notebook(nb, ns) + except ApiException as e: + data["success"] = False + data["log"] = parse_error(e) - return jsonify(data) + return jsonify(data) @app.route("/list-notebooks") def list_notebooks_route(): - ns = request.args.get("namespace") + ns = request.args.get("namespace") - # Get the list of Notebooks in the given Namespace - data = {"notebooks": [], "success": True} - try: - data['notebooks'] = get_notebooks(ns) - except ApiException as e: - data['notebooks'] = [] - data['success'] = False - data["log"] = parse_error(e) + # Get the list of Notebooks in the given Namespace + data = {"notebooks": [], "success": True} + try: + data['notebooks'] = get_notebooks(ns) + except ApiException as e: + data['notebooks'] = [] + data['success'] = False + data["log"] = parse_error(e) - return jsonify(data) + return jsonify(data) @app.route("/") @app.route("/home") @app.route("/notebooks") def notebooks_route(): - base_ns = "kubeflow" - - # Get the namespaces the token can see - try: - nmsps = get_namespaces() - except ApiException: - nmsps = [base_ns] - - return render_template('notebooks.html', prefix=prefix(), - title='Notebooks', namespaces=nmsps, - username="webapp") + base_ns = "kubeflow" + + # Get the namespaces the token can see + try: + nmsps = get_namespaces() + except ApiException: + nmsps = [base_ns] + + return render_template( + 'notebooks.html', + prefix=prefix(), + title='Notebooks', + namespaces=nmsps, + username="webapp") diff --git a/components/jupyter-web-app/rok/kubeflow/rokui/server.py b/components/jupyter-web-app/rok/kubeflow/rokui/server.py index 5bfb32bcbb8..483697eef1e 100755 --- a/components/jupyter-web-app/rok/kubeflow/rokui/server.py +++ b/components/jupyter-web-app/rok/kubeflow/rokui/server.py @@ -1,14 +1,14 @@ +# -*- coding: utf-8 -*- import json from kubernetes import client, config from kubernetes.config import ConfigException - try: - # Load configuration inside the Pod - config.load_incluster_config() + # Load configuration inside the Pod + config.load_incluster_config() except ConfigException: - # Load configuration for testing - config.load_kube_config() + # Load configuration for testing + config.load_kube_config() # Create the Apis v1_core = client.CoreV1Api() @@ -16,47 +16,47 @@ def parse_error(e): - try: - err = json.loads(e.body)['message'] - except KeyError: - err = str(e) + try: + err = 
json.loads(e.body)['message'] + except KeyError: + err = str(e) - return err + return err def get_secret(nm, ns): - return v1_core.read_namespaced_secret(nm, ns) + return v1_core.read_namespaced_secret(nm, ns) def get_namespaces(): - nmsps = v1_core.list_namespace() - return [ns.metadata.name for ns in nmsps.items] + nmsps = v1_core.list_namespace() + return [ns.metadata.name for ns in nmsps.items] def get_notebooks(ns): - custom_api = client.CustomObjectsApi() + custom_api = client.CustomObjectsApi() - notebooks = \ - custom_api.list_namespaced_custom_object("kubeflow.org", "v1alpha1", - ns, "notebooks") - return [nb['metadata']['name'] for nb in notebooks['items']] + notebooks = \ + custom_api.list_namespaced_custom_object("kubeflow.org", "v1alpha1", + ns, "notebooks") + return [nb['metadata']['name'] for nb in notebooks['items']] def delete_notebook(nb, ns): - body = client.V1DeleteOptions() + body = client.V1DeleteOptions() - return \ - custom_api.delete_namespaced_custom_object("kubeflow.org", "v1alpha1", - ns, "notebooks", nb, body) + return \ + custom_api.delete_namespaced_custom_object("kubeflow.org", "v1alpha1", + ns, "notebooks", nb, body) def create_notebook(body): - ns = body['metadata']['namespace'] - return \ - custom_api.create_namespaced_custom_object("kubeflow.org", "v1alpha1", - ns, "notebooks", body) + ns = body['metadata']['namespace'] + return \ + custom_api.create_namespaced_custom_object("kubeflow.org", "v1alpha1", + ns, "notebooks", body) def create_pvc(body): - ns = body['metadata']['namespace'] - return v1_core.create_namespaced_persistent_volume_claim(ns, body) + ns = body['metadata']['namespace'] + return v1_core.create_namespaced_persistent_volume_claim(ns, body) diff --git a/components/jupyter-web-app/rok/kubeflow/rokui/static/css/main.css b/components/jupyter-web-app/rok/kubeflow/rokui/static/css/main.css index 7620246892c..c19848f6123 100644 --- a/components/jupyter-web-app/rok/kubeflow/rokui/static/css/main.css +++ b/components/jupyter-web-app/rok/kubeflow/rokui/static/css/main.css @@ -29,7 +29,7 @@ top: 10px; } -.mdl-textfield__label{ +.mdl-textfield__label{ margin-bottom:2px; } @@ -44,7 +44,7 @@ .alert-danger { color: #a94442; background-color: #f2dede; - border-color: #ebccd1; + border-color: #ebccd1; } .alert-success { @@ -113,4 +113,4 @@ .mdl-card { overflow: visible; z-index: 100; -} \ No newline at end of file +} diff --git a/components/jupyter-web-app/rok/kubeflow/rokui/static/css/notebooks.css b/components/jupyter-web-app/rok/kubeflow/rokui/static/css/notebooks.css index 2fc5954ef87..b1a7474d2e0 100644 --- a/components/jupyter-web-app/rok/kubeflow/rokui/static/css/notebooks.css +++ b/components/jupyter-web-app/rok/kubeflow/rokui/static/css/notebooks.css @@ -16,4 +16,4 @@ body { padding: 20px; background: #fafafa; position: relative; -} \ No newline at end of file +} diff --git a/components/jupyter-web-app/rok/kubeflow/rokui/static/js/add_notebook.js b/components/jupyter-web-app/rok/kubeflow/rokui/static/js/add_notebook.js index e37a360b7a4..a3f26322ec7 100644 --- a/components/jupyter-web-app/rok/kubeflow/rokui/static/js/add_notebook.js +++ b/components/jupyter-web-app/rok/kubeflow/rokui/static/js/add_notebook.js @@ -1,6 +1,6 @@ $(window).on('load', function () { - // Fix the required inputs: + // Fix the required inputs: // https://github.com/google/material-design-lite/issues/1502#issuecomment-211702642 $('[data-required=true]').each(function() { $(this).attr('required', true); @@ -50,7 +50,7 @@ $(window).on('load', function () { autofillForm(); 
}, 250); }); - + // Configure Image input elements setImageType() @@ -106,7 +106,7 @@ function setWorkspaceEventListeners() { workspaceType.on('change', function() { // Set attributes for the Volume fields if (this.value == 'Existing') { - + setAttributes(workspaceSize, { 'readonly': true, 'data-toggle': 'tooltip', 'data-placement': 'top', @@ -125,7 +125,7 @@ function setWorkspaceEventListeners() { $("#error-msg-vol").fadeOut("fast", function() { $(this).hide() }) - } + } else if (this.value == 'New') { unsetAttributes(workspaceName, 'readonly data-toggle data-placement title'); unsetAttributes(workspaceSize, 'readonly data-toggle data-placement title'); @@ -142,12 +142,12 @@ function setWorkspaceEventListeners() { 'title': 'You can only enter a Rok URL when mounting an existing Rok Volume' }); document.querySelector("#wsrokurl-input-div").MaterialTextfield.disable() - + // Hide the warning from None option $("#error-msg-vol").fadeOut("fast", function() { $(this).hide() }) - } + } else if (this.value == "None") { var attrs = { 'readonly': true, @@ -275,7 +275,7 @@ function setDefaultFormValues() { $('#ws_type').attr({ 'readonly': defaultWorkspace.type.readOnly || defaultWorkspaceReadOnly, 'immutable': defaultWorkspace.type.readOnly || defaultWorkspaceReadOnly, - }); + }); if ($('#ws_type').attr('readonly')) { $('#ws_type').on('mousedown', function(e) { @@ -701,10 +701,10 @@ function addVolume() { 'title': 'Size is autofilled when mounting existing Volumes' }); document.querySelector("#vol_size_textfield"+vol_id).MaterialTextfield.disable() - + unsetAttributes(volumeMode, 'readonly data-toggle data-placement title'); - document.querySelector("#vol_rokurl_textfield"+vol_id).MaterialTextfield.enable() - + document.querySelector("#vol_rokurl_textfield"+vol_id).MaterialTextfield.enable() + } else if (this.value == 'New') { unsetAttributes(volumeSize, 'readonly data-toggle data-placement title'); document.querySelector("#vol_size_textfield"+vol_id).MaterialTextfield.enable() @@ -751,7 +751,7 @@ function removeVolume(id) { 'onclick': 'removeVolume(' + i + ')' }); - // The next two are for disabling the Size and Mount Textfields + // The next two are for disabling the Size and Mount Textfields volumeElement.find('#vol_size_textfield' + (i+1)).attr({ id: "vol_size_textfield" + i }) @@ -801,7 +801,7 @@ function postNotebook(form, data) { × Error:
` - + const $e = $("#error-msgs").html(innerHTML) $('.danger-log', $e).text(res.log) @@ -977,4 +977,4 @@ function formCleanup() { // Restore rokLabURL input field style $('#rokLabURL').removeClass('valid invalid'); -} \ No newline at end of file +} diff --git a/components/jupyter-web-app/rok/kubeflow/rokui/static/js/notebooks.js b/components/jupyter-web-app/rok/kubeflow/rokui/static/js/notebooks.js index 512fb39f36a..ba1146b54eb 100644 --- a/components/jupyter-web-app/rok/kubeflow/rokui/static/js/notebooks.js +++ b/components/jupyter-web-app/rok/kubeflow/rokui/static/js/notebooks.js @@ -148,4 +148,4 @@ function updateNotebooksInNamespace(ns) { function searchOut() { $("#ns-select").text("") -} \ No newline at end of file +} diff --git a/components/jupyter-web-app/rok/kubeflow/rokui/templates/add_notebook.html b/components/jupyter-web-app/rok/kubeflow/rokui/templates/add_notebook.html index fc2f1f10ee1..42844ec7ccb 100644 --- a/components/jupyter-web-app/rok/kubeflow/rokui/templates/add_notebook.html +++ b/components/jupyter-web-app/rok/kubeflow/rokui/templates/add_notebook.html @@ -39,7 +39,7 @@
[add_notebook.html template hunks: "Rok Jupyter Lab URL", "Memory", and "Workspace Volume" form fields; additional hunks at @@ -161,8 +161,8 @@, @@ -183,10 +183,10 @@, @@ -209,9 +209,9 @@, @@ -220,9 +220,9 @@, @@ -231,9 +231,9 @@]
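For reference, the PersistentVolumeClaim body that the notebook-creation handler assembles from the workspace and data-volume form fields (ws_name, ws_size, ws_access_modes, and the numbered vol_* fields) has the standard core/v1 shape. A minimal sketch, assuming a hypothetical build_pvc helper in place of the app's create_pvc_template() loader, and using example values throughout:

# -*- coding: utf-8 -*-
# Hypothetical helper illustrating the PVC body the handler fills in before
# calling create_pvc(); the app's create_pvc_template() presumably returns an
# equivalent empty skeleton.
from kubernetes import client, config


def build_pvc(name, namespace, access_mode, size_gi):
  """Return a core/v1 PersistentVolumeClaim body as a plain dict."""
  return {
      "apiVersion": "v1",
      "kind": "PersistentVolumeClaim",
      "metadata": {
          "name": name,
          "namespace": namespace,
      },
      "spec": {
          "accessModes": [access_mode],
          "resources": {
              "requests": {
                  # the handler appends 'Gi' to the ws_size / vol_size values
                  "storage": size_gi + "Gi",
              },
          },
      },
  }


if __name__ == "__main__":
  config.load_kube_config()  # inside a Pod this would be load_incluster_config()
  v1_core = client.CoreV1Api()
  body = build_pvc("my-workspace", "kubeflow", "ReadWriteOnce", "10")
  # Same call the web app's create_pvc() wrapper makes:
  v1_core.create_namespaced_persistent_volume_claim("kubeflow", body)

The extraResources field follows the same pattern: it is parsed with json.loads and assigned to the container's resources.limits, so a value such as {"nvidia.com/gpu": "1"} (an illustrative value, not one of the app's defaults) would request a GPU for the notebook pod.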
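The Flask routes above form a small JSON API (/list-notebooks, /delete-notebook, /add-notebook, plus the POST handler that creates notebooks), each returning a success flag and a log message on failure. A hedged sketch of a client exercising two of them; the base URL is a deployment-specific assumption and the real prefix depends on how the app is served:

# -*- coding: utf-8 -*-
# Sketch of a client for the JSON routes defined above. BASE is an assumed
# local address; adjust it (and any URL prefix) for a real deployment.
import requests

BASE = "http://localhost:5000"  # assumption: app served locally, no prefix

# GET /list-notebooks?namespace=<ns> -> {"notebooks": [...], "success": bool}
resp = requests.get(BASE + "/list-notebooks", params={"namespace": "kubeflow"})
data = resp.json()
if data["success"]:
  for nb in data["notebooks"]:
    print(nb)
else:
  print("error:", data.get("log", ""))

# /delete-notebook accepts GET or POST and reads its query parameters
resp = requests.post(
    BASE + "/delete-notebook",
    params={"notebook": "my-notebook", "namespace": "kubeflow"})
print(resp.json())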