From 3eae8cbf792524c69be11c983d124a78d787527e Mon Sep 17 00:00:00 2001 From: Dan Buch Date: Mon, 22 Jun 2020 14:52:23 -0400 Subject: [PATCH 1/6] Enable docs building + artifact storage --- .github/workflows/main.yml | 27 ++++---- .gitignore | 1 + Makefile | 122 +++++++++++++------------------------ docs/build-doc.sh | 4 +- selenium/Makefile | 2 +- 5 files changed, 64 insertions(+), 92 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index bff7fb55..10f96df4 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -49,7 +49,11 @@ jobs: - run: pipenv install --dev - run: pipenv run pip freeze - run: pipenv run python setup.py --version - - run: pipenv run python -Wi setup.py test + - run: make install + - run: make test + - run: make image${{ matrix.python-version }} + - run: make -C selenium build + # - run: make test-selenium distributions: needs: test runs-on: ubuntu-latest @@ -115,13 +119,14 @@ jobs: with: user: __token__ password: ${{ secrets.PYPI_TOKEN }} -# docs: - # needs: test - # runs-on: ubuntu-latest - # steps: - # - uses: actions/checkout@v2 - # - run: make docs-build - # - uses: actions/upload-artifact@v2 - # with: - # name: docs - # path: docs/site/ + docs: + needs: test + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - run: make docs-image + - run: make docs-build + - uses: actions/upload-artifact@v2 + with: + name: docs + path: docs/out/ diff --git a/.gitignore b/.gitignore index a55507df..429f09e1 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,4 @@ .vagrant /rsconnect_jupyter/static/version.json /rsconnect_jupyter/version.py +/docs/out/ diff --git a/Makefile b/Makefile index 81f18b78..a9544de9 100644 --- a/Makefile +++ b/Makefile @@ -1,22 +1,22 @@ -.PHONY: clean all-images image% launch notebook% package dist run test all-tests test% shell shell% dist-run dist-run% pypi-run pypi-run% mock-server docs-build docs-image version-frontend +NB_UID := $(shell id -u) +NB_GID := $(shell id -g) -NB_UID=$(shell id -u) -NB_GID=$(shell id -g) - -IMAGE=rstudio/rsconnect-jupyter-py +IMAGE := rstudio/rsconnect-jupyter-py VERSION := $(shell pipenv run python setup.py --version) BDIST_WHEEL := dist/rsconnect_jupyter-$(VERSION)-py2.py3-none-any.whl S3_PREFIX := s3://rstudio-connect-downloads/connect/rsconnect-jupyter -PORT = $(shell printenv PORT || echo 9999) +PORT := $(shell printenv PORT || echo 9999) # NOTE: See the `dist` target for why this exists. SOURCE_DATE_EPOCH := $(shell date +%s) export SOURCE_DATE_EPOCH +.PHONY: clean clean: - rm -rf build/ dist/ rsconnect_jupyter.egg-info/ + rm -rf build/ dist/ docs/out/ rsconnect_jupyter.egg-info/ -all-images: image2 image3.5 image3.6 image3.7 +.PHONY: all-images +all-images: image2.7 image3.5 image3.6 image3.7 image3.8 image%: docker build \ @@ -28,6 +28,7 @@ image%: --build-arg PY_VERSION=$* \ . +.PHONY: launch launch: docker run --rm -i -t \ -v $(CURDIR)/notebooks$(PY_VERSION):/notebooks \ @@ -43,16 +44,18 @@ launch: notebook%: make DOCKER_IMAGE=$(IMAGE)$* PY_VERSION=$* TARGET=run launch -all-tests: test2 test3.5 test3.6 test3.7 +.PHONY: all-tests +all-tests: test2.7 test3.5 test3.6 test3.7 test3.8 +.PHONY: test test: version-frontend - pip install --extra-index-url=https://test.pypi.org/simple rsconnect-python - python -V - python -Wi setup.py test + pipenv run python -V + pipenv run python -Wi setup.py test test%: version-frontend make DOCKER_IMAGE=rstudio/rsconnect-jupyter-py$* PY_VERSION=$* TARGET=test launch +.PHONY: test-selenium test-selenium: $(MAKE) -C selenium clean test-env-up jupyter-up test || EXITCODE=$$? ; \ $(MAKE) -C selenium jupyter-down || true ; \ @@ -62,6 +65,7 @@ test-selenium: # NOTE: Wheels won't get built if _any_ file it tries to touch has a timestamp # before 1980 (system files) so the $(SOURCE_DATE_EPOCH) current timestamp is # exported as a point of reference instead. +.PHONY: dist dist: version-frontend pipenv run python setup.py bdist_wheel pipenv run twine check $(BDIST_WHEEL) @@ -69,56 +73,16 @@ dist: version-frontend @echo "::set-output name=whl::$(BDIST_WHEEL)" @echo "::set-output name=whl_basename::$(notdir $(BDIST_WHEEL))" -package: - make DOCKER_IMAGE=$(IMAGE)3 PY_VERSION=3 TARGET=dist launch - -run: -# link python package - pipenv install --dev -# install rsconnect_jupyter as a jupyter extension - pipenv run jupyter-nbextension install --symlink --user --py rsconnect_jupyter -# enable js extension - pipenv run jupyter-nbextension enable --py rsconnect_jupyter -# enable python extension - pipenv run jupyter-serverextension enable --py rsconnect_jupyter -# start notebook - pipenv run jupyter-notebook -y --notebook-dir=/notebooks --ip='0.0.0.0' --port=9999 --no-browser --NotebookApp.token='' - -shell: - bash - -shell%: - make DOCKER_IMAGE=$(IMAGE)$* PY_VERSION=$* TARGET=shell launch - -dist-run%: - make DOCKER_IMAGE=$(IMAGE)$* PY_VERSION=$* TARGET=dist-run launch - -dist-run: dist - pipenv run pip install dist/rsconnect_jupyter-$(VERSION)-py2.py3-none-any.whl - pipenv run jupyter-nbextension install --symlink --user --py rsconnect_jupyter - pipenv run jupyter-nbextension enable --py rsconnect_jupyter - pipenv run jupyter-serverextension enable --py rsconnect_jupyter - pipenv run jupyter-notebook -y --notebook-dir=/notebooks --ip='0.0.0.0' --port=9999 --no-browser --NotebookApp.token='' - -pypi-run%: - make DOCKER_IMAGE=$(IMAGE)$* PY_VERSION=$* TARGET=pypi-run launch - -pypi-run: - pipenv run pip install rsconnect_jupyter==$(VERSION) - pipenv run jupyter-nbextension install --symlink --user --py rsconnect_jupyter - pipenv run jupyter-nbextension enable --py rsconnect_jupyter - pipenv run jupyter-serverextension enable --py rsconnect_jupyter +.PHONY: run +run: install pipenv run jupyter-notebook -y --notebook-dir=/notebooks --ip='0.0.0.0' --port=9999 --no-browser --NotebookApp.token='' -pypi-test-run%: - make DOCKER_IMAGE=$(IMAGE)$* PY_VERSION=$* TARGET=pypi-test-run launch - -pypi-test-run: - pipenv run pip install --index-url https://test.pypi.org/simple/ rsconnect_jupyter==$(VERSION) +.PHONY: install +install: + pipenv install --dev pipenv run jupyter-nbextension install --symlink --user --py rsconnect_jupyter pipenv run jupyter-nbextension enable --py rsconnect_jupyter pipenv run jupyter-serverextension enable --py rsconnect_jupyter - pipenv run jupyter-notebook -y --notebook-dir=/notebooks --ip='0.0.0.0' --port=9999 --no-browser --NotebookApp.token='' build/mock-connect/bin/flask: bash -c '\ @@ -127,51 +91,53 @@ build/mock-connect/bin/flask: . build/mock-connect/bin/activate && \ pip install flask' +.PHONY: mock-server mock-server: build/mock-connect/bin/flask bash -c '\ . build/mock-connect/bin/activate && \ FLASK_APP=mock_connect.py flask run --host=0.0.0.0' -## Code quality tools - +.PHONY: yarn yarn: yarn install +.PHONY: lint lint: lint-js +.PHONY: lint-js lint-js: npm run lint ## Specify that Docker runs with the calling user's uid/gid to avoid file ## permission issues on Linux dev hosts. -DOCKER_RUN_AS= +DOCKER_RUN_AS = ifeq (Linux,$(shell uname)) - DOCKER_RUN_AS=-u $(shell id -u):$(shell id -g) + DOCKER_RUN_AS = -u $(shell id -u):$(shell id -g) endif -## Inside Jenkins (when JOB_NAME is defined), we are in the right type of -## Docker container. Otherwise, launch pandoc inside a -## rstudio/connect:docs container. -BUILD_DOC=env VERSION=${VERSION} ./docs/build-doc.sh -ifeq (${JOB_NAME},) - BUILD_DOC=docker run --rm=true ${DOCKER_RUN_AS} \ - -e VERSION=${VERSION} \ - ${DOCKER_ARGS} \ - -v $(CURDIR):/rsconnect_jupyter \ - -w /rsconnect_jupyter \ - rsconnect-jupyter-docs docs/build-doc.sh -endif +DOCS_IMAGE := rsconnect-jupyter-docs:local +BUILD_DOC := docker run --rm=true $(DOCKER_RUN_AS) \ + -e VERSION=$(VERSION) \ + $(DOCKER_ARGS) \ + -v $(CURDIR):/rsconnect_jupyter \ + -w /rsconnect_jupyter \ + $(DOCS_IMAGE) docs/build-doc.sh +.PHONY: docs-image docs-image: - docker build -t rsconnect-jupyter-docs ./docs + docker build -t $(DOCS_IMAGE) ./docs -docs-build: - ${BUILD_DOC} +.PHONY: docs-build +docs-build: docs/out + $(BUILD_DOC) +docs/out: + mkdir -p $@ -dist/rsconnect-jupyter-${VERSION}.pdf: docs/README.md docs/*.gif - ${BUILD_DOC} +dist/rsconnect-jupyter-$(VERSION).pdf: docs/README.md docs/*.gif docs/out + $(BUILD_DOC) +.PHONY: version-frontend version-frontend: printf '{"version":"%s"}\n' $(VERSION) >rsconnect_jupyter/static/version.json diff --git a/docs/build-doc.sh b/docs/build-doc.sh index 5248ebf7..d8259d2c 100755 --- a/docs/build-doc.sh +++ b/docs/build-doc.sh @@ -6,14 +6,14 @@ TITLE='rsconnect-jupyter User Guide' pandoc -f markdown-implicit_figures \ --self-contained \ - -o dist/rsconnect_jupyter-${VERSION}.html \ + -o docs/out/rsconnect_jupyter-${VERSION}.html \ -H docs/images/style.fragment.html \ -T "${TITLE}" \ -M "title:${TITLE}" \ README.md pandoc -f markdown-implicit_figures \ - -o dist/rsconnect_jupyter-${VERSION}.pdf \ + -o docs/out/rsconnect_jupyter-${VERSION}.pdf \ -T "${TITLE}" \ -M "title:${TITLE}" \ README.md diff --git a/selenium/Makefile b/selenium/Makefile index 1654f741..7f50f20c 100644 --- a/selenium/Makefile +++ b/selenium/Makefile @@ -23,7 +23,7 @@ NB_GID=$(shell id -g) NETWORK=${PROJECT}_default NOTEBOOKS_DIR=/notebooks PROJECT=rscjnet -PY_VERSION=2 +PY_VERSION=3.8 PYTESTLOG?=selenium_tests.log PYTESTOPTS?= RERUN_FAILURES?=0 From c5d115c7c44908a15d1df15b9971c241c98419fe Mon Sep 17 00:00:00 2001 From: Dan Buch Date: Mon, 22 Jun 2020 18:00:42 -0400 Subject: [PATCH 2/6] Enable fmt, lint, selenium, and docs --- .flake8 | 17 ++ .github/workflows/main.yml | 18 +- .gitignore | 19 +- Makefile | 33 ++- conftest.py | 6 + mock_connect.py | 169 ++++++------ rsconnect_jupyter/__init__.py | 257 ++++++++++-------- rsconnect_jupyter/tests/__init__.py | 0 rsconnect_jupyter/tests/data/pip1/dummy.ipynb | 52 ---- .../tests/data/pip1/requirements.txt | 3 - rsconnect_jupyter/tests/data/pip2/data.csv | 9 - rsconnect_jupyter/tests/data/pip2/dummy.ipynb | 52 ---- selenium/Makefile | 7 +- selenium/conftest.py | 63 ++--- selenium/t/pages/add_server_form.py | 4 +- selenium/t/pages/checkbox.py | 5 - selenium/t/pages/content_selection.py | 2 - selenium/t/pages/form_base.py | 13 +- selenium/t/pages/main_toolbar.py | 1 - selenium/t/pages/publish_content_form.py | 10 +- selenium/t/pages/select_list.py | 3 +- selenium/t/test_add_server.py | 31 +-- selenium/t/test_publish_source.py | 21 +- selenium/t/test_publish_static.py | 21 +- selenium/t/test_republish.py | 35 +-- selenium/t/test_switch_mode.py | 30 +- selenium/tools/systemstat/gridstat | 18 +- selenium/tools/systemstat/gridstat.py | 39 ++- selenium/tools/systemstat/sutstat | 15 +- selenium/tools/systemstat/sutstat.py | 26 +- selenium/tools/systemstat/systemstat.py | 13 +- selenium/tools/systemstat/systemstattool.py | 81 ++---- selenium/tools/systemstat/test_systemstat.py | 42 +-- selenium/wait_for_systems_up.sh | 2 +- tests/test_rsconnect_jupyter.py | 41 +++ 35 files changed, 529 insertions(+), 629 deletions(-) create mode 100644 .flake8 create mode 100644 conftest.py delete mode 100644 rsconnect_jupyter/tests/__init__.py delete mode 100644 rsconnect_jupyter/tests/data/pip1/dummy.ipynb delete mode 100644 rsconnect_jupyter/tests/data/pip1/requirements.txt delete mode 100644 rsconnect_jupyter/tests/data/pip2/data.csv delete mode 100644 rsconnect_jupyter/tests/data/pip2/dummy.ipynb create mode 100644 tests/test_rsconnect_jupyter.py diff --git a/.flake8 b/.flake8 new file mode 100644 index 00000000..a2acd909 --- /dev/null +++ b/.flake8 @@ -0,0 +1,17 @@ +[flake8] +max_line_length = 120 +show_source = true +exclude = .git,.venv,.venv2,.venv3,__pycache__,.cache,.eggs + +# The following codes are ignored so that `flake8` plays nicer with how `black` +# likes to format: +# - E203: whitespace before ':' +# - E231: missing whitespace after ',', ';', or ':' +# - E302: expected 2 blank lines, found 0 +# +# ref: +# https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes +# +extend_ignore = E203,E231,E302 + +# vim:filetype=dosini diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 10f96df4..e538d0d2 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -50,10 +50,11 @@ jobs: - run: pipenv run pip freeze - run: pipenv run python setup.py --version - run: make install + - run: make lint - run: make test - run: make image${{ matrix.python-version }} - run: make -C selenium build - # - run: make test-selenium + - run: make test-selenium distributions: needs: test runs-on: ubuntu-latest @@ -130,3 +131,18 @@ jobs: with: name: docs path: docs/out/ + - uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET }} + aws-region: us-east-1 + - if: github.event_name == 'push' && github.ref == 'refs/heads/master' + run: make sync-latest-docs-to-s3 + - if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.DOCS_AWS_ID }} + aws-secret-access-key: ${{ secrets.DOCS_AWS_SECRET }} + aws-region: us-east-1 + - if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') + run: make promote-docs-in-s3 diff --git a/.gitignore b/.gitignore index 429f09e1..574dc3ce 100644 --- a/.gitignore +++ b/.gitignore @@ -1,17 +1,18 @@ *.pyc -/build/ -/dist/ +.DS_Store +.coverage +.vagrant +/*.egg +/*.egg-info +/*.eggs /.conda/ +/.idea/ /.jupyter/ /.local/ -/.idea/ +/build/ +/dist/ +/docs/out/ /node_modules/ /notebooks*/ -/*.egg-info -/*.egg -/*.eggs -.DS_Store -.vagrant /rsconnect_jupyter/static/version.json /rsconnect_jupyter/version.py -/docs/out/ diff --git a/Makefile b/Makefile index a9544de9..93958718 100644 --- a/Makefile +++ b/Makefile @@ -49,8 +49,7 @@ all-tests: test2.7 test3.5 test3.6 test3.7 test3.8 .PHONY: test test: version-frontend - pipenv run python -V - pipenv run python -Wi setup.py test + pipenv run pytest -vv --cov=rsconnect_jupyter tests/ test%: version-frontend make DOCKER_IMAGE=rstudio/rsconnect-jupyter-py$* PY_VERSION=$* TARGET=test launch @@ -80,6 +79,7 @@ run: install .PHONY: install install: pipenv install --dev + pipenv run pip install -e . pipenv run jupyter-nbextension install --symlink --user --py rsconnect_jupyter pipenv run jupyter-nbextension enable --py rsconnect_jupyter pipenv run jupyter-serverextension enable --py rsconnect_jupyter @@ -102,12 +102,21 @@ yarn: yarn install .PHONY: lint -lint: lint-js +lint: lint-js lint-py .PHONY: lint-js lint-js: npm run lint +.PHONY: lint-py +lint-py: + pipenv run black --check --diff . + pipenv run flake8 . + +.PHONY: fmt +fmt: + pipenv run black . + ## Specify that Docker runs with the calling user's uid/gid to avoid file ## permission issues on Linux dev hosts. DOCKER_RUN_AS = @@ -146,3 +155,21 @@ sync-latest-to-s3: aws s3 cp --acl bucket-owner-full-control \ $(BDIST_WHEEL) \ $(S3_PREFIX)/latest/rsconnect_jupyter-latest-py2.py3-none-any.whl + +.PHONY: sync-latest-docs-to-s3 +sync-latest-docs-to-s3: + aws s3 cp --acl bucket-owner-full-control \ + docs/out/rsconnect_jupyter-$(VERSION).html \ + $(S3_PREFIX)/latest/rsconnect_jupyter-latest.html + aws s3 cp --acl bucket-owner-full-control \ + docs/out/rsconnect_jupyter-$(VERSION).pdf \ + $(S3_PREFIX)/latest/rsconnect_jupyter-latest.pdf + +.PHONY: promote-docs-in-s3 +promote-docs-in-s3: + aws s3 cp --acl bucket-owner-full-control \ + docs/out/rsconnect_jupyter-$(VERSION).html \ + s3://docs.rstudio.com/rsconnect-jupyter/rsconnect_jupyter-$(VERSION).html + aws s3 cp --acl bucket-owner-full-control \ + docs/out/rsconnect_jupyter-$(VERSION).html \ + s3://docs.rstudio.com/rsconnect-jupyter/index.html diff --git a/conftest.py b/conftest.py new file mode 100644 index 00000000..8b8d91ac --- /dev/null +++ b/conftest.py @@ -0,0 +1,6 @@ +import os.path +import sys + +HERE = os.path.dirname(os.path.abspath(__file__)) + +sys.path.insert(0, HERE) diff --git a/mock_connect.py b/mock_connect.py index 52c85d68..41ed19b7 100644 --- a/mock_connect.py +++ b/mock_connect.py @@ -1,4 +1,3 @@ - # Installation: # virtualenv flask # source flask/bin/activate @@ -28,9 +27,7 @@ def set_code(response): after_this_request(set_code) - return { - 'error': reason - } + return {"error": reason} class IdGenerator(object): @@ -48,13 +45,11 @@ def next(self): apps, app_id_generator = {}, IdGenerator() bundles, bundle_id_generator = {}, IdGenerator() tasks, task_id_generator = {}, IdGenerator() -api_keys = { - '0123456789abcdef0123456789abcdef': 'admin' -} +api_keys = {"0123456789abcdef0123456789abcdef": "admin"} # noinspection SpellCheckingInspection users = { - 'admin': { + "admin": { "username": "admin", "active_time": "2018-08-30T23:49:18.421238194Z", "first_name": "Super", @@ -79,7 +74,7 @@ def next(self): "remove_users", "remove_vanities", "view_app_settings", - "view_apps" + "view_apps", ], "guid": "29a74070-2c13-4ef9-a898-cfc6bcf0f275", "user_role": "administrator", @@ -87,7 +82,7 @@ def next(self): "confirmed": True, "created_time": "2018-08-29T19:25:23.68280816Z", "password": "", - "email": "admin@example.com" + "email": "admin@example.com", } } @@ -95,8 +90,8 @@ def next(self): def authenticated(f): @wraps(f) def wrapper(*args, **kw): - auth = request.headers.get('Authorization') - if auth is None or not auth.startswith('Key '): + auth = request.headers.get("Authorization") + if auth is None or not auth.startswith("Key "): abort(401) key = auth[4:] if key not in api_keys: @@ -104,6 +99,7 @@ def wrapper(*args, **kw): g.user = users[api_keys[key]] return f(*args, **kw) + return wrapper @@ -111,6 +107,7 @@ def json(f): @wraps(f) def wrapper(*args, **kw): return jsonify(f(*args, **kw)) + return wrapper @@ -120,21 +117,23 @@ def decorator(f): def wrapper(object_id, *args, **kw): item = d.get(object_id) if item is None: - return dumps(error(404, 'Not found')) + return dumps(error(404, "Not found")) return f(item, *args, **kw) + return wrapper + return decorator -api = Blueprint('api', __name__) +api = Blueprint("api", __name__) -@app.route('/') +@app.route("/") def index(): - return 'Welcome to Mock Connect!' + return "Welcome to Mock Connect!" -@api.route('me') +@api.route("me") @authenticated @json def me(): @@ -142,58 +141,58 @@ def me(): def timestamp(): - return datetime.utcnow().replace(microsecond=0).isoformat() + 'Z' + return datetime.utcnow().replace(microsecond=0).isoformat() + "Z" -@api.route('applications', methods=['GET', 'POST']) +@api.route("applications", methods=["GET", "POST"]) @authenticated @json def applications(): - if request.method == 'POST': + if request.method == "POST": connect_app = request.get_json(force=True) - name = connect_app.get('name') - if name and [existing_app for existing_app in apps.values() if existing_app.get('name') == name]: - return error(409, 'An object with that name already exists.') - - connect_app['id'] = app_id_generator.next() - connect_app['guid'] = str(uuid.uuid4()) - connect_app['url'] = '{0}content/{1}'.format(url_for('index', _external=True), connect_app['id']) - connect_app['owner_username'] = g.user.get('username') - connect_app['owner_first_name'] = g.user.get('first_name') - connect_app['owner_last_name'] = g.user.get('last_name') - connect_app['owner_email'] = g.user.get('email') - connect_app['owner_locked'] = g.user.get('locked') - connect_app['bundle_id'] = None - connect_app['needs_config'] = True - connect_app['access_type'] = None - connect_app['description'] = '' - connect_app['app_mode'] = None - connect_app['created_time'] = timestamp() - connect_app.setdefault('title', '') - apps[str(connect_app['id'])] = connect_app + name = connect_app.get("name") + if name and [existing_app for existing_app in apps.values() if existing_app.get("name") == name]: + return error(409, "An object with that name already exists.") + + connect_app["id"] = app_id_generator.next() + connect_app["guid"] = str(uuid.uuid4()) + connect_app["url"] = "{0}content/{1}".format(url_for("index", _external=True), connect_app["id"]) + connect_app["owner_username"] = g.user.get("username") + connect_app["owner_first_name"] = g.user.get("first_name") + connect_app["owner_last_name"] = g.user.get("last_name") + connect_app["owner_email"] = g.user.get("email") + connect_app["owner_locked"] = g.user.get("locked") + connect_app["bundle_id"] = None + connect_app["needs_config"] = True + connect_app["access_type"] = None + connect_app["description"] = "" + connect_app["app_mode"] = None + connect_app["created_time"] = timestamp() + connect_app.setdefault("title", "") + apps[str(connect_app["id"])] = connect_app return connect_app else: - count = int(request.args.get('count', 10000)) - search = request.args.get('search') + count = int(request.args.get("count", 10000)) + search = request.args.get("search") def match(app_to_match): - return search is None or (app_to_match.get('title') or '').startswith(search) + return search is None or (app_to_match.get("title") or "").startswith(search) matches = list(filter(match, apps.values()))[:count] return { - 'count': len(matches), - 'total': len(matches), - 'applications': matches, + "count": len(matches), + "total": len(matches), + "applications": matches, } # noinspection PyUnresolvedReferences -@api.route('applications/', methods=['GET', 'POST']) +@api.route("applications/", methods=["GET", "POST"]) @authenticated @json @item_by_id(apps) def application(connect_app): - if request.method == 'GET': + if request.method == "GET": return connect_app else: connect_app.update(request.get_json(force=True)) @@ -201,18 +200,16 @@ def application(connect_app): # noinspection PyUnresolvedReferences -@api.route('applications//config') +@api.route("applications//config") @authenticated @json @item_by_id(apps) def config(connect_app): - return { - 'config_url': '{0}content/apps/{1}'.format(url_for('index', _external=True), connect_app['id']) - } + return {"config_url": "{0}content/apps/{1}".format(url_for("index", _external=True), connect_app["id"])} # noinspection PyUnresolvedReferences -@api.route('applications//upload', methods=['POST']) +@api.route("applications//upload", methods=["POST"]) @authenticated @json @item_by_id(apps) @@ -221,10 +218,10 @@ def upload(connect_app): ts = timestamp() bundle = { - 'id': bundle_id, - 'app_id': connect_app['id'], - 'created_time': ts, - 'updated_time': ts, + "id": bundle_id, + "app_id": connect_app["id"], + "created_time": ts, + "updated_time": ts, } bundles[bundle_id] = (bundle, request.data) return bundle @@ -232,74 +229,74 @@ def upload(connect_app): def read_bundle_file(tarball, filename): bio = io.BytesIO(tarball) - with tarfile.open('r:gz', fileobj=bio) as tar: + with tarfile.open("r:gz", fileobj=bio) as tar: return tar.extractfile(filename).read() def read_manifest(tarball): - manifest_data = read_bundle_file(tarball, 'manifest.json').decode('utf-8') + manifest_data = read_bundle_file(tarball, "manifest.json").decode("utf-8") return loads(manifest_data) def read_html(tarball): manifest = read_manifest(tarball) - meta = manifest['metadata'] + meta = manifest["metadata"] # noinspection SpellCheckingInspection - filename = meta.get('primary_html') or meta.get('entrypoint') - return read_bundle_file(tarball, filename).decode('utf-8') + filename = meta.get("primary_html") or meta.get("entrypoint") + return read_bundle_file(tarball, filename).decode("utf-8") app_modes = { - 'static': 4, - 'jupyter-static': 7, + "static": 4, + "jupyter-static": 7, } # noinspection PyUnresolvedReferences -@api.route('applications//deploy', methods=['POST']) +@api.route("applications//deploy", methods=["POST"]) @authenticated @json @item_by_id(apps) def deploy(connect_app): - bundle_id = request.get_json(force=True).get('bundle') + bundle_id = request.get_json(force=True).get("bundle") if bundle_id is None: - return error(400, 'bundle_id is required') # message and status code probably wrong + return error(400, "bundle_id is required") # message and status code probably wrong if bundle_id not in bundles: - return error(404, 'bundle %s not found' % bundle_id) # message and status code probably wrong + return error(404, "bundle %s not found" % bundle_id) # message and status code probably wrong bundle, tarball = bundles[bundle_id] manifest = read_manifest(tarball) pprint(manifest) - old_app_mode = connect_app['app_mode'] + old_app_mode = connect_app["app_mode"] # noinspection SpellCheckingInspection - new_app_mode = app_modes[manifest['metadata']['appmode']] + new_app_mode = app_modes[manifest["metadata"]["appmode"]] if old_app_mode is not None and old_app_mode != new_app_mode: - return error(400, 'Cannot change app mode once deployed') # message and status code probably wrong + return error(400, "Cannot change app mode once deployed") # message and status code probably wrong - connect_app['app_mode'] = new_app_mode - connect_app['bundle_id'] = bundle_id - connect_app['last_deployed_time'] = timestamp() + connect_app["app_mode"] = new_app_mode + connect_app["bundle_id"] = bundle_id + connect_app["last_deployed_time"] = timestamp() task_id = task_id_generator.next() task = { - 'id': task_id, - 'user_id': 0, - 'finished': True, - 'code': 0, - 'error': '', - 'last_status': 0, - 'status': ['Building static content', 'Deploying static content'], + "id": task_id, + "user_id": 0, + "finished": True, + "code": 0, + "error": "", + "last_status": 0, + "status": ["Building static content", "Deploying static content"], } tasks[str(task_id)] = task return task # noinspection PyUnresolvedReferences -@api.route('tasks/') +@api.route("tasks/") @authenticated @json @item_by_id(tasks) @@ -307,7 +304,7 @@ def get_task(task): return task -@api.route('server_settings') +@api.route("server_settings") @json def server_settings(): # for our purposes, any non-error response will do @@ -315,11 +312,11 @@ def server_settings(): # noinspection PyUnresolvedReferences -@app.route('/content/apps/') +@app.route("/content/apps/") @item_by_id(apps) def content(connect_app): - bundle, tarball = bundles[connect_app['bundle_id']] + bundle, tarball = bundles[connect_app["bundle_id"]] return read_html(tarball) -app.register_blueprint(api, url_prefix='/__api__') +app.register_blueprint(api, url_prefix="/__api__") diff --git a/rsconnect_jupyter/__init__.py b/rsconnect_jupyter/__init__.py index e14a468c..c82a6856 100644 --- a/rsconnect_jupyter/__init__.py +++ b/rsconnect_jupyter/__init__.py @@ -4,7 +4,7 @@ import sys from six.moves.urllib.parse import unquote_plus -from os.path import dirname, join +from os.path import dirname from notebook.base.handlers import APIHandler from notebook.utils import url_path_join @@ -12,9 +12,18 @@ from rsconnect import VERSION from rsconnect.actions import test_server -from rsconnect.api import verify_api_key, RSConnect, RSConnectException, RSConnectServer, \ - override_title_search -from rsconnect.bundle import make_notebook_html_bundle, make_notebook_source_bundle, write_manifest +from rsconnect.api import ( + RSConnect, + RSConnectException, + RSConnectServer, + override_title_search, + verify_api_key, +) +from rsconnect.bundle import ( + make_notebook_html_bundle, + make_notebook_source_bundle, + write_manifest, +) from rsconnect.http_support import CookieJar from ssl import SSLError @@ -26,26 +35,27 @@ def _jupyter_server_extension_paths(): - return [{ - "module": "rsconnect_jupyter" - }] + return [{"module": "rsconnect_jupyter"}] # Jupyter Extension points def _jupyter_nbextension_paths(): - return [dict( - section="notebook", - # the path is relative to the `rsconnect` directory - src="static", - # directory in the `nbextension/` namespace - dest="rsconnect_jupyter", - # _also_ in the `nbextension/` namespace - require="rsconnect_jupyter/index")] + return [ + dict( + section="notebook", + # the path is relative to the `rsconnect` directory + src="static", + # directory in the `nbextension/` namespace + dest="rsconnect_jupyter", + # _also_ in the `nbextension/` namespace + require="rsconnect_jupyter/index", + ) + ] def md5(s): - if hasattr(s, 'encode'): - s = s.encode('utf-8') + if hasattr(s, "encode"): + s = s.encode("utf-8") h = hashlib.md5() h.update(s) @@ -54,130 +64,135 @@ def md5(s): # https://github.com/jupyter/notebook/blob/master/notebook/base/handlers.py class EndpointHandler(APIHandler): - @web.authenticated def post(self, action): data = self.get_json_body() - if action == 'verify_server': - server_address = data['server_address'] - api_key = data['api_key'] - disable_tls_check = data['disable_tls_check'] - cadata = data.get('cadata', None) + if action == "verify_server": + server_address = data["server_address"] + api_key = data["api_key"] + disable_tls_check = data["disable_tls_check"] + cadata = data.get("cadata", None) try: - canonical_address, result = test_server(RSConnectServer(server_address, api_key, disable_tls_check, cadata)) + canonical_address, result = test_server( + RSConnectServer(server_address, api_key, disable_tls_check, cadata) + ) except SSLError as exc: - if exc.reason == u'UNKNOWN_PROTOCOL': - raise web.HTTPError(400, - u'Received an "SSL:UNKNOWN_PROTOCOL" error when trying to connect securely ' + - u'to the RStudio Connect server.\n' + - u'* Try changing "https://" in the "Server Address" field to "http://".\n' + - u'* If the condition persists, contact your RStudio Connect server ' + - u'administrator.') - raise web.HTTPError(400, u'A TLS error occurred when trying to reach the RStudio Connect server.\n' + - u'* Ensure that the server address you entered is correct.\n' + - u'* Ask your RStudio Connect administrator if you need a certificate bundle and\n' + - u' upload it using "Upload TLS Certificate Bundle" below.') + if exc.reason == u"UNKNOWN_PROTOCOL": + raise web.HTTPError( + 400, + u'Received an "SSL:UNKNOWN_PROTOCOL" error when trying to connect securely ' + + u"to the RStudio Connect server.\n" + + u'* Try changing "https://" in the "Server Address" field to "http://".\n' + + u"* If the condition persists, contact your RStudio Connect server " + + u"administrator.", + ) + raise web.HTTPError( + 400, + u"A TLS error occurred when trying to reach the RStudio Connect server.\n" + + u"* Ensure that the server address you entered is correct.\n" + + u"* Ask your RStudio Connect administrator if you need a certificate bundle and\n" + + u' upload it using "Upload TLS Certificate Bundle" below.', + ) except Exception as err: - raise web.HTTPError(400, u'Unable to verify that the provided server is running RStudio Connect: %s' % err) + raise web.HTTPError( + 400, u"Unable to verify that the provided server is running RStudio Connect: %s" % err, + ) if canonical_address is not None: uri = canonical_address.url try: verify_api_key(RSConnectServer(uri, api_key, disable_tls_check, cadata)) address_hash = md5(server_address) - self.finish(json.dumps({ - 'status': 'Provided server is running RStudio Connect', - 'address_hash': address_hash, - 'server_address': canonical_address.url, - })) + self.finish( + json.dumps( + { + "status": "Provided server is running RStudio Connect", + "address_hash": address_hash, + "server_address": canonical_address.url, + } + ) + ) except RSConnectException: - raise web.HTTPError(401, u'Unable to verify the provided API key') + raise web.HTTPError(401, u"Unable to verify the provided API key") return - if action == 'app_search': - uri = data['server_address'] - api_key = data['api_key'] - title = data['notebook_title'] - app_id = data.get('app_id') - disable_tls_check = data['disable_tls_check'] - cadata = data.get('cadata', None) + if action == "app_search": + uri = data["server_address"] + api_key = data["api_key"] + title = data["notebook_title"] + app_id = data.get("app_id") + disable_tls_check = data["disable_tls_check"] + cadata = data.get("cadata", None) try: - server = RSConnectServer( - uri, - api_key, - disable_tls_check, - cadata - ) - retval = override_title_search( - server, - app_id, - title - ) + server = RSConnectServer(uri, api_key, disable_tls_check, cadata) + retval = override_title_search(server, app_id, title) except RSConnectException as exc: raise web.HTTPError(400, exc.message) self.finish(json.dumps(retval)) return - if action == 'deploy': - uri = data['server_address'] - app_id = data.get('app_id') - nb_title = data['notebook_title'] - nb_name = data['notebook_name'] - nb_path = unquote_plus(data['notebook_path'].strip('/')) - api_key = data['api_key'] - app_mode = data['app_mode'] - environment = data.get('environment') - disable_tls_check = data['disable_tls_check'] - cadata = data.get('cadata', None) - extra_files = data.get('files', []) + if action == "deploy": + uri = data["server_address"] + app_id = data.get("app_id") + nb_title = data["notebook_title"] + nb_name = data["notebook_name"] + nb_path = unquote_plus(data["notebook_path"].strip("/")) + api_key = data["api_key"] + app_mode = data["app_mode"] + environment = data.get("environment") + disable_tls_check = data["disable_tls_check"] + cadata = data.get("cadata", None) + extra_files = data.get("files", []) model = self.contents_manager.get(path=nb_path) - if model['type'] != 'notebook': + if model["type"] != "notebook": # not a notebook raise web.HTTPError(400, u"Not a notebook: %s" % nb_path) - if not hasattr(self.contents_manager, '_get_os_path'): + if not hasattr(self.contents_manager, "_get_os_path"): raise web.HTTPError(400, u"Notebook does not live on a mounted filesystem") os_path = self.contents_manager._get_os_path(nb_path) - if app_mode == 'static': + if app_mode == "static": try: bundle = make_notebook_html_bundle(os_path, sys.executable) except Exception as exc: - self.log.exception('Bundle creation failed') + self.log.exception("Bundle creation failed") raise web.HTTPError(500, u"Bundle creation failed: %s" % exc) - elif app_mode == 'jupyter-static': + elif app_mode == "jupyter-static": if not environment: - raise web.HTTPError(400, 'environment is required for jupyter-static app_mode') + raise web.HTTPError(400, "environment is required for jupyter-static app_mode") try: bundle = make_notebook_source_bundle(os_path, environment, extra_files) except Exception as exc: - self.log.exception('Bundle creation failed') + self.log.exception("Bundle creation failed") raise web.HTTPError(500, u"Bundle creation failed: %s" % exc) else: - raise web.HTTPError(400, 'Invalid app_mode: %s, must be "static" or "jupyter-static"' % app_mode) + raise web.HTTPError( + 400, 'Invalid app_mode: %s, must be "static" or "jupyter-static"' % app_mode, + ) try: server = RSConnectServer(uri, api_key, disable_tls_check, cadata) with RSConnect(server) as api_client: retval = api_client.deploy(app_id, nb_name, nb_title, nb_title is not None, bundle) - retval['cookies'] = server.cookie_jar.as_dict() + retval["cookies"] = server.cookie_jar.as_dict() except RSConnectException as exc: raise web.HTTPError(400, exc.message) self.finish(json.dumps(retval)) return - if action == 'app_get': - uri = data['server_address'] - api_key = data['api_key'] - app_id = data['app_id'] - disable_tls_check = data['disable_tls_check'] - cadata = data.get('cadata', None) + if action == "app_get": + uri = data["server_address"] + api_key = data["api_key"] + app_id = data["app_id"] + disable_tls_check = data["disable_tls_check"] + cadata = data.get("cadata", None) try: server = RSConnectServer(uri, api_key, disable_tls_check, cadata) @@ -188,14 +203,14 @@ def post(self, action): self.finish(json.dumps(retval)) return - if action == 'get_log': - uri = data['server_address'] - api_key = data['api_key'] - task_id = data['task_id'] - last_status = data['last_status'] - cookie_source = data.get('cookies', {}) - disable_tls_check = data['disable_tls_check'] - cadata = data.get('cadata', None) + if action == "get_log": + uri = data["server_address"] + api_key = data["api_key"] + task_id = data["task_id"] + last_status = data["last_status"] + cookie_source = data.get("cookies", {}) + disable_tls_check = data["disable_tls_check"] + cadata = data.get("cadata", None) try: rs_connect_server = RSConnectServer(uri, api_key, disable_tls_check, cadata) @@ -208,12 +223,12 @@ def post(self, action): self.finish(json.dumps(retval)) return - if action == 'app_config': - uri = data['server_address'] - api_key = data['api_key'] - app_id = data['app_id'] - disable_tls_check = data['disable_tls_check'] - cadata = data.get('cadata', None) + if action == "app_config": + uri = data["server_address"] + api_key = data["api_key"] + app_id = data["app_id"] + disable_tls_check = data["disable_tls_check"] + cadata = data.get("cadata", None) try: server = RSConnectServer(uri, api_key, disable_tls_check, cadata) @@ -225,22 +240,22 @@ def post(self, action): self.finish(json.dumps(retval)) return - if action == 'write_manifest': - environment = data['environment'] - nb_path = unquote_plus(data['notebook_path'].strip('/')) - relative_dir = os.path.dirname(nb_path) + if action == "write_manifest": + environment = data["environment"] + nb_path = unquote_plus(data["notebook_path"].strip("/")) + relative_dir = dirname(nb_path) os_path = self.contents_manager._get_os_path(nb_path) - output_dir = os.path.dirname(os_path) + output_dir = dirname(os_path) nb_name = os.path.basename(os_path) created, skipped = write_manifest(relative_dir, nb_name, environment, output_dir) self.finish(json.dumps({"created": created, "skipped": skipped})) return - if action == 'get_python_settings': - uri = data['server_address'] - api_key = data['api_key'] - disable_tls_check = data['disable_tls_check'] - cadata = data.get('cadata', None) + if action == "get_python_settings": + uri = data["server_address"] + api_key = data["api_key"] + disable_tls_check = data["disable_tls_check"] + cadata = data.get("cadata", None) try: server = RSConnectServer(uri, api_key, disable_tls_check, cadata) @@ -254,19 +269,23 @@ def post(self, action): @web.authenticated def get(self, action): - if action == 'plugin_version': + if action == "plugin_version": rsconnect_jupyter_server_extension = __version__ rsconnect_python_version = VERSION - self.finish(json.dumps({ - "rsconnect_jupyter_server_extension": rsconnect_jupyter_server_extension, - "rsconnect_python_version": rsconnect_python_version - })) + self.finish( + json.dumps( + { + "rsconnect_jupyter_server_extension": rsconnect_jupyter_server_extension, + "rsconnect_python_version": rsconnect_python_version, + } + ) + ) def load_jupyter_server_extension(nb_app): nb_app.log.info("rsconnect_jupyter enabled!") web_app = nb_app.web_app - host_pattern = '.*$' - action_pattern = r'(?P\w+)' - route_pattern = url_path_join(web_app.settings['base_url'], r'/rsconnect_jupyter/%s' % action_pattern) + host_pattern = ".*$" + action_pattern = r"(?P\w+)" + route_pattern = url_path_join(web_app.settings["base_url"], r"/rsconnect_jupyter/%s" % action_pattern) web_app.add_handlers(host_pattern, [(route_pattern, EndpointHandler)]) diff --git a/rsconnect_jupyter/tests/__init__.py b/rsconnect_jupyter/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/rsconnect_jupyter/tests/data/pip1/dummy.ipynb b/rsconnect_jupyter/tests/data/pip1/dummy.ipynb deleted file mode 100644 index 76fe3342..00000000 --- a/rsconnect_jupyter/tests/data/pip1/dummy.ipynb +++ /dev/null @@ -1,52 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'this is a notebook'" - ] - }, - "execution_count": 1, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "\"this is a notebook\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.6" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/rsconnect_jupyter/tests/data/pip1/requirements.txt b/rsconnect_jupyter/tests/data/pip1/requirements.txt deleted file mode 100644 index 40218701..00000000 --- a/rsconnect_jupyter/tests/data/pip1/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -numpy -pandas -matplotlib diff --git a/rsconnect_jupyter/tests/data/pip2/data.csv b/rsconnect_jupyter/tests/data/pip2/data.csv deleted file mode 100644 index 2763b7f8..00000000 --- a/rsconnect_jupyter/tests/data/pip2/data.csv +++ /dev/null @@ -1,9 +0,0 @@ -Label,Value -black,0 -blue,1 -green,2 -cyan,3 -red,4 -magenta,5 -yellow,6 -white,7 diff --git a/rsconnect_jupyter/tests/data/pip2/dummy.ipynb b/rsconnect_jupyter/tests/data/pip2/dummy.ipynb deleted file mode 100644 index 76fe3342..00000000 --- a/rsconnect_jupyter/tests/data/pip2/dummy.ipynb +++ /dev/null @@ -1,52 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'this is a notebook'" - ] - }, - "execution_count": 1, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "\"this is a notebook\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.6" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/selenium/Makefile b/selenium/Makefile index 7f50f20c..fc9ea7cf 100644 --- a/selenium/Makefile +++ b/selenium/Makefile @@ -50,7 +50,8 @@ DOCKER_RUN_COMMAND=docker run --rm --init \ --workdir=${RSCONNECT_DIR}/selenium \ ${TRE_IMAGE} -TEST_RUNNER_COMMAND=pytest \ +TEST_RUNNER_COMMAND = \ + pytest \ --junitxml=${RESULT_XML} \ --driver=Remote \ --host=${GRID_HOST} \ @@ -60,10 +61,10 @@ TEST_RUNNER_COMMAND=pytest \ --connect-url='${CONNECT_SCHEME}://${CONNECT_HOST}:${CONNECT_PORT}' \ --data-dir=${RSCONNECT_DIR}/selenium/data \ --notebooks-dir=${NOTEBOOKS_DIR} \ - --verbose \ + -vv \ --tb=short \ --reruns=${RERUN_FAILURES} \ - -m "not (fail or systemstat)" \ + -m rsconnect_jupyter \ ${PYTESTOPTS} ifdef DEBUG diff --git a/selenium/conftest.py b/selenium/conftest.py index 916c274b..4036eed2 100644 --- a/selenium/conftest.py +++ b/selenium/conftest.py @@ -1,5 +1,4 @@ import argparse -import logging import os import pytest import secrets @@ -9,8 +8,6 @@ import shutil import string -from selene.api import be - # set the default selene reports folder # to the present working directory @@ -49,40 +46,31 @@ def pytest_addoption(parser): """Define and parse command line options""" parser.addoption( - "--selene-reports", - action=ReportsAction, - help="parent directory for storing selene test reports") + "--selene-reports", action=ReportsAction, help="parent directory for storing selene test reports", + ) parser.addoption( - "--selene-timeout", - action=TimeoutAction, - default=4, - type=int, - help="set the default timeout in selene") + "--selene-timeout", action=TimeoutAction, default=4, type=int, help="set the default timeout in selene", + ) parser.addoption( - "--jupyter-url", - action="store", - default="http://jupyter-py2/", - help="URI of the Jupyter system under test") + "--jupyter-url", action="store", default="http://jupyter-py2/", help="URI of the Jupyter system under test", + ) parser.addoption( "--connect-url", action="store", default="http://mock-connect/", - help="URI of the Connect server where content is deployed") + help="URI of the Connect server where content is deployed", + ) parser.addoption( - "--data-dir", - action="store", - default="/selenium/data", - help="Directory where data files are stored") + "--data-dir", action="store", default="/selenium/data", help="Directory where data files are stored", + ) parser.addoption( - "--notebooks-dir", - action="store", - default="/notebooks", - help="Directory where Jupyter Notebooks are stored") + "--notebooks-dir", action="store", default="/notebooks", help="Directory where Jupyter Notebooks are stored", + ) def log_web_error(msg): @@ -93,8 +81,10 @@ def log_web_error(msg): """ screenshot = selene.helpers.take_screenshot(selene.browser.driver(),) - msg = '''{original_msg} - screenshot: file://{screenshot}'''.format(original_msg=msg, screenshot=screenshot) + msg = """{original_msg} + screenshot: file://{screenshot}""".format( + original_msg=msg, screenshot=screenshot + ) return msg @@ -147,16 +137,18 @@ def browser_config(driver): @pytest.fixture(autouse=True) def skip_by_browser(request, session_capabilities): - if request.node.get_marker('skip_browser'): - if request.node.get_marker('skip_browser').args[0] == session_capabilities['browserName']: - pytest.skip('skipped on this browser: {}'.format(session_capabilities['browserName'])) + if request.node.get_marker("skip_browser"): + if request.node.get_marker("skip_browser").args[0] == session_capabilities["browserName"]: + pytest.skip("skipped on this browser: {}".format(session_capabilities["browserName"])) -def generate_random_string(length=8, charset="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*()"): +def generate_random_string( + length=8, charset="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*()", +): """Randomly pick chars from an alphabet """ - return ''.join(secrets.choice(charset) for i in range(length)) + return "".join(secrets.choice(charset) for i in range(length)) def generate_content_name(): @@ -166,10 +158,9 @@ def generate_content_name(): # start with a letter for safety alphabet1 = string.ascii_letters - alphabet2 = string.ascii_letters + string.digits + '-_' + alphabet2 = string.ascii_letters + string.digits + "-_" - name = generate_random_string(1,alphabet1) \ - + generate_random_string(10,alphabet2) + name = generate_random_string(1, alphabet1) + generate_random_string(10, alphabet2) return name @@ -180,10 +171,10 @@ def notebook(data_dir, notebooks_dir): """ # file that will be used to generate the new notebook - template_path = os.path.join(data_dir,'spiro.ipynb') + template_path = os.path.join(data_dir, "spiro.ipynb") # name of the new notebook - notebook_fname = generate_content_name() + '.ipynb' + notebook_fname = generate_content_name() + ".ipynb" notebook_path = os.path.join(notebooks_dir, notebook_fname) # copy the template to create the new notebook diff --git a/selenium/t/pages/add_server_form.py b/selenium/t/pages/add_server_form.py index 4cf0ee7e..9eeced39 100644 --- a/selenium/t/pages/add_server_form.py +++ b/selenium/t/pages/add_server_form.py @@ -2,10 +2,10 @@ from .form_base import FormBase -class AddServerForm(FormBase): +class AddServerForm(FormBase): def __init__(self): - self._fields = ['address', 'api_key', 'name'] + self._fields = ["address", "api_key", "name"] @property def close(self): diff --git a/selenium/t/pages/checkbox.py b/selenium/t/pages/checkbox.py index 782522c9..df00f404 100644 --- a/selenium/t/pages/checkbox.py +++ b/selenium/t/pages/checkbox.py @@ -1,12 +1,7 @@ class Checkbox(object): - def __init__(self, element): - - # type: (SeleneElement) -> None self._element = element - def set(self, value): - if self._element.is_selected() is not value: self._element.click() diff --git a/selenium/t/pages/content_selection.py b/selenium/t/pages/content_selection.py index d7d52010..c3c3c9e0 100644 --- a/selenium/t/pages/content_selection.py +++ b/selenium/t/pages/content_selection.py @@ -1,9 +1,7 @@ from selene.api import s, by -from .form_base import FormBase class ContentSelectionDialog: - @property def title(self): return s(by.css(".modal-title")) diff --git a/selenium/t/pages/form_base.py b/selenium/t/pages/form_base.py index f1ba599e..bbd5cbe1 100644 --- a/selenium/t/pages/form_base.py +++ b/selenium/t/pages/form_base.py @@ -1,39 +1,34 @@ class FormBase(object): - def __init__(self): - self._locators = { - } + self._locators = {} self._fields = [] - @property def submit(self): return None - def populate_form(self, data): """populate the form with data from the data parameter""" - if hasattr(data,'items'): + if hasattr(data, "items"): # convert dictionaries to lists # so we can support filling out forms in order data = data.items() - for (k,v) in data: + for (k, v) in data: if v is None: continue if k not in self._fields: # bail, the key is not a field raise ValueError("invalid form field: %s" % (k)) # find the widget in the object's dictionary and set its value - widget = getattr(self,k) + widget = getattr(self, k) widget.set(v) return self - def submit_form(self, data=None): if data is not None: diff --git a/selenium/t/pages/main_toolbar.py b/selenium/t/pages/main_toolbar.py index c5194817..ce879bfb 100644 --- a/selenium/t/pages/main_toolbar.py +++ b/selenium/t/pages/main_toolbar.py @@ -2,7 +2,6 @@ class MainToolBar(object): - @property def rsconnect_dropdown(self): return s(by.css("[title='Publish to RStudio Connect']")) diff --git a/selenium/t/pages/publish_content_form.py b/selenium/t/pages/publish_content_form.py index 166c6aee..2209f6d3 100644 --- a/selenium/t/pages/publish_content_form.py +++ b/selenium/t/pages/publish_content_form.py @@ -2,10 +2,10 @@ from .form_base import FormBase -class PublishContentForm(FormBase): +class PublishContentForm(FormBase): def __init__(self): - self._fields = ['address', 'name'] + self._fields = ["address", "name"] @property def close(self): @@ -33,12 +33,12 @@ def publish_with_source(self): @property def title(self): - return s(by.css('#rsc-content-title')) + return s(by.css("#rsc-content-title")) @property def title_error(self): - return s(by.css('#rsc-deploy-error')) + return s(by.css("#rsc-deploy-error")) @property def version_info(self): - return s(by.css('#version-info')) + return s(by.css("#version-info")) diff --git a/selenium/t/pages/select_list.py b/selenium/t/pages/select_list.py index 6c4a1a71..2e64dfb7 100644 --- a/selenium/t/pages/select_list.py +++ b/selenium/t/pages/select_list.py @@ -3,14 +3,13 @@ class SelectList(object): def __init__(self, element): - # type: (SeleneElement) -> None self._element = element def open(self): self._element.click() def _options(self): - return self._element.all('option') + return self._element.all("option") def select_by_value(self, value): self._options().element_by(have.value(value)).click() diff --git a/selenium/t/test_add_server.py b/selenium/t/test_add_server.py index 749ddee4..2619f965 100644 --- a/selenium/t/test_add_server.py +++ b/selenium/t/test_add_server.py @@ -11,13 +11,13 @@ from conftest import generate_random_string -pytestmark = [ pytest.mark.rsconnect_jupyter, - pytest.mark.add_server, - ] +pytestmark = [ + pytest.mark.rsconnect_jupyter, + pytest.mark.add_server, +] class TestAddServer(object): - @pytest.fixture(autouse=True) def setup(self, browser_config, jupyter_url, notebook): """Navigate to the front page @@ -28,31 +28,24 @@ def setup(self, browser_config, jupyter_url, notebook): # navigate to the notebook browser.open_url(jupyter_url + notebook) - MainToolBar(). \ - rsconnect_dropdown.click() + MainToolBar().rsconnect_dropdown.click() MainToolBar().rsconnect_publish.should(be.visible) - MainToolBar(). \ - rsconnect_publish.click() + MainToolBar().rsconnect_publish.click() def test_valid_address_valid_name(self, connect_url): """Fill in the add server form with valid address and name """ server_name = generate_random_string() - api_key = '0123456789abcdef0123456789abcdef' + api_key = "0123456789abcdef0123456789abcdef" # WOAH THERE BUDDY, NOT SO FAST sleep(1) - AddServerForm() \ - .populate_form({ - 'address' : connect_url, - 'api_key' : api_key, - 'name' : server_name, - }) + AddServerForm().populate_form( + {"address": connect_url, "api_key": api_key, "name": server_name,} + ) - AddServerForm() \ - .submit_form() + AddServerForm().submit_form() - PublishContentForm() \ - .add_server.should(be.visible) + PublishContentForm().add_server.should(be.visible) diff --git a/selenium/t/test_publish_source.py b/selenium/t/test_publish_source.py index 2cda0d72..ffc6772f 100644 --- a/selenium/t/test_publish_source.py +++ b/selenium/t/test_publish_source.py @@ -4,19 +4,16 @@ from selene.api import browser, be, have from .pages.main_toolbar import MainToolBar -from .pages.add_server_form import AddServerForm from .pages.publish_content_form import PublishContentForm -from conftest import generate_random_string - -pytestmark = [ pytest.mark.rsconnect_jupyter, - pytest.mark.publish_source, - ] +pytestmark = [ + pytest.mark.rsconnect_jupyter, + pytest.mark.publish_source, +] class TestPublishSource(object): - @pytest.fixture(autouse=True) def setup(self, browser_config, jupyter_url, notebook, connect_url): """Navigate to the front page @@ -26,12 +23,10 @@ def setup(self, browser_config, jupyter_url, notebook, connect_url): # navigate to the notebook browser.open_url(jupyter_url + notebook) - MainToolBar(). \ - rsconnect_dropdown.click() + MainToolBar().rsconnect_dropdown.click() MainToolBar().rsconnect_publish.should(be.visible) MainToolBar().rsconnect_publish.click() - def test_publish_source(self, connect_url): """Publish a document with source """ @@ -40,12 +35,12 @@ def test_publish_source(self, connect_url): sleep(1) pf.version_info.should(be.visible) - pf.version_info.should(have.text('rsconnect-python version')) - pf.title.set_value('NotebookSource') + pf.version_info.should(have.text("rsconnect-python version")) + pf.title.set_value("NotebookSource") pf.publish_with_source.click() pf.submit.click() m = MainToolBar() notification = m.rsconnect_notification notification.should(be.visible) - notification.should(have.text('Successfully published content')) + notification.should(have.text("Successfully published content")) diff --git a/selenium/t/test_publish_static.py b/selenium/t/test_publish_static.py index 5608161a..f54deb8c 100644 --- a/selenium/t/test_publish_static.py +++ b/selenium/t/test_publish_static.py @@ -4,19 +4,16 @@ from selene.api import browser, be, have from .pages.main_toolbar import MainToolBar -from .pages.add_server_form import AddServerForm from .pages.publish_content_form import PublishContentForm -from conftest import generate_random_string - -pytestmark = [ pytest.mark.rsconnect_jupyter, - pytest.mark.publish_static, - ] +pytestmark = [ + pytest.mark.rsconnect_jupyter, + pytest.mark.publish_static, +] class TestPublishStatic(object): - @pytest.fixture(autouse=True) def setup(self, browser_config, jupyter_url, notebook, connect_url): """Navigate to the front page @@ -27,11 +24,9 @@ def setup(self, browser_config, jupyter_url, notebook, connect_url): # navigate to the notebook browser.open_url(jupyter_url + notebook) - MainToolBar(). \ - rsconnect_dropdown.click() + MainToolBar().rsconnect_dropdown.click() MainToolBar().rsconnect_publish.click() - def test_publish_static(self, connect_url): """Publish a static document """ @@ -39,12 +34,12 @@ def test_publish_static(self, connect_url): # dialog is racy with event setup sleep(1) - pf.title.set_value('NotebookStatic') + pf.title.set_value("NotebookStatic") pf.publish_without_source.click() pf.submit.click() m = MainToolBar() notification = m.rsconnect_notification - sleep(1) # race + sleep(1) # race notification.should(be.visible) - notification.should(have.text('Successfully published content')) + notification.should(have.text("Successfully published content")) diff --git a/selenium/t/test_republish.py b/selenium/t/test_republish.py index 4ef290b7..3613187b 100644 --- a/selenium/t/test_republish.py +++ b/selenium/t/test_republish.py @@ -4,20 +4,17 @@ from selene.api import browser, be, have from .pages.main_toolbar import MainToolBar -from .pages.add_server_form import AddServerForm from .pages.publish_content_form import PublishContentForm from .pages.content_selection import ContentSelectionDialog -from conftest import generate_random_string - -pytestmark = [ pytest.mark.rsconnect_jupyter, - pytest.mark.publish_static, - ] +pytestmark = [ + pytest.mark.rsconnect_jupyter, + pytest.mark.publish_static, +] class TestRepublish(object): - @pytest.fixture(autouse=True) def setup(self, browser_config, jupyter_url, notebook, connect_url): """Navigate to the front page @@ -28,43 +25,41 @@ def setup(self, browser_config, jupyter_url, notebook, connect_url): # navigate to the notebook browser.open_url(jupyter_url + notebook) - MainToolBar(). \ - rsconnect_dropdown.click() + MainToolBar().rsconnect_dropdown.click() MainToolBar().rsconnect_publish.click() - def test_republish(self, connect_url): """Publish a static document """ pf = PublishContentForm() - sleep(1) # dialog is racy with event setup + sleep(1) # dialog is racy with event setup - pf.title.set_value('NotebookRepublish') + pf.title.set_value("NotebookRepublish") pf.publish_without_source.click() pf.submit.click() pf.close.should(be.not_(be.visible)) m = MainToolBar() notification = m.rsconnect_notification - sleep(1) # race + sleep(1) # race notification.should(be.visible) - notification.should(have.text('Successfully published content')) + notification.should(have.text("Successfully published content")) # republish - sleep(1) # clicking before waiting results in the event not being triggered + sleep(1) # clicking before waiting results in the event not being triggered m.rsconnect_dropdown.click() m.rsconnect_publish.click() - sleep(1) # racy dialog + sleep(1) # racy dialog - pf.title.set_value('') - pf.title.set_value('Notebook') + pf.title.set_value("") + pf.title.set_value("Notebook") pf.publish_without_source.click() pf.submit.click() pf.close.should(be.not_(be.visible)) cs = ContentSelectionDialog() cs.title.should(be.visible) - cs.title.should(have.text('Select deployment location')) + cs.title.should(have.text("Select deployment location")) cs.new_location.should(be.visible) cs.new_location.click() @@ -72,4 +67,4 @@ def test_republish(self, connect_url): cs.close.should(be.not_(be.visible)) m.rsconnect_notification.should(be.visible) - m.rsconnect_notification.should(have.text('Successfully published content')) + m.rsconnect_notification.should(have.text("Successfully published content")) diff --git a/selenium/t/test_switch_mode.py b/selenium/t/test_switch_mode.py index 9f03a9f8..41c4b71d 100644 --- a/selenium/t/test_switch_mode.py +++ b/selenium/t/test_switch_mode.py @@ -4,20 +4,16 @@ from selene.api import browser, be, have from .pages.main_toolbar import MainToolBar -from .pages.add_server_form import AddServerForm from .pages.publish_content_form import PublishContentForm -from .pages.content_selection import ContentSelectionDialog -from conftest import generate_random_string - -pytestmark = [ pytest.mark.rsconnect_jupyter, - pytest.mark.switch_mode, - ] +pytestmark = [ + pytest.mark.rsconnect_jupyter, + pytest.mark.switch_mode, +] class TestSwitchMode(object): - @pytest.fixture(autouse=True) def setup(self, browser_config, jupyter_url, notebook, connect_url): """Navigate to the front page @@ -28,35 +24,33 @@ def setup(self, browser_config, jupyter_url, notebook, connect_url): # navigate to the notebook browser.open_url(jupyter_url + notebook) - MainToolBar(). \ - rsconnect_dropdown.click() + MainToolBar().rsconnect_dropdown.click() MainToolBar().rsconnect_publish.click() - def test_switch_mode(self, connect_url): """Publish a static document """ pf = PublishContentForm() - sleep(1) # dialog is racy with event setup + sleep(1) # dialog is racy with event setup - pf.title.set_value('NotebookSwitchMode2') + pf.title.set_value("NotebookSwitchMode2") pf.publish_without_source.click() pf.submit.click() pf.close.should(be.not_(be.visible)) m = MainToolBar() notification = m.rsconnect_notification - sleep(1) # race + sleep(1) # race notification.should(be.visible) - notification.should(have.text('Successfully published content')) + notification.should(have.text("Successfully published content")) # republish - sleep(1) # clicking before waiting results in the event not being triggered + sleep(1) # clicking before waiting results in the event not being triggered m.rsconnect_dropdown.click() m.rsconnect_publish.click() - sleep(1) # racy dialog + sleep(1) # racy dialog pf.publish_with_source.click() pf.submit.click() - pf.title_error.should(have.text('Cannot change app mode once deployed')) + pf.title_error.should(have.text("Cannot change app mode once deployed")) diff --git a/selenium/tools/systemstat/gridstat b/selenium/tools/systemstat/gridstat index c94aaf6f..a5755ec2 100755 --- a/selenium/tools/systemstat/gridstat +++ b/selenium/tools/systemstat/gridstat @@ -4,13 +4,11 @@ import gridstat import logging import sys import systemstattool -import os class GridStatTool(gridstat.GridStat, systemstattool.SystemStatTool): - - def __init__(self, logfile='gridstat.log', **kwargs): - super(GridStatTool,self).__init__(logfile=logfile) + def __init__(self, logfile="gridstat.log", **kwargs): + super(GridStatTool, self).__init__(logfile=logfile) self.logger = logging.getLogger(__name__) @@ -20,7 +18,8 @@ class GridStatTool(gridstat.GridStat, systemstattool.SystemStatTool): action="store", dest="nodes", default=2, - type=int) + type=int, + ) self.command_parser.add_argument( "--url", @@ -28,7 +27,8 @@ class GridStatTool(gridstat.GridStat, systemstattool.SystemStatTool): action="store", dest="url", default="http://localhost:4444", - type=str) + type=str, + ) # parse command line and config file options self.parse_options() @@ -41,11 +41,11 @@ class GridStatTool(gridstat.GridStat, systemstattool.SystemStatTool): self.start_logging() -if __name__ == '__main__' : +if __name__ == "__main__": tool = GridStatTool() - tool.logger.info('checking status of {}'.format(tool.options.url)) + tool.logger.info("checking status of {}".format(tool.options.url)) system_ready = tool.wait_until_ready() @@ -54,6 +54,6 @@ if __name__ == '__main__' : else: status = 1 - tool.logger.debug('exiting') + tool.logger.debug("exiting") sys.exit(status) diff --git a/selenium/tools/systemstat/gridstat.py b/selenium/tools/systemstat/gridstat.py index 54a129c1..d9c6ff0f 100644 --- a/selenium/tools/systemstat/gridstat.py +++ b/selenium/tools/systemstat/gridstat.py @@ -1,13 +1,11 @@ import logging import requests -import sys import systemstat class GridStat(systemstat.SystemStat): - def __init__(self, url="http://localhost:4444", nodes=2, sleep=1.0, wait=30, **kwargs): - super(GridStat,self).__init__(sleep=sleep, wait=wait, **kwargs) + super(GridStat, self).__init__(sleep=sleep, wait=wait, **kwargs) self._url = url self._nodes = nodes @@ -17,7 +15,6 @@ def __init__(self, url="http://localhost:4444", nodes=2, sleep=1.0, wait=30, **k self.logger.info("url: {}".format(url)) self.logger.info("nodes: {}".format(nodes)) - def is_ready(self): """check if selenium grid is ready @@ -27,56 +24,52 @@ def is_ready(self): 3. all nodes free """ - grid_api_hub_url = self._url + '/grid/api/hub' + grid_api_hub_url = self._url + "/grid/api/hub" try: # query the selenium grid server to see if the nodes have attached. response = requests.get(grid_api_hub_url) except requests.exceptions.ConnectionError: - self.logger.info( - 'waiting for hub to respond at {}'.format(grid_api_hub_url)) + self.logger.info("waiting for hub to respond at {}".format(grid_api_hub_url)) # wait and poll again return False if response.ok: # hub is up - self.logger.info('hub is up at {}'.format(grid_api_hub_url)) + self.logger.info("hub is up at {}".format(grid_api_hub_url)) # check if nodes are attached - slotCounts = response.json()['slotCounts'] + slotCounts = response.json()["slotCounts"] - self.logger.info('{} of {} nodes are attached'.format( - slotCounts['total'], self._nodes)) + self.logger.info("{} of {} nodes are attached".format(slotCounts["total"], self._nodes)) - if slotCounts['total'] == self._nodes: + if slotCounts["total"] == self._nodes: # nodes are attached - self.logger.info('all nodes are attached') + self.logger.info("all nodes are attached") # check if nodes are ready - self.logger.info('{} of {} nodes are ready'.format( - slotCounts['free'], self._nodes)) + self.logger.info("{} of {} nodes are ready".format(slotCounts["free"], self._nodes)) - if slotCounts['free'] == self._nodes: + if slotCounts["free"] == self._nodes: # nodes are ready - self.logger.info('all nodes are ready') + self.logger.info("all nodes are ready") return True else: # nodes are not ready yet - self.logger.info('waiting on {} node(s) to be ready'.format( - self._nodes-slotCounts['free'])) + self.logger.info("waiting on {} node(s) to be ready".format(self._nodes - slotCounts["free"])) else: # nodes are not attached yet - self.logger.info('waiting on {} node(s) to attach'.format( - self._nodes-slotCounts['total'])) + self.logger.info("waiting on {} node(s) to attach".format(self._nodes - slotCounts["total"])) else: # response was not "ok", log error details self.logger.info( 'hub responded at "{}" with error:\n{}\n{}\n{}'.format( - grid_api_hub_url, response.status_code, response.headers, - response.text)) + grid_api_hub_url, response.status_code, response.headers, response.text, + ) + ) # wait and poll again return False diff --git a/selenium/tools/systemstat/sutstat b/selenium/tools/systemstat/sutstat index 04b5e689..f377d1d6 100755 --- a/selenium/tools/systemstat/sutstat +++ b/selenium/tools/systemstat/sutstat @@ -4,13 +4,11 @@ import sutstat import logging import sys import systemstattool -import os class SutStatTool(sutstat.SutStat, systemstattool.SystemStatTool): - - def __init__(self, logfile='sutstat.log', **kwargs): - super(SutStatTool,self).__init__(logfile=logfile) + def __init__(self, logfile="sutstat.log", **kwargs): + super(SutStatTool, self).__init__(logfile=logfile) self.logger = logging.getLogger(__name__) @@ -20,7 +18,8 @@ class SutStatTool(sutstat.SutStat, systemstattool.SystemStatTool): action="store", dest="url", default="http://localhost:6969", - type=str) + type=str, + ) # parse command line and config file options self.parse_options() @@ -32,11 +31,11 @@ class SutStatTool(sutstat.SutStat, systemstattool.SystemStatTool): self.start_logging() -if __name__ == '__main__' : +if __name__ == "__main__": tool = SutStatTool() - tool.logger.info('checking status of {}'.format(tool.options.url)) + tool.logger.info("checking status of {}".format(tool.options.url)) system_ready = tool.wait_until_ready() @@ -45,6 +44,6 @@ if __name__ == '__main__' : else: status = 1 - tool.logger.debug('exiting') + tool.logger.debug("exiting") sys.exit(status) diff --git a/selenium/tools/systemstat/sutstat.py b/selenium/tools/systemstat/sutstat.py index b336badd..a099728f 100644 --- a/selenium/tools/systemstat/sutstat.py +++ b/selenium/tools/systemstat/sutstat.py @@ -1,13 +1,11 @@ import logging import requests -import sys import systemstat class SutStat(systemstat.SystemStat): - - def __init__(self, url="http://localhost:6969", sleep=1.0, wait=30, **kwargs): - super(SutStat,self).__init__(sleep=sleep, wait=wait, **kwargs) + def __init__(self, url="http://localhost:6969", sleep=5.0, wait=60, **kwargs): + super(SutStat, self).__init__(sleep=sleep, wait=wait, **kwargs) self._url = url @@ -15,34 +13,34 @@ def __init__(self, url="http://localhost:6969", sleep=1.0, wait=30, **kwargs): self.logger.info("url: {}".format(url)) - def is_ready(self): """check if the system is ready (accepting requests) """ - ping_url = self._url + '/' + ping_url = self._url + "/" try: # query the system to see if it is up. response = requests.get(ping_url) except requests.exceptions.ConnectionError: - self.logger.info( - 'waiting for sut server to respond at {}'.format(ping_url)) + self.logger.info("waiting for sut server to respond at {}".format(ping_url)) # wait and poll again return False if response.status_code == 200: # system is up - self.logger.info( - 'System under test is up at {}'.format(ping_url)) + self.logger.info("System under test is up at {}".format(ping_url)) return True else: # response was not "ok", log error details self.logger.info( - ('System under test responded at ' + - '"{}" with error:\n{}\n{}\n{}'.format( - ping_url, response.status_code, - response.headers, response.text))) + ( + "System under test responded at " + + '"{}" with error:\n{}\n{}\n{}'.format( + ping_url, response.status_code, response.headers, response.text + ) + ) + ) # wait and poll again return False diff --git a/selenium/tools/systemstat/systemstat.py b/selenium/tools/systemstat/systemstat.py index d781b843..d5272d7d 100644 --- a/selenium/tools/systemstat/systemstat.py +++ b/selenium/tools/systemstat/systemstat.py @@ -1,11 +1,9 @@ import datetime import logging -import sys import time class SystemStat(object): - def __init__(self, sleep=1.0, wait=120, **kwargs): # instrumentation for testing @@ -19,7 +17,6 @@ def __init__(self, sleep=1.0, wait=120, **kwargs): self.logger.info("sleep: {}".format(sleep)) self.logger.info("wait: {}".format(wait)) - def wait_until_ready(self): """Poll and wait for system to be up @@ -33,8 +30,8 @@ def wait_until_ready(self): startTime = nowTime endTime = nowTime + datetime.timedelta(seconds=self._wait) - self.logger.info('starting at {}'.format(startTime)) - self.logger.info('ending at {}'.format(endTime)) + self.logger.info("starting at {}".format(startTime)) + self.logger.info("ending at {}".format(endTime)) while nowTime < endTime: @@ -44,20 +41,18 @@ def wait_until_ready(self): return True else: # wait before polling again - self.logger.debug( - 'sleeing for {} seconds'.format(self._sleep)) + self.logger.debug("sleeing for {} seconds".format(self._sleep)) time.sleep(self._sleep) nowTime = datetime.datetime.now() # timed out waiting for system to come up. - self.logger.info('timed out waiting for system to come up') + self.logger.info("timed out waiting for system to come up") return False except Exception as e: self.logger.exception(e) raise - def is_ready(self): """Check if the system is up. diff --git a/selenium/tools/systemstat/systemstattool.py b/selenium/tools/systemstat/systemstattool.py index 006450e7..15335366 100644 --- a/selenium/tools/systemstat/systemstattool.py +++ b/selenium/tools/systemstat/systemstattool.py @@ -6,8 +6,7 @@ class SystemStatTool(systemstat.SystemStat): - - def __init__(self,logfile='systemstat.log', **kwargs): + def __init__(self, logfile="systemstat.log", **kwargs): self.options = None self.logger = logging.getLogger(__name__) @@ -15,12 +14,8 @@ def __init__(self,logfile='systemstat.log', **kwargs): self.command_parser = argparse.ArgumentParser() self.command_parser.add_argument( - "--sleep", - help="seconds to sleep between polling", - action="store", - dest="sleep", - default=1.0, - type=float) + "--sleep", help="seconds to sleep between polling", action="store", dest="sleep", default=1.0, type=float + ) self.command_parser.add_argument( "--wait", @@ -28,68 +23,52 @@ def __init__(self,logfile='systemstat.log', **kwargs): action="store", dest="wait", default=120.0, - type=float) + type=float, + ) self.command_parser.add_argument( - "--logfile", - help="name of the logfile", - action="store", - dest="logfile", - default=logfile, - type=str) + "--logfile", help="name of the logfile", action="store", dest="logfile", default=logfile, type=str + ) self.command_parser.add_argument( "--logformat", help="logging format", action="store", dest="logformat", - default='%(asctime)s %(message)s', - type=str) + default="%(asctime)s %(message)s", + type=str, + ) self.command_parser.add_argument( - "--verbose", "-v", - help="level of logging verbosity", - dest="verbose", - default=3, - action="count") + "--verbose", "-v", help="level of logging verbosity", dest="verbose", default=3, action="count" + ) self.command_parser.add_argument( - "--stdout", - help="print logs to stdout", - dest="stdout", - default=False, - action="store_true") - + "--stdout", help="print logs to stdout", dest="stdout", default=False, action="store_true" + ) def parse_options(self, args=None, namespace=None): # parse command line options - cl_options, cl_unknown = self.command_parser.parse_known_args( - args, namespace) + cl_options, cl_unknown = self.command_parser.parse_known_args(args, namespace) self.options = cl_options - self.options.__dict__['remainder'] = cl_unknown - - super(SystemStatTool,self).__init__( - sleep=self.options.sleep, wait=self.options.wait) + self.options.__dict__["remainder"] = cl_unknown + super(SystemStatTool, self).__init__(sleep=self.options.sleep, wait=self.options.wait) def start_logging(self): # setup a log file - self.options.logfile = os.path.abspath( - os.path.expanduser( - os.path.expandvars( - self.options.logfile))) + self.options.logfile = os.path.abspath(os.path.expanduser(os.path.expandvars(self.options.logfile))) - loglevel = int((6-self.options.verbose)*10) + loglevel = int((6 - self.options.verbose) * 10) file_hdlr = logging.FileHandler(self.options.logfile) file_hdlr.setFormatter(logging.Formatter(self.options.logformat)) file_hdlr.setLevel(loglevel) self.logger.addHandler(file_hdlr) - # check if we should print the log to stdout as well if self.options.stdout is True: out_hdlr = logging.StreamHandler(sys.stdout) @@ -102,21 +81,21 @@ def start_logging(self): self.logger.info("command line options: %s" % sys.argv[1:]) # print out the parsed options - self.logger.debug('opts = {}'.format(self.options)) + self.logger.debug("opts = {}".format(self.options)) -#file_hdlr = logging.FileHandler('ggg.log') -#file_hdlr.setLevel(logging.DEBUG) -#logger.addHandler(file_hdlr) +# file_hdlr = logging.FileHandler('ggg.log') +# file_hdlr.setLevel(logging.DEBUG) +# logger.addHandler(file_hdlr) # -#out_hdlr = logging.StreamHandler(sys.stdout) -#out_hdlr.setLevel(logging.DEBUG) -#logger.addHandler(out_hdlr) +# out_hdlr = logging.StreamHandler(sys.stdout) +# out_hdlr.setLevel(logging.DEBUG) +# logger.addHandler(out_hdlr) # -#logger.setLevel(logging.DEBUG) -#logger.debug('here') +# logger.setLevel(logging.DEBUG) +# logger.debug('here') -if __name__ == '__main__' : +if __name__ == "__main__": tool = SystemStatTool() @@ -130,6 +109,6 @@ def start_logging(self): else: status = 1 - tool.logger.debug('exiting') + tool.logger.debug("exiting") sys.exit(status) diff --git a/selenium/tools/systemstat/test_systemstat.py b/selenium/tools/systemstat/test_systemstat.py index cb5346c9..4e284c9c 100644 --- a/selenium/tools/systemstat/test_systemstat.py +++ b/selenium/tools/systemstat/test_systemstat.py @@ -3,22 +3,17 @@ from systemstattool import SystemStatTool pytestmark = [ - pytest.mark.systemstat, - ] + pytest.mark.systemstat, +] # default arguments from SystemStatTool systemStatToolArgs = Namespace( - sleep=1.0, - wait=2.0, - logfile='systemstat.log', - logformat='%(asctime)s %(message)s', - verbose=3, - stdout=False) + sleep=1.0, wait=2.0, logfile="systemstat.log", logformat="%(asctime)s %(message)s", verbose=3, stdout=False +) class TestSystemStatTool(object): - def test_default_command_fxn(self): """with default is_ready() method, wait_until_ready() still works.""" @@ -38,15 +33,13 @@ def test_default_command_fxn(self): assert system_ready is True assert tool._iterations == 1 - def test_system_up_no_waiting(self): """if is_ready() returns True, wait_until_ready() doesn't wait. """ class MySystemStatTool(SystemStatTool): - def __init__(self): - super(MySystemStatTool,self).__init__() + super(MySystemStatTool, self).__init__() # parse command line and config file options self.parse_options() @@ -54,12 +47,10 @@ def __init__(self): # start logging self.start_logging() - def is_ready(self): return True - # create a test tool based off the MySystemStatTool class tool = MySystemStatTool() @@ -70,15 +61,13 @@ def is_ready(self): assert system_ready is True assert tool._iterations == 1 - def test_delay_systemup(self): """wait_until_ready() waits and polls while system is down. """ class MySystemStatTool(SystemStatTool): - def __init__(self): - super(MySystemStatTool,self).__init__() + super(MySystemStatTool, self).__init__() # parse command line and config file options self.parse_options() @@ -89,7 +78,6 @@ def __init__(self): # track the number of entries self.counter = 0 - def is_ready(self): self.counter += 1 @@ -100,7 +88,6 @@ def is_ready(self): else: return False - # create a test tool based off the MySystemStatTool class tool = MySystemStatTool() @@ -111,18 +98,16 @@ def is_ready(self): assert system_ready is True assert tool._iterations == 4 - def test_wait_timeout(self): """if wait_until_ready never returns True, then timeout.""" class MySystemStatTool(SystemStatTool): - def __init__(self): - super(MySystemStatTool,self).__init__() + super(MySystemStatTool, self).__init__() # parse command line and config file options # set the wait time to 4 seconds - self.parse_options(['--wait', '4'], systemStatToolArgs) + self.parse_options(["--wait", "4"], systemStatToolArgs) # start logging self.start_logging() @@ -130,13 +115,11 @@ def __init__(self): # track the number of entries self.counter = 0 - def is_ready(self): # keep returning False until we timeout return False - # create a test tool based off the MySystemStatTool class tool = MySystemStatTool() @@ -148,19 +131,16 @@ def is_ready(self): assert system_ready is False assert tool._iterations == 4 - def test_sleep_affects_iterations(self): """sleeping longer means fewer iterations.""" class MySystemStatTool(SystemStatTool): - def __init__(self): - super(MySystemStatTool,self).__init__() + super(MySystemStatTool, self).__init__() # parse command line and config file options # set the wait time to 4 seconds - self.parse_options(['--wait', '4', '--sleep', '2'], - systemStatToolArgs) + self.parse_options(["--wait", "4", "--sleep", "2"], systemStatToolArgs) # start logging self.start_logging() @@ -168,13 +148,11 @@ def __init__(self): # track the number of entries self.counter = 0 - def is_ready(self): # keep returning False until we timeout return False - # create a test tool based off the MySystemStatTool class tool = MySystemStatTool() diff --git a/selenium/wait_for_systems_up.sh b/selenium/wait_for_systems_up.sh index 9dfc2c8b..aba40096 100755 --- a/selenium/wait_for_systems_up.sh +++ b/selenium/wait_for_systems_up.sh @@ -59,7 +59,7 @@ trap shutdown SIGINT SIGTERM # setup reasonable defaults for command line options connect_url="http://mock-connect:5000" -jupyter_url="http://jupyter-py2:9999" +jupyter_url="http://jupyter-py3.8:9999" grid_url="http://selenium-hub:4444" logdir="./" grid_nodes="2" diff --git a/tests/test_rsconnect_jupyter.py b/tests/test_rsconnect_jupyter.py new file mode 100644 index 00000000..2d3ce015 --- /dev/null +++ b/tests/test_rsconnect_jupyter.py @@ -0,0 +1,41 @@ +import logging + +import rsconnect_jupyter + +import pytest + + +class FakeNbApp(object): + def __init__(self): + self.handlers = {} + self.log = logging.getLogger(__name__) + self.settings = {"base_url": "http://nb-app.example.org"} + + @property + def web_app(self): + return self + + def add_handlers(self, host_pattern, handlers): + self.handlers[host_pattern] = handlers + + +@pytest.fixture +def fake_nb_app(): + return FakeNbApp() + + +def test_has_jupyter_extension_funcs(): + assert rsconnect_jupyter._jupyter_server_extension_paths() is not None + assert rsconnect_jupyter._jupyter_nbextension_paths() is not None + + +def test_load_jupyter_server_extension(fake_nb_app): + assert len(fake_nb_app.handlers) == 0 + assert rsconnect_jupyter.load_jupyter_server_extension(fake_nb_app) is None + assert len(fake_nb_app.handlers) == 1 + assert fake_nb_app.handlers.get(".*$") is not None + host_handlers = fake_nb_app.handlers[".*$"] + assert len(host_handlers) == 1 + route_pattern, handler = host_handlers[0] + assert route_pattern == "http://nb-app.example.org/rsconnect_jupyter/(?P\\w+)" + assert handler.__name__ == "EndpointHandler" From 9ca7c74d13d0a295830672aa2f3bd8e3a66e06f6 Mon Sep 17 00:00:00 2001 From: Dan Buch Date: Mon, 22 Jun 2020 18:11:11 -0400 Subject: [PATCH 3/6] Install node things at install time, too --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 93958718..ab4bc71d 100644 --- a/Makefile +++ b/Makefile @@ -77,7 +77,7 @@ run: install pipenv run jupyter-notebook -y --notebook-dir=/notebooks --ip='0.0.0.0' --port=9999 --no-browser --NotebookApp.token='' .PHONY: install -install: +install: yarn pipenv install --dev pipenv run pip install -e . pipenv run jupyter-nbextension install --symlink --user --py rsconnect_jupyter From 657475a0705e0324fa6612f517edacc8076acfd8 Mon Sep 17 00:00:00 2001 From: Dan Buch Date: Mon, 22 Jun 2020 18:21:31 -0400 Subject: [PATCH 4/6] Disable the busted stuff to see if the other things work at least maybe?? --- .github/workflows/main.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index e538d0d2..bfb8114e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -52,9 +52,9 @@ jobs: - run: make install - run: make lint - run: make test - - run: make image${{ matrix.python-version }} - - run: make -C selenium build - - run: make test-selenium + # - run: make image${{ matrix.python-version }} + # - run: make -C selenium build + # - run: make test-selenium distributions: needs: test runs-on: ubuntu-latest From 8ee63bb26507fcdf812ce77d9f5339ae9d9e025a Mon Sep 17 00:00:00 2001 From: Dan Buch Date: Mon, 22 Jun 2020 23:30:17 -0400 Subject: [PATCH 5/6] More work towards fixing the selenium tests --- .dockerignore | 10 ++-------- .github/workflows/main.yml | 12 +++++++++--- Dockerfile | 26 +++++++++++++++++++------- Makefile | 22 ++-------------------- mock_connect.py | 14 +++++++++++--- rsconnect_jupyter/__init__.py | 11 +++++++++++ selenium/Makefile | 31 ++++++++++++++----------------- 7 files changed, 68 insertions(+), 58 deletions(-) diff --git a/.dockerignore b/.dockerignore index 39979854..c7eca478 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,8 +1,2 @@ -build/ -dist/ -notebooks*/ -*/node_modules/ -*.egg-info/ -.eggs/ -*.pyc -.git/ +* +!Pipfile* diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index bfb8114e..25c77ec2 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -52,9 +52,15 @@ jobs: - run: make install - run: make lint - run: make test - # - run: make image${{ matrix.python-version }} - # - run: make -C selenium build - # - run: make test-selenium + - if: false + name: currently busted selenium test stuff + run: > + make image${{ matrix.python-version }} + make -C selenium build + make -C selenium clean + make -C selenium test-env-up + make -C selenium jupyter-up + make -C selenium test distributions: needs: test runs-on: ubuntu-latest diff --git a/Dockerfile b/Dockerfile index 72bd8faa..29b8870c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,18 +5,30 @@ LABEL maintainer="RStudio Connect " ARG NB_UID ARG NB_GID ARG PY_VERSION -RUN apt-get update -qq \ - && apt-get install -y make +RUN apt-get update -qq && \ + apt-get install -y make curl xz-utils git && \ + curl -fsSL "https://nodejs.org/dist/v12.18.1/node-v12.18.1-linux-x64.tar.xz" | \ + tar --strip-components=1 -C /usr/local -xJf - && \ + npm install -g yarn + RUN getent group ${NB_GID} || groupadd -g ${NB_GID} builder RUN useradd --password password \ --create-home \ --home-dir /home/builder \ --uid ${NB_UID} \ --gid ${NB_GID} \ - builder - -RUN conda update conda + builder && \ + mkdir -p /rsconnect_jupyter && \ + chown ${NB_UID}:${NB_GID} /rsconnect_jupyter USER ${NB_UID}:${NB_GID} -RUN bash -c 'cd /home/builder \ - && conda create --yes --channel conda-forge --name py${PY_VERSION/./} python=${PY_VERSION} jupyter numpy matplotlib setuptools pip pipenv' +WORKDIR /rsconnect_jupyter +ENV WORKON_HOME=/home/builder \ + PIPENV_DONT_LOAD_ENV=1 \ + PIPENV_SHELL=/bin/bash \ + PATH=/home/builder/.local/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin +COPY Pipfile Pipfile +COPY Pipfile.lock Pipfile.lock +RUN python -m pip install -I -U pip pipenv && \ + pipenv install --dev --python=/usr/local/bin/python && \ + rm -vf Pipfile* diff --git a/Makefile b/Makefile index ab4bc71d..b9cc600c 100644 --- a/Makefile +++ b/Makefile @@ -22,28 +22,12 @@ image%: docker build \ --tag $(IMAGE)$* \ --file Dockerfile \ - --build-arg BASE_IMAGE=continuumio/miniconda:4.4.10 \ + --build-arg BASE_IMAGE=python:$*-slim \ --build-arg NB_UID=$(NB_UID) \ --build-arg NB_GID=$(NB_GID) \ --build-arg PY_VERSION=$* \ . -.PHONY: launch -launch: - docker run --rm -i -t \ - -v $(CURDIR)/notebooks$(PY_VERSION):/notebooks \ - -v $(CURDIR):/rsconnect_jupyter \ - -e NB_UID=$(NB_UID) \ - -e NB_GID=$(NB_GID) \ - -e PY_VERSION=$(PY_VERSION) \ - -p :$(PORT):9999 \ - $(DOCKER_IMAGE) \ - /rsconnect_jupyter/run.sh $(TARGET) - - -notebook%: - make DOCKER_IMAGE=$(IMAGE)$* PY_VERSION=$* TARGET=run launch - .PHONY: all-tests all-tests: test2.7 test3.5 test3.6 test3.7 test3.8 @@ -51,9 +35,6 @@ all-tests: test2.7 test3.5 test3.6 test3.7 test3.8 test: version-frontend pipenv run pytest -vv --cov=rsconnect_jupyter tests/ -test%: version-frontend - make DOCKER_IMAGE=rstudio/rsconnect-jupyter-py$* PY_VERSION=$* TARGET=test launch - .PHONY: test-selenium test-selenium: $(MAKE) -C selenium clean test-env-up jupyter-up test || EXITCODE=$$? ; \ @@ -79,6 +60,7 @@ run: install .PHONY: install install: yarn pipenv install --dev + $(MAKE) version-frontend pipenv run pip install -e . pipenv run jupyter-nbextension install --symlink --user --py rsconnect_jupyter pipenv run jupyter-nbextension enable --py rsconnect_jupyter diff --git a/mock_connect.py b/mock_connect.py index 41ed19b7..d95b2f1c 100644 --- a/mock_connect.py +++ b/mock_connect.py @@ -17,7 +17,16 @@ from pprint import pprint # noinspection PyPackageRequirements -from flask import Flask, Blueprint, abort, after_this_request, g, request, url_for, jsonify +from flask import ( + Flask, + Blueprint, + abort, + after_this_request, + g, + request, + url_for, + jsonify, +) def error(code, reason): @@ -307,8 +316,7 @@ def get_task(task): @api.route("server_settings") @json def server_settings(): - # for our purposes, any non-error response will do - return {} + return {"not_empty": True} # noinspection PyUnresolvedReferences diff --git a/rsconnect_jupyter/__init__.py b/rsconnect_jupyter/__init__.py index c82a6856..3e0d3e52 100644 --- a/rsconnect_jupyter/__init__.py +++ b/rsconnect_jupyter/__init__.py @@ -74,6 +74,15 @@ def post(self, action): disable_tls_check = data["disable_tls_check"] cadata = data.get("cadata", None) + self.log.warning( + "server_address=%r api_key=%r disable_tls_check=%r cadata=%r", + server_address, + api_key, + disable_tls_check, + cadata, + ) + canonical_address = None + result = None try: canonical_address, result = test_server( RSConnectServer(server_address, api_key, disable_tls_check, cadata) @@ -96,6 +105,8 @@ def post(self, action): + u' upload it using "Upload TLS Certificate Bundle" below.', ) except Exception as err: + self.log.exception("Unable to verify that the provided server is running RStudio Connect") + self.log.warning("canonical_address=%r result=%r", canonical_address, result) raise web.HTTPError( 400, u"Unable to verify that the provided server is running RStudio Connect: %s" % err, ) diff --git a/selenium/Makefile b/selenium/Makefile index fc9ea7cf..d1216002 100644 --- a/selenium/Makefile +++ b/selenium/Makefile @@ -34,7 +34,7 @@ SCALE_FIREFOX=${SCALE} SCALE_CHROME=${SCALE} SELENIUM_VERSION=3.8.1-dubnium TMP_PIPE=tmp.pipe -TRE_IMAGE?=rstudio/checkrs-tew:0.1.0 +TRE_IMAGE?=rstudio/checkrs-tew:0.3.2 # Allocate a tty and keep stdin open when running locally # Jenkins nodes don't have input tty, so we set this to "" @@ -46,7 +46,7 @@ DOCKER_RUN_COMMAND=docker run --rm --init \ --network=$(NETWORK) \ --volume=$(CURDIR)/..:${RSCONNECT_DIR} \ --volume=$(CURDIR)/../notebooks$(PY_VERSION):${NOTEBOOKS_DIR} \ - --user=`id -u`:`id -g` \ + --user=$(NB_UID):$(NB_GID) \ --workdir=${RSCONNECT_DIR}/selenium \ ${TRE_IMAGE} @@ -82,8 +82,8 @@ build-mock-connect: docker build -t ${CONNECT_IMAGE} ${CONNECT_DOCKERFILE_DIR} clean: - rm -f *.png *.log *.xml ${TMP_PIPE}; - rm -rf .pytest_cache; + rm -f *.png *.log *.xml ${TMP_PIPE} + rm -rf .pytest_cache distclean: clean @@ -123,7 +123,7 @@ wait-for-systems-up: --name=systemstat \ --network=$(NETWORK) \ --volume=${CURDIR}/..:${RSCONNECT_DIR} \ - --user=`id -u`:`id -g` \ + --user=$(NB_UID):$(NB_GID) \ --workdir=${RSCONNECT_DIR}/selenium \ ${TRE_IMAGE} \ ./wait_for_systems_up.sh \ @@ -156,7 +156,7 @@ network-up: @if [ "${NETWORK_EXISTS}" = "1" ] ; then \ echo "Creating network: ${NETWORK}"; \ docker network create --driver bridge ${NETWORK} ; \ - fi; + fi network-down: connect-down grid-down $(eval NETWORK_EXISTS=$(shell docker network inspect ${NETWORK} > /dev/null 2>&1 && echo 0 || echo 1)) @@ -167,13 +167,10 @@ network-down: connect-down grid-down done; \ echo "Removing network: ${NETWORK}"; \ docker network rm ${NETWORK}; \ - fi; - -# I think we need TINI_SUBREAPER set because we are using --init, but run.sh -# calls a make command which launches another command. + fi jupyter-up: - @echo -n "Creating ${JUPYTER_HOST} ..." + @echo "Creating ${JUPYTER_HOST} ..." @docker run --rm -d --init \ ${DOCKER_TTY_FLAGS} \ --name=${JUPYTER_HOST} \ @@ -187,7 +184,7 @@ jupyter-up: --publish=:${JUPYTER_PORT}:${JUPYTER_PORT} \ --workdir=${RSCONNECT_DIR} \ $(JUPYTER_IMAGE) \ - ${RSCONNECT_DIR}/run.sh run \ + make -C ${RSCONNECT_DIR} run \ 1>/dev/null @docker logs -f ${JUPYTER_HOST} > ${JUPYTER_LOG} & @echo " done" @@ -195,13 +192,13 @@ jupyter-up: jupyter-down: $(eval JUPYTER_EXISTS=$(shell docker container inspect ${JUPYTER_HOST} > /dev/null 2>&1 && echo 0 || echo 1)) @if [ "${JUPYTER_EXISTS}" = "0" ] ; then \ - echo -n "Stopping ${JUPYTER_HOST} ..."; \ + echo "Stopping ${JUPYTER_HOST} ..."; \ docker stop ${JUPYTER_HOST} 1>/dev/null; \ echo " done"; \ - fi; + fi connect-up: - @echo -n "Creating ${CONNECT_HOST} ..." + @echo "Creating ${CONNECT_HOST} ..." @docker run --rm -d --init \ ${DOCKER_TTY_FLAGS} \ --name=${CONNECT_HOST} \ @@ -219,10 +216,10 @@ connect-up: connect-down: $(eval CONNECT_EXISTS=$(shell docker container inspect ${CONNECT_HOST} > /dev/null 2>&1 && echo 0 || echo 1)) @if [ "${CONNECT_EXISTS}" = "0" ] ; then \ - echo -n "Stopping ${CONNECT_HOST} ..."; \ + echo "Stopping ${CONNECT_HOST} ..."; \ docker stop ${CONNECT_HOST} 1>/dev/null; \ echo " done"; \ - fi; + fi .PHONY: all From 2f60386b519f605e818977eee33983b0acb28f8e Mon Sep 17 00:00:00 2001 From: Dan Buch Date: Mon, 22 Jun 2020 23:45:43 -0400 Subject: [PATCH 6/6] Use official pandoc image instead --- .github/workflows/main.yml | 1 - Makefile | 7 +--- docs/Dockerfile | 81 -------------------------------------- 3 files changed, 1 insertion(+), 88 deletions(-) delete mode 100644 docs/Dockerfile diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 25c77ec2..9e3da039 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -131,7 +131,6 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - run: make docs-image - run: make docs-build - uses: actions/upload-artifact@v2 with: diff --git a/Makefile b/Makefile index b9cc600c..e7611bd9 100644 --- a/Makefile +++ b/Makefile @@ -106,17 +106,12 @@ ifeq (Linux,$(shell uname)) DOCKER_RUN_AS = -u $(shell id -u):$(shell id -g) endif -DOCS_IMAGE := rsconnect-jupyter-docs:local BUILD_DOC := docker run --rm=true $(DOCKER_RUN_AS) \ -e VERSION=$(VERSION) \ $(DOCKER_ARGS) \ -v $(CURDIR):/rsconnect_jupyter \ -w /rsconnect_jupyter \ - $(DOCS_IMAGE) docs/build-doc.sh - -.PHONY: docs-image -docs-image: - docker build -t $(DOCS_IMAGE) ./docs + pandoc/latex:2.9 docs/build-doc.sh .PHONY: docs-build docs-build: docs/out diff --git a/docs/Dockerfile b/docs/Dockerfile deleted file mode 100644 index 9f41af29..00000000 --- a/docs/Dockerfile +++ /dev/null @@ -1,81 +0,0 @@ -# Using dated tags from https://hub.docker.com/_/ubuntu/ -FROM ubuntu:trusty-20180420 -MAINTAINER RStudio Connect - -ARG AWS_REGION=us-east-1 - -# Use EC2 (Cloudfront) apt source instead of default redirecting mirror. -RUN set -x \ - && sed -i "s/archive.ubuntu.com/$AWS_REGION.ec2.archive.ubuntu.com/" /etc/apt/sources.list \ - && export DEBIAN_FRONTEND=noninteractive \ - && apt-get update - -# Install packages aside from R and TeX (because they are large) -RUN export DEBIAN_FRONTEND=noninteractive && \ - apt-get update && \ - apt-get install -y \ - git \ - libcurl4-gnutls-dev \ - libssl-dev \ - libxml2-dev \ - make \ - curl && \ - rm -rf /var/lib/apt/lists/* - -# First install some non-texlive packages which are recommended but will be skipped when we install texlive -# in order to not install the documentation. -# -# biber depends on libwww-perl which has a tree of recommended packages, and recommends libreadonly-xs-perl -# texlive-base depends on xdg-utils which has a tree of recommended packages -# texinfo depends upon libxml-libxml-perl which has a tree of recommended packages -RUN export DEBIAN_FRONTEND=noninteractive && \ - apt-get update && \ - apt-get install -y \ - libreadonly-xs-perl \ - libwww-perl \ - libxml-libxml-perl \ - ruby \ - tcl \ - tk \ - xdg-utils && \ - rm -rf /var/lib/apt/lists/* - -# First part of texlive itself. Use --no-install-recommends to avoid installing ~750MB of documentation -RUN export DEBIAN_FRONTEND=noninteractive && \ - apt-get update && \ - apt-get install -y --no-install-recommends \ - texlive \ - texlive-fonts-extra \ - texlive-generic-recommended && \ - rm -rf /var/lib/apt/lists/* - -# Second part of texlive itself. Use --no-install-recommends to avoid installing ~750MB of documentation -RUN export DEBIAN_FRONTEND=noninteractive && \ - apt-get update && \ - apt-get install -y --no-install-recommends \ - biber \ - latex-beamer \ - lmodern \ - prosper \ - ps2eps \ - tex-gyre \ - texinfo \ - texlive-bibtex-extra \ - texlive-extra-utils \ - texlive-font-utils \ - texlive-latex-extra \ - texlive-luatex \ - texlive-pstricks \ - texlive-xetex && \ - rm -rf /var/lib/apt/lists/* - -# Install pre-compiled pandoc -# Inspired by /connect/dependencies/install-pandoc -RUN export PANDOC_VERSION=2.1.3 && \ - cd /usr/local/bin && \ - curl -L -O https://s3.amazonaws.com/rstudio-buildtools/pandoc/${PANDOC_VERSION}/linux-64/pandoc.gz && \ - curl -L -O https://s3.amazonaws.com/rstudio-buildtools/pandoc/${PANDOC_VERSION}/linux-64/pandoc-citeproc.gz && \ - gzip -d pandoc.gz pandoc-citeproc.gz && \ - chmod 0755 pandoc pandoc-citeproc - -WORKDIR /rsconnect_jupyter