diff --git a/.github/workflows/build-test-push.yml b/.github/workflows/build-test-push.yml new file mode 100644 index 0000000..2f99a3f --- /dev/null +++ b/.github/workflows/build-test-push.yml @@ -0,0 +1,69 @@ +name: Build, Test, Push for Development Scenarios and Release + +on: + push: + branches: [ develop, "fix-*", "feature-*" ] + release: + types: [published] + +jobs: + docker_build: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.7] + steps: + - name: Check out GitHub Repo + uses: actions/checkout@v2 + + # Imports many useful environment variables + - uses: FranzDiebold/github-env-vars-action@v2 + + # Unit (ish) tests + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Pip installation + run: python -m pip install --upgrade pip poetry + - name: Poetry installation + run: poetry install + - name: Create test image + env: + TAG: ${{ env.CI_ACTION_REF_NAME }} + run: docker-compose build + - name: Run tests + run: scripts/run_tests + - name: Codecov + uses: codecov/codecov-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} + file: ./coverage.xml + fail_ci_if_error: true + + # Build and Push (if we get here) + - name: Get current date + id: date + run: echo "::set-output name=date::$(date -u +'%Y-%m-%dT%H:%M:%SZ')" + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + - name: Login to GitHub Container Registry (GHCR) + uses: docker/login-action@v1 + with: + registry: ghcr.io + username: ${{ secrets.GHCR_USERNAME }} + password: ${{ secrets.GHCR_TOKEN }} + - name: Build and push + uses: docker/build-push-action@v2 + with: + context: . + file: ./Dockerfile + pull: true + push: true + build-args: | + BUILD_CONFIG=ci + BUILD_DATE=${{ steps.date.outputs.date }} + VCS_REF=${{ github.sha }} + BRANCH=${{ github.ref }} + TAG=${{ github.ref }} + tags: ghcr.io/${{ github.repository_owner }}/searchapi2:${{ env.CI_ACTION_REF_NAME }} diff --git a/.github/workflows/run-tests.yml b/.github/workflows/test-pull-request.yml similarity index 93% rename from .github/workflows/run-tests.yml rename to .github/workflows/test-pull-request.yml index e90c65f..91bb1cb 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/test-pull-request.yml @@ -4,8 +4,6 @@ name: Run tests on: - push: - branches: [ develop, master ] pull_request: branches: [ "*" ] @@ -25,6 +23,8 @@ jobs: run: python -m pip install --upgrade pip poetry - name: Poetry installation run: poetry install + - name: Create test image + run: docker-compose build - name: Run tests run: scripts/run_tests - name: Codecov @@ -33,4 +33,3 @@ jobs: token: ${{ secrets.CODECOV_TOKEN }} file: ./coverage.xml fail_ci_if_error: true - diff --git a/.gitignore b/.gitignore index 26faebf..cb0ea94 100644 --- a/.gitignore +++ b/.gitignore @@ -11,5 +11,22 @@ coverage_report/ .coverage /htmlcov/ +.pytest_cache/ /coverage.xml + +# Testing artifacts +# Captured container stderr and stdout +container.err +container.out + +# Development +.vscode +.idea + +# Handy place to stash temporary work, whilst keeping +# git unencumbered +/_temp + +# Python virtual environment +venv \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..01c5984 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,44 @@ +# Changelog +All notable changes to this project will be documented in this file.
+ +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [Unreleased] + +## [1.0.0] - 2021-04-20 +### Fixed +- fix SCT-2930: not honoring withPublic and withPrivate +- fix SCT-2931: maximum reported search result count was capped at 10,000; now reports the actual count when results exceed 10K. +- fix SCT-2932: throws error if object has been deleted +- fix SCT-2933, SCT-2956: Searchapi2 legacy endpoint uses JSON-RPC 2.0, not 1.1 +- fix SCT-2937: searchapi2/legacy search_objects should be tolerant of inaccessible workspaces +- fix SCT-2947: unauthenticated search not working correctly (no ws or narr info) +- fix SCT-2969: not honoring object_types +- fix SCT-2970: not narrowing search with additional terms +- fix SCT-3001: Data-search UI / Searchapi2 legacy endpoint generates incorrect object landing page links +- fix SCT-3002: Searchapi2 legacy endpoint returns incorrect index and index version + +### Added +- implement SCT-2966: add a "build and test" workflow for GitHub Actions which builds an image, runs tests, and pushes the resulting image to GH Container Registry. + +## [0.4.9] - 2020-09-11 +### Changed +- Use the AND operator for legacy full-text search + +## [0.4.8] - 2020-09-09 +### Fixed +- Set pagination parameters for the `search_workspace` method + +## [0.4.7] - 2020-09-03 +### Fixed +- Include narrative info parameter setting bug + +## [0.4.6] - 2020-08-31 +### Fixed +- Fixed the highlight fields for legacy search + +## [0.4.5] - 2020-08-25 +### Fixed +- Prevent removal of trailing slash in configured user profile URL +- Fix format string in error diff --git a/CODEOWNERS b/CODEOWNERS index d65e940..0a81c04 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1 +1 @@ -* @jayrbolton @slebras +* @slebras @scanon @eapearson diff --git a/Dockerfile b/Dockerfile index da1f024..b0b6dcb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -7,30 +7,30 @@ ARG BRANCH=develop ENV DOCKERIZE_VERSION v0.6.1 # Install dockerize -RUN apk --update add --virtual build-dependencies curl tar gzip && \ +RUN apk --update add --no-cache --virtual build-dependencies curl tar gzip && \ curl -o dockerize.tar.gz \ - https://raw.githubusercontent.com/kbase/dockerize/master/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz && \ + https://raw.githubusercontent.com/kbase/dockerize/master/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz && \ tar -C /usr/local/bin -xvzf dockerize.tar.gz && \ rm dockerize.tar.gz && \ - apk del build-dependencies + apk del --no-cache build-dependencies # Dockerize related labels LABEL org.label-schema.build-date=$BUILD_DATE \ - org.label-schema.vcs-url="https://github.com/kbaseincubator/search_api_deluxe" \ - org.label-schema.vcs-ref=$VCS_REF \ - org.label-schema.schema-version="1.0.0-rc1" \ - us.kbase.vcs-branch=$BRANCH \ - maintainer="KBase Team" + org.label-schema.vcs-url="https://github.com/kbaseincubator/search_api_deluxe" \ + org.label-schema.vcs-ref=$VCS_REF \ + org.label-schema.schema-version="1.0.0-rc1" \ + us.kbase.vcs-branch=$BRANCH \ + maintainer="KBase Team" WORKDIR /app # Install dependencies COPY pyproject.toml poetry.lock /app/ -RUN apk --update add --virtual build-dependencies libffi-dev libressl-dev musl-dev python3-dev build-base && \ +RUN apk --update add --no-cache --virtual build-dependencies libffi-dev libressl-dev musl-dev python3-dev build-base git rust cargo && \ pip install --upgrade pip poetry==1.0.9 && \ poetry config virtualenvs.create false && \ poetry install
--no-dev --no-interaction --no-ansi && \ - apk del build-dependencies + apk del --no-cache build-dependencies COPY . /app diff --git a/LICENSE.md b/LICENSE.md index 4d64461..4aa20d7 100644 --- a/LICENSE.md +++ b/LICENSE.md @@ -1,4 +1,4 @@ -Copyright (c) 2019 The KBase Project and its Contributors +Copyright (c) 2021 The KBase Project and its Contributors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..1082d6e --- /dev/null +++ b/Makefile @@ -0,0 +1,18 @@ +# Makefile for search_api2 +.PHONY: all test docs + +test: + sh scripts/run_tests + +build-dev-images: + @echo Building integration test images... + sh scripts/build-integration-test-images.sh + @echo Integration test images built + +integration-tests: + @echo Running Integration Tests... + sh scripts/run_integration_tests + +run-dev-server: + @echo Starting dev server... + sh scripts/run-dev-server.sh diff --git a/README.md b/README.md index a29bc65..1952966 100644 --- a/README.md +++ b/README.md @@ -9,16 +9,16 @@ This service has two JSON-RPC 2.0 endpoints: * `/legacy` - mirrors the old Java JSON-RPC 1.1 methods * `/rpc` - newer API using the Elasticsearch Query DSL -The JSON-Schemas for the legacy methods can be found in `legacy-schema.yaml` +The JSON-Schemas for the legacy methods can be found in `src/search1_rpc/schemas` -The JSON-Schemas for the newer methods (/rpc) can be found in `rpc-schema.yaml` +The JSON-Schemas for the newer methods (`/rpc`) can be found in `rpc-schema.yaml` ### Documents and indexes The [search configuration file](https://github.com/kbase/index_runner_spec/blob/master/config.yaml) details all of the indexes and document types found in the KBase Elasticsearch database. * `ws_type_to_indexes` shows which KBase workspace types (without versions) map to which unversioned index names -* `ws_subobjects` is a list of indexes that represent KBase "subobjects", such as genome features, that don't have their own dedicated type in the workspace, but do have a dedicated index in Elasticsearch. +* `ws_subobjects` is a list of indexes that represent KBase "sub objects", such as genome features, that don't have their own dedicated type in the workspace, but do have a dedicated index in Elasticsearch. * `global_mappings` are Elasticsearch type definitions that are reused in many of the indexes below. * `latest_versions` map the unversioned index names to the versioned index names that represent the latest type mapping version. * `aliases` is a list of Elasticsearch index aliases to a list of index names. These are all searchable as index names. @@ -31,8 +31,11 @@ The [search configuration file](https://github.com/kbase/index_runner_spec/blob/ * `-32003` - Elasticsearch response error * `-32004` - User profile service response error * `-32005` - Unknown workspace type +* `-32006` - Access group missing +* `-32007` - User profile missing -### /rpc + +### `/rpc` Uses [JSON RPC 2.0 format](https://www.jsonrpc.org/specification). @@ -80,7 +83,7 @@ Show the names of all indexes, and show what aliases stand for what indexes. 
### /legacy -A JSON-RPC 2.0 API that mimics the legacy Java server, [found here](https://github.com/kbase/KBaseSearchEngin://github.com/kbase/KBaseSearchEngine). Refer to the `legacy-schema.yaml` file for a reference on the method parameter types. +A JSON-RPC 1.1 API that mimics the legacy Java server, [found here](https://github.com/kbase/KBaseSearchEngine). Refer to the `src/search1_rpc/schemas` directory for a reference on the method parameter types. ## Development @@ -107,18 +110,8 @@ volume for the Elasticsearch service to see the changes. You can do this with `d ### Running the integration tests -Under `tests/integration`, there is a module of integration tests that run against CI (KBase staging server). +Under `tests/integration`, there is a set of integration tests that run against CI (KBase staging server). These do not run in our CI workflow, but are only run manually/locally. -You need to ssh tunnel to get CI Elasticsearch available locally: - -```sh -ssh -L 9500::9500 username@login1.berkeley.kbase.us -``` - -Then you can run the integration test script: - -```sh -sh scripts/run_integration_tests.sh -``` +Please see the [integration testing docs](docs/integration-testing.md) for instructions and further information. diff --git a/VERSION b/VERSION index 2b7c5ae..3eefcb9 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.4.2 +1.0.0 diff --git a/docker-compose.yaml b/docker-compose.yaml index 25b3fe8..07780c6 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -6,6 +6,7 @@ services: # For running the python consumers web: + image: searchapi2:${TAG:-dev} build: context: . depends_on: diff --git a/docs/ez-guide.md b/docs/ez-guide.md new file mode 100644 index 0000000..d81c552 --- /dev/null +++ b/docs/ez-guide.md @@ -0,0 +1,82 @@ +# An E-Z Guide + +This is a set of tasks that worked well for me on macOS. + +## Unit Testing + +### Host dependencies + +You will need to have, at a minimum: + +- make +- python 3.7 +- docker + +### Set up Python + +Create and activate a Python virtual environment: + +```sh +python -m venv venv +source venv/bin/activate +python -m pip install --upgrade pip +``` + +Install `poetry`: + +```sh +pip install poetry +``` + +Unit tests are run locally, so we need to install all Python dependencies: + +```sh +poetry install +``` + +### Run Tests + +> TODO: should be able to run unit tests in a container, to avoid the need for any host-level installs. + +Run the tests! + +This will run all the unit tests plus associated code quality evaluations. + +```sh +make test +``` + +or, for coverage: + +```sh +make test-coverage +``` + +> Note: Ensure that https://ci.kbase.us is accessible from the host machine; some unit tests require this (and should not be unit tests!) + +To run tests in a given directory or individual test modules: + +```sh +WORKSPACE_URL="http://localhost:5555/ws" PYTHONPATH=. poetry run pytest -vv tests/unit/PATH +``` + +e.g. to run all the `es_client` tests: + +```sh +WORKSPACE_URL="http://localhost:5555/ws" PYTHONPATH=. poetry run pytest -vv tests/unit/es_client +``` + +## Integration Testing + +See [Integration Testing](integration-testing.md) + + +## Using with kbase-ui + +This workflow is very handy for working on both the search API back end and search tool front ends. + +``` +IP="" SSHHOST="login1.berkeley.kbase.us" SSHUSER="" SSHPASS="" make run-dev-server +``` + +> TODO: complete this doc!
\ No newline at end of file diff --git a/docs/integration-testing.md b/docs/integration-testing.md new file mode 100644 index 0000000..44df32d --- /dev/null +++ b/docs/integration-testing.md @@ -0,0 +1,81 @@ +# Integration Testing + +The integration tests run inside a Docker container. An associated SSH tunnel proxies Elasticsearch requests into KBase. This tunnel is also run in a container. To ease the process of coordinating the startup of these containers, they are scripted in a docker-compose config file. + +### Build the images + +Although the integration test script will build the images if they are missing, the build can take a few minutes, which may cause the integration test script to time out. It is more reliable to simply build the images first. + +```bash +make build-dev-images +``` + +You can view the files `container.out` and `container.err` to monitor progress building the images. + +### Run the integration tests + +First you'll need to set up the required test parameters, which are provided as shell environment variables. How you provide them is up to you, but it is probably easiest (especially for test iteration) to export them: + +```bash +export WS_TOKEN="" +export IP="" +export SSHHOST="" +export SSHUSER="" +export SSHPASS="" +``` + +> Note: For now the "test user" is `kbaseuitest`. Ask a kbase-ui dev for the password or a token. + +> TODO: We should establish a `searchtest` user, use it to create some narratives with data for indexing, and use that account for integration testing. + + +Running the tests is as simple as: + +```bash +make integration-tests +``` + +The default logging level is "DEBUG", which can emit an annoying number of messages interspersed with test results. I prefer to run the tests like: + +```bash +LOGLEVEL=ERROR make integration-tests +``` + +If all goes well you should see something like: + +```bash +(venv) erikpearson@Eriks-MBP-2 search_api2 % LOGLEVEL=ERROR make integration-tests +Running Integraion Tests... +sh scripts/run_integration_tests ++ path=tests/integration ++ export PYTHONPATH=. ++ PYTHONPATH=. ++ poetry run pytest -vv -s tests/integration +=================================================== test session starts =================================================== +platform darwin -- Python 3.7.9, pytest-5.4.3, py-1.9.0, pluggy-0.13.1 -- /Users/erikpearson/work/kbase/sprints/2020Q4/fixes/fix_search_api2_legacy/search_api2/venv/bin/python +cachedir: .pytest_cache +rootdir: /Users/erikpearson/work/kbase/sprints/2020Q4/fixes/fix_search_api2_legacy/search_api2 +plugins: cov-2.10.1 +collecting ...
Logger and level: + +** To see more or less logging information, adjust the +** log level with the LOGLEVEL environment variable set +** to one of: +** CRITICAL ERROR WARNING INFO DEBUG NOTSET +** It is currently set to: +** ERROR + +collected 9 items + +tests/integration/test_integration_legacy.py::test_search_example1 PASSED +tests/integration/test_integration_legacy.py::test_search_example2 PASSED +tests/integration/test_integration_legacy.py::test_search_example3 PASSED +tests/integration/test_integration_legacy.py::test_search_example4 PASSED +tests/integration/test_integration_legacy.py::test_search_example5 PASSED +tests/integration/test_integration_legacy.py::test_search_example6 PASSED +tests/integration/test_integration_legacy.py::test_search_case1 PASSED +tests/integration/test_integration_search_workspaces.py::test_narrative_example PASSED +tests/integration/test_integration_search_workspaces.py::test_dashboard_example PASSED + +=================================================== 9 passed in 24.27s ==================================================== +``` diff --git a/legacy-schema.yaml b/legacy-schema.yaml deleted file mode 100644 index 5ad17fa..0000000 --- a/legacy-schema.yaml +++ /dev/null @@ -1,482 +0,0 @@ -$schema: http://json-schema.org/draft-07/schema# -title: Legacy search methods -definitions: - methods: - - "KBaseSearchEngine.get_objects": - # Legacy Search Objects Method - params: - $schema: http://json-schema.org/draft-07/schema# - type: array - minItems: 1 - maxItems: 1 - items: - type: object - required: [guids] - additionalProperties: false - properties: - guids: - type: array - items: {type: string} - post_processing: - $ref: "#/definitions/postProcessing" - result: - $schema: http://json-schema.org/draft-07/schema# - type: array - minItems: 1 - maxItems: 1 - items: - type: object - required: [objects, search_time] - additionalProperties: false - properties: - objects: - type: array - items: {$ref: "#/definitions/searchResultHit" } - objects_info: - type: object - patternProperties: - '.*': {$ref: "#/definitions/objectInfo"} - search_time: {type: integer} - access_groups_info: - type: object - additionalProperties: false - patternProperties: - '^[1-9][0-9]*$': {$ref: "#/definitions/workspaceInfo"} - access_group_narrative_info: - type: object - additionalProperties: false - patternProperties: - '^[1-9][0-9]*$': {$ref: "#/definitions/narrativeInfo" } - - "KBaseSearchEngine.search_objects": - # Legacy search objects method - params: - $schema: http://json-schema.org/draft-07/schema# - type: array - minItems: 1 - maxItems: 1 - items: - type: object - required: [match_filter] - additionalProperties: false - properties: - match_filter: {$ref: "#/definitions/matchFilter"} - access_filter: {$ref: "#/definitions/accessFilter"} - object_types: - type: array - items: {type: string} - pagination: - start: {type: integer} - count: {type: integer} - post_processing: - type: object - properties: - ids_only: {$ref: "#/definitions/sdk_boolean"} - skip_info: {$ref: "#/definitions/sdk_boolean"} - skip_keys: {$ref: "#/definitions/sdk_boolean"} - skip_data: {$ref: "#/definitions/sdk_boolean"} - include_highlight: {$ref: "#/definitions/sdk_boolean"} - add_narrative_info: {$ref: "#/definitions/sdk_boolean"} - sorting_rules: - type: array - items: - type: object - properties: - is_object_property: {$ref: "#/definitions/sdk_boolean"} - property: {type: string} - ascending: {$ref: "#/definitions/sdk_boolean"} - result: - $schema: http://json-schema.org/draft-07/schema# - type: array - 
minItems: 1 - maxItems: 1 - items: - type: object - required: [objects, total, search_time] - additionalProperties: false - properties: - objects: - type: array - items: {$ref: "#/definitions/searchResultHit"} - objects_info: - type: object - patternProperties: - '.*': {$ref: "#/definitions/objectInfo"} - total: {type: integer} - search_time: {type: integer} - pagination: - type: object - properties: - start: {type: integer} - count: {type: integer} - sorting_rules: - type: array - items: {$ref: "#/definitions/sortingRule"} - access_groups_info: - type: object - additionalProperties: false - patternProperties: - '^[1-9][0-9]*$': {$ref: "#/definitions/workspaceInfo"} - access_group_narrative_info: - type: object - additionalProperties: false - patternProperties: - '^[1-9][0-9]*$': {$ref: "#/definitions/narrativeInfo"} - - "KBaseSearchEngine.search_types": - params: - $schema: http://json-schema.org/draft-07/schema# - type: array - minItems: 1 - maxItems: 1 - items: - type: object - required: [match_filter] - additionalProperties: false - properties: - object_types: - type: array - items: {type: string} - match_filter: {$ref: "#/definitions/matchFilter"} - access_filter: {$ref: "#/definitions/accessFilter"} - result: - $schema: http://json-schema.org/draft-07/schema# - type: array - minItems: 1 - maxItems: 1 - items: - title: Legacy Search Types Method - type: object - required: [type_to_count, search_time] - additionalProperties: false - properties: - type_to_count: {$ref: "#/definitions/typeCounts"} - search_time: {type: integer} - - # End of methods - - # Generic type defs, used in the definitions for method params/result validation - - sdk_boolean: - type: integer - enum: [0, 1] - - tag: - type: string - enum: [narrative, refdata, noindex] - - data: - title: Search result - type: object - properties: - creator: {type: string} - shared_users: - type: array - items: {type: string} - timestamp: {type: integer} - creation_date: {type: string} - is_public: {type: boolean} - access_group: {type: integer} - obj_id: {type: integer} - version: {type: integer} - copied: {type: [string, "null"]} - tags: - type: array - items: {$ref: "#/definitions/tag"} - - searchResultHit: - type: object - title: Workspace object search result - required: - - object_name - - type - - type_ver - - guid - - kbase_id - - index_name - - data - properties: - object_name: {type: string} - type: {type: string} - type_ver: {type: integer} - guid: {type: string} - kbase_id: {type: string} - index_name: {type: string} - data: {$ref: "#/definitions/data"} - highlight: {$ref: "#/definitions/highlight"} - - searchResult: - type: object - properties: - objects: - type: array - items: {"$ref": "#/definitions/searchResultHit"} - access_groups_info: - type: object - patternProperties: - '^[1-9][0-9]*$': {$ref: "#/definitions/workspaceInfo"} - access_group_narrative_info: - type: object - patternProperties: - '^[1-9][0-9]*$': {$ref: "#/definitions/narrativeInfo"} - total: {type: integer} - search_time: {type: integer} - sortingRules: - type: array - items: {"$ref": "#/definitions/sortingRule"} - - sortingRule: - type: object - additionalProperties: false - required: [property] - properties: - is_object_property: {$ref: "#/definitions/sdk_boolean"} - property: {type: string} - ascending: {$ref: "#/definitions/sdk_boolean"} - - narrativeInfo: - type: array - items: - - {type: string, title: "Narrative Name"} - - {type: integer, title: "Narrative ID"} - - {type: integer, title: "Time Last Saved (Epoch)"} - - {type: string, title: 
"Owner User Name"} - - {type: string, title: "Owner Display Name"} - - userBrief: - type: object - additionalProperties: false - properties: - username: {type: string} - realname: {type: string} - - workspaceInfo: - type: array - items: - - title: Workspace Id - type: integer - - title: Workspace Name - type: string - - title: Owner - type: string - - title: Save date - type: string - - title: Maximum object id - type: integer - - title: User permission - type: string - - title: Global Permission - type: string - - title: Lock Status - type: string - - title: Metadata - $ref: "#/definitions/metadata" - - objectInfo: - type: array - items: - - title: Object Id - type: integer - - title: Object name - type: string - - title: Type - type: string - - title: Saved Date - type: string - - title: Version - type: integer - - title: Author - type: string - - title: Workspace Id - type: integer - - title: Workspace name - type: string - - title: Checksum - type: string - - title: Object size - type: integer - - title: Metadata - $ref: "#/definitions/metadata" - - metadata: - type: object - additionalProperties: {type: string} - - workspaceInfoDict: - type: object - additionalProperties: false - properties: - id: {type: integer} - name: {type: string} - owner: {type: string} - modification_date: {type: string} - max_object_id: {type: integer} - user_permission: {type: string} - global_permission: {type: string} - lock_status: {type: string} - metadata: {$ref: "#/definitions/metadata"} - - objectInfoDict: - type: object - additionalProperties: false - properties: - id: {type: integer} - name: {type: string} - version: {type: integer} - type: {type: string} - saved_date: {type: string} - saved_by: {type: string} - workspace_id: {type: integer} - checksum: {type: string} - size: {type: integer} - metadata: {$ref: "#/definitions/metadata"} - - workspaceInfoNice: - type: object - additionalProperties: false - properties: - permission: {type: string} - is_public: {type: boolean} - modified_at: {type: integer} - owner: {$ref: "#/definitions/userBrief"} - - highlight: - type: object - additionalProperties: - type: array - items: {type: string} - - workspaceType: - type: string - enum: [narrative, refdata, workspace] - - narrativeWorkspaceInfo: - type: object - additionalProperties: false - properties: - title: {type: string} - - refdataWorkspaceInfo: - type: object - additionalProperties: false - properties: - title: {type: string} - source: {type: string} - - otherWorkspaceInfo: - type: object - additionalProperties: false - properties: - title: {type: string} - - typeCounts: - type: object - additionalProperties: false - patternProperties: - '^[A-Z][a-zA-Z]*$': {type: integer} - - postProcessing: - type: object - additionalProperties: false - properties: - ids_only: {$ref: "#/definitions/sdk_boolean"} - skip_info: {$ref: "#/definitions/sdk_boolean"} - skip_keys: {$ref: "#/definitions/sdk_boolean"} - skip_data: {$ref: "#/definitions/sdk_boolean"} - include_highlight: {$ref: "#/definitions/sdk_boolean"} - add_narrative_info: {$ref: "#/definitions/sdk_boolean"} - - objectRef: - type: object - additionalProperties: false - properties: - ref: {type: string} - workspace_id: {type: integer} - object_id: {type: integer} - version: {type: integer} - - accessFilter: - type: object - required: [] - additionalProperties: false - properties: - with_private: {$ref: "#/definitions/sdk_boolean"} - with_public: {$ref: "#/definitions/sdk_boolean"} - - matchFilter: - type: object - required: [] - additionalProperties: false 
- properties: - full_text_in_all: {type: string} - timestamp: {$ref: "#/definitions/keyMatchItem"} - lookup_in_keys: {$ref: "#/definitions/keyMatch"} - exclude_subobjects: {$ref: "#/definitions/sdk_boolean"} - source_tags: - type: array - items: {type: string} - source_tags_blacklist: {type: integer} - - keyMatch: - type: object - additionalProperties: false - patternProperties: - '^\w+$': {$ref: "#/definitions/keyMatchItem"} - - # Reference: - # https://github.com/kbase/KBaseSearchEngine/blob/master/KBaseSearchEngine.spec#L30 - keyMatchItem: - oneOf: - - type: object - additionalProperties: false - required: [value] - properties: - value: {type: string} - - type: object - additionalProperties: false - required: [string_value] - properties: - string_value: {type: string} - - type: object - additionalProperties: false - required: [int_value] - properties: - int_value: {type: integer} - - type: object - additionalProperties: false - required: [double_value] - properties: - double_value: {type: number} - - type: object - additionalProperties: false - required: [bool_value] - properties: - bool_value: {type: boolean} - - type: object - additionalProperties: false - anyOf: - - required: [min_int] - - required: [max_int] - - required: [min_int, max_int] - properties: - min_int: {type: integer} - max_int: {type: integer} - - type: object - additionalProperties: false - anyOf: - - required: [min_double] - - required: [max_double] - - required: [min_double, max_double] - properties: - min_double: {type: number} - max_double: {type: number} - - type: object - additionalProperties: false - anyOf: - - required: [min_date] - - required: [max_date] - - required: [min_date, max_date] - properties: - min_boolean: {type: boolean} - max_boolean: {type: boolean} diff --git a/poetry.lock b/poetry.lock index 9b4b45e..7de09f6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,287 +1,275 @@ [[package]] -category = "main" -description = "File support for asyncio." name = "aiofiles" +version = "0.6.0" +description = "File support for asyncio." +category = "main" optional = false python-versions = "*" -version = "0.5.0" [[package]] -category = "dev" -description = "Atomic file writes." -marker = "sys_platform == \"win32\"" name = "atomicwrites" +version = "1.4.0" +description = "Atomic file writes." 
+category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.4.0" [[package]] -category = "main" -description = "Classes Without Boilerplate" name = "attrs" +version = "20.3.0" +description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "19.3.0" [package.extras] -azure-pipelines = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "pytest-azurepipelines"] -dev = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "pre-commit"] -docs = ["sphinx", "zope.interface"] -tests = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"] +docs = ["furo", "sphinx", "zope.interface"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] [[package]] -category = "dev" -description = "Security oriented static analyser for python code." name = "bandit" +version = "1.7.0" +description = "Security oriented static analyser for python code." +category = "dev" optional = false -python-versions = "*" -version = "1.6.2" +python-versions = ">=3.5" [package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} GitPython = ">=1.0.1" -PyYAML = ">=3.13" -colorama = ">=0.3.9" +PyYAML = ">=5.3.1" six = ">=1.10.0" stevedore = ">=1.20.0" [[package]] -category = "main" -description = "Python package for providing Mozilla's CA Bundle." name = "certifi" +version = "2020.12.5" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = "*" -version = "2020.6.20" [[package]] -category = "main" -description = "Universal encoding detector for Python 2 and 3" name = "chardet" +version = "4.0.0" +description = "Universal encoding detector for Python 2 and 3" +category = "main" optional = false -python-versions = "*" -version = "3.0.4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] -category = "dev" -description = "Cross-platform colored terminal text." -marker = "platform_system == \"Windows\" or sys_platform == \"win32\"" name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." 
+category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.4.3" [[package]] -category = "dev" -description = "Code coverage measurement for Python" name = "coverage" +version = "5.5" +description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" -version = "5.1" [package.extras] toml = ["toml"] [[package]] -category = "dev" -description = "the modular source code checker: pep8 pyflakes and co" name = "flake8" +version = "3.8.4" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -version = "3.8.3" [package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} mccabe = ">=0.6.0,<0.7.0" pycodestyle = ">=2.6.0a1,<2.7.0" pyflakes = ">=2.2.0,<2.3.0" -[package.dependencies.importlib-metadata] -python = "<3.8" -version = "*" - [[package]] -category = "dev" -description = "Git Object Database" name = "gitdb" +version = "4.0.5" +description = "Git Object Database" +category = "dev" optional = false python-versions = ">=3.4" -version = "4.0.5" [package.dependencies] smmap = ">=3.0.1,<4" [[package]] -category = "dev" -description = "Python Git Library" name = "gitpython" +version = "3.1.14" +description = "Python Git Library" +category = "dev" optional = false python-versions = ">=3.4" -version = "3.1.3" [package.dependencies] gitdb = ">=4.0.1,<5" [[package]] -category = "main" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" name = "h11" -optional = false -python-versions = "*" version = "0.9.0" - -[[package]] -category = "main" -description = "HTTP/2 State-Machine based protocol implementation" -name = "h2" -optional = false -python-versions = "*" -version = "3.2.0" - -[package.dependencies] -hpack = ">=3.0,<4" -hyperframe = ">=5.2.0,<6" - -[[package]] +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" category = "main" -description = "Pure-Python HPACK header compression" -name = "hpack" optional = false python-versions = "*" -version = "3.0.0" [[package]] +name = "httpcore" +version = "0.11.1" +description = "A minimal low-level HTTP client." category = "main" -description = "Chromium HSTS Preload list as a Python package and updated daily" -name = "hstspreload" optional = false python-versions = ">=3.6" -version = "2020.6.30" + +[package.dependencies] +h11 = ">=0.8,<0.10" +sniffio = ">=1.0.0,<2.0.0" + +[package.extras] +http2 = ["h2 (>=3.0.0,<4.0.0)"] [[package]] -category = "main" -description = "A collection of framework independent HTTP protocol utils." name = "httptools" +version = "0.1.1" +description = "A collection of framework independent HTTP protocol utils." +category = "main" optional = false python-versions = "*" -version = "0.1.1" [package.extras] -test = ["Cython (0.29.14)"] +test = ["Cython (==0.29.14)"] [[package]] -category = "main" -description = "The next generation HTTP client." name = "httpx" +version = "0.15.4" +description = "The next generation HTTP client." 
+category = "main" optional = false python-versions = ">=3.6" -version = "0.11.1" [package.dependencies] certifi = "*" -chardet = ">=3.0.0,<4.0.0" -h11 = ">=0.8,<0.10" -h2 = ">=3.0.0,<4.0.0" -hstspreload = "*" -idna = ">=2.0.0,<3.0.0" -rfc3986 = ">=1.3,<2" -sniffio = ">=1.0.0,<2.0.0" -urllib3 = ">=1.0.0,<2.0.0" +httpcore = ">=0.11.0,<0.12.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" -[[package]] -category = "main" -description = "HTTP/2 framing layer for Python" -name = "hyperframe" -optional = false -python-versions = "*" -version = "5.2.0" +[package.extras] +brotli = ["brotlipy (>=0.7.0,<0.8.0)"] +http2 = ["h2 (>=3.0.0,<4.0.0)"] [[package]] -category = "main" -description = "Internationalized Domain Names in Applications (IDNA)" name = "idna" +version = "2.10" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.10" [[package]] -category = "main" -description = "Read metadata from Python packages" -marker = "python_version < \"3.8\"" name = "importlib-metadata" +version = "3.7.2" +description = "Read metadata from Python packages" +category = "main" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -version = "1.7.0" +python-versions = ">=3.6" [package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["sphinx", "rst.linker"] -testing = ["packaging", "pep517", "importlib-resources (>=1.3)"] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] -category = "main" -description = "An implementation of JSON Schema validation for Python" -name = "jsonschema" +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = "*" + +[[package]] +name = "jsonschema" version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +category = "main" +optional = false +python-versions = "*" [package.dependencies] attrs = ">=17.4.0" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} pyrsistent = ">=0.14.0" -setuptools = "*" six = ">=1.11.0" -[package.dependencies.importlib-metadata] -python = "<3.8" -version = "*" - [package.extras] format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"] [[package]] +name = "kbase-jsonrpc11base" +version = "0.1.6" +description = "Simple JSON-RPC 1.1 service without transport layer" category = "main" -description = "Simple JSON-RPC service without transport layer" +optional = false +python-versions = "^3.7" +develop = false + +[package.dependencies] +jsonschema = "3.2.0" +pyyaml = "5.4.1" + +[package.source] +type = "git" +url = "https://github.com/kbaseIncubator/kbase-jsonrpc11base" +reference = "v0.1.6" +resolved_reference = "5d59ca6b1e1e0f09ec0faab922380e9ce53014d9" + +[[package]] name = "kbase-jsonrpcbase" +version = "0.3.0a6" +description = "Simple JSON-RPC service without transport layer" +category = "main" optional = false python-versions = 
">=3.6,<4.0" -version = "0.3.0a5" [package.dependencies] jsonschema = ">=3.2.0,<4.0.0" pyyaml = ">=5.3.1,<6.0.0" [[package]] -category = "dev" -description = "McCabe checker, plugin for flake8" name = "mccabe" -optional = false -python-versions = "*" version = "0.6.1" - -[[package]] +description = "McCabe checker, plugin for flake8" category = "dev" -description = "More routines for operating on iterables, beyond itertools" -name = "more-itertools" optional = false -python-versions = ">=3.5" -version = "8.4.0" +python-versions = "*" [[package]] -category = "main" -description = "multidict implementation" name = "multidict" +version = "5.1.0" +description = "multidict implementation" +category = "main" optional = false -python-versions = ">=3.5" -version = "4.7.6" +python-versions = ">=3.6" [[package]] -category = "dev" -description = "Optional static typing for Python" name = "mypy" +version = "0.812" +description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.5" -version = "0.782" [package.dependencies] mypy-extensions = ">=0.4.3,<0.5.0" @@ -292,412 +280,418 @@ typing-extensions = ">=3.7.4" dmypy = ["psutil (>=4.0)"] [[package]] -category = "dev" -description = "Experimental type system extensions for programs checked with the mypy typechecker." name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" optional = false python-versions = "*" -version = "0.4.3" [[package]] -category = "dev" -description = "Core utilities for Python packages" name = "packaging" +version = "20.9" +description = "Core utilities for Python packages" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "20.4" [package.dependencies] pyparsing = ">=2.0.2" -six = "*" [[package]] -category = "dev" -description = "Python Build Reasonableness" name = "pbr" +version = "5.5.1" +description = "Python Build Reasonableness" +category = "dev" optional = false -python-versions = "*" -version = "5.4.5" +python-versions = ">=2.6" [[package]] -category = "dev" -description = "plugin and hook calling mechanisms for python" name = "pluggy" +version = "0.13.1" +description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.13.1" [package.dependencies] -[package.dependencies.importlib-metadata] -python = "<3.8" -version = ">=0.12" +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] dev = ["pre-commit", "tox"] [[package]] -category = "dev" -description = "library with cross-python path, ini-parsing, io, code, log facilities" name = "py" +version = "1.10.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.9.0" [[package]] -category = "dev" -description = "Python style guide checker" name = "pycodestyle" +version = "2.6.0" +description = "Python style guide checker" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.6.0" [[package]] -category = "dev" -description = "passive checker of Python programs" name = "pyflakes" +version = "2.2.0" +description = "passive checker of Python programs" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, 
!=3.2.*, !=3.3.*" -version = "2.2.0" [[package]] -category = "dev" -description = "Python parsing module" name = "pyparsing" +version = "2.4.7" +description = "Python parsing module" +category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -version = "2.4.7" [[package]] -category = "main" -description = "Persistent/Functional/Immutable data structures" name = "pyrsistent" +version = "0.17.3" +description = "Persistent/Functional/Immutable data structures" +category = "main" optional = false -python-versions = "*" -version = "0.16.0" - -[package.dependencies] -six = "*" +python-versions = ">=3.5" [[package]] -category = "dev" -description = "pytest: simple powerful testing with Python" name = "pytest" +version = "6.2.2" +description = "pytest: simple powerful testing with Python" +category = "dev" optional = false -python-versions = ">=3.5" -version = "5.4.3" +python-versions = ">=3.6" [package.dependencies] -atomicwrites = ">=1.0" -attrs = ">=17.4.0" -colorama = "*" -more-itertools = ">=4.0.0" +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<1.0" -py = ">=1.5.0" -wcwidth = "*" - -[package.dependencies.importlib-metadata] -python = "<3.8" -version = ">=0.12" +pluggy = ">=0.12,<1.0.0a1" +py = ">=1.8.2" +toml = "*" [package.extras] -checkqa-mypy = ["mypy (v0.761)"] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] [[package]] -category = "dev" -description = "Pytest plugin for measuring coverage." name = "pytest-cov" +version = "2.11.1" +description = "Pytest plugin for measuring coverage." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "2.10.0" [package.dependencies] -coverage = ">=4.4" +coverage = ">=5.2.1" pytest = ">=4.6" [package.extras] -testing = ["fields", "hunter", "process-tests (2.0.2)", "six", "pytest-xdist", "virtualenv"] +testing = ["fields", "hunter", "process-tests (==2.0.2)", "six", "pytest-xdist", "virtualenv"] [[package]] -category = "main" -description = "YAML parser and emitter for Python" name = "pyyaml" +version = "5.4.1" +description = "YAML parser and emitter for Python" +category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "5.3.1" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [[package]] -category = "main" -description = "Python HTTP for Humans." name = "requests" +version = "2.25.1" +description = "Python HTTP for Humans." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "2.24.0" [package.dependencies] certifi = ">=2017.4.17" -chardet = ">=3.0.2,<4" +chardet = ">=3.0.2,<5" idna = ">=2.5,<3" -urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26" +urllib3 = ">=1.21.1,<1.27" [package.extras] security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] -socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"] +socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] [[package]] -category = "dev" -description = "A utility library for mocking out the `requests` Python library." name = "responses" +version = "0.12.1" +description = "A utility library for mocking out the `requests` Python library." 
+category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.10.15" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.dependencies] requests = ">=2.0" six = "*" +urllib3 = ">=1.25.10" [package.extras] -tests = ["coverage (>=3.7.1,<5.0.0)", "pytest-cov", "pytest-localserver", "flake8", "pytest (>=4.6,<5.0)", "pytest"] +tests = ["coverage (>=3.7.1,<6.0.0)", "pytest-cov", "pytest-localserver", "flake8", "pytest (>=4.6,<5.0)", "pytest (>=4.6)"] [[package]] -category = "main" -description = "Validating URI References per RFC 3986" name = "rfc3986" +version = "1.4.0" +description = "Validating URI References per RFC 3986" +category = "main" optional = false python-versions = "*" -version = "1.4.0" + +[package.dependencies] +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} [package.extras] idna2008 = ["idna"] [[package]] -category = "main" -description = "A web server and web framework that's written to go fast. Build fast. Run fast." name = "sanic" +version = "20.12.2" +description = "A web server and web framework that's written to go fast. Build fast. Run fast." +category = "main" optional = false python-versions = ">=3.6" -version = "20.6.3" [package.dependencies] -aiofiles = ">=0.3.0" +aiofiles = ">=0.6.0" httptools = ">=0.0.10" -httpx = "0.11.1" -multidict = ">=4.0,<5.0" -ujson = ">=1.35" -uvloop = ">=0.5.3" +httpx = "0.15.4" +multidict = ">=5.0,<6.0" +ujson = {version = ">=1.35", markers = "sys_platform != \"win32\" and implementation_name == \"cpython\""} +uvloop = {version = ">=0.5.3,<0.15.0", markers = "sys_platform != \"win32\" and implementation_name == \"cpython\""} websockets = ">=8.1,<9.0" [package.extras] -all = ["pytest (5.2.1)", "multidict (>=4.0,<5.0)", "gunicorn", "pytest-cov", "httpcore (0.3.0)", "beautifulsoup4", "pytest-sanic", "pytest-sugar", "pytest-benchmark", "aiofiles", "tox", "black", "flake8", "bandit", "towncrier", "sphinx (>=2.1.2)", "sphinx-rtd-theme", "recommonmark (>=0.5.0)", "docutils", "pygments", "uvloop (>=0.5.3)", "ujson (>=1.35)"] -dev = ["pytest (5.2.1)", "multidict (>=4.0,<5.0)", "gunicorn", "pytest-cov", "httpcore (0.3.0)", "beautifulsoup4", "pytest-sanic", "pytest-sugar", "pytest-benchmark", "aiofiles", "tox", "black", "flake8", "bandit", "towncrier", "uvloop (>=0.5.3)", "ujson (>=1.35)"] +all = ["pytest (==5.2.1)", "multidict (>=5.0,<6.0)", "gunicorn (==20.0.4)", "pytest-cov", "httpcore (>=0.11.0,<0.12.0)", "beautifulsoup4", "pytest-sanic", "pytest-sugar", "pytest-benchmark", "pytest-dependency", "aiofiles", "tox", "black", "flake8", "bandit", "towncrier", "sphinx (>=2.1.2)", "sphinx-rtd-theme", "recommonmark (>=0.5.0)", "docutils", "pygments", "uvloop (>=0.5.3,<0.15.0)", "ujson (>=1.35)"] +dev = ["pytest (==5.2.1)", "multidict (>=5.0,<6.0)", "gunicorn (==20.0.4)", "pytest-cov", "httpcore (>=0.11.0,<0.12.0)", "beautifulsoup4", "pytest-sanic", "pytest-sugar", "pytest-benchmark", "pytest-dependency", "aiofiles", "tox", "black", "flake8", "bandit", "towncrier", "uvloop (>=0.5.3,<0.15.0)", "ujson (>=1.35)"] docs = ["sphinx (>=2.1.2)", "sphinx-rtd-theme", "recommonmark (>=0.5.0)", "docutils", "pygments"] -test = ["pytest (5.2.1)", "multidict (>=4.0,<5.0)", "gunicorn", "pytest-cov", "httpcore (0.3.0)", "beautifulsoup4", "pytest-sanic", "pytest-sugar", "pytest-benchmark", "uvloop (>=0.5.3)", "ujson (>=1.35)"] +test = ["pytest (==5.2.1)", "multidict (>=5.0,<6.0)", "gunicorn (==20.0.4)", "pytest-cov", "httpcore (>=0.11.0,<0.12.0)", "beautifulsoup4", 
"pytest-sanic", "pytest-sugar", "pytest-benchmark", "pytest-dependency", "uvloop (>=0.5.3,<0.15.0)", "ujson (>=1.35)"] [[package]] -category = "main" -description = "Python 2 and 3 compatibility utilities" name = "six" +version = "1.15.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -version = "1.15.0" [[package]] -category = "dev" -description = "A pure Python implementation of a sliding window memory map manager" name = "smmap" +version = "3.0.5" +description = "A pure Python implementation of a sliding window memory map manager" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "3.0.4" [[package]] -category = "main" -description = "Sniff out which async library your code is running under" name = "sniffio" +version = "1.2.0" +description = "Sniff out which async library your code is running under" +category = "main" optional = false python-versions = ">=3.5" -version = "1.1.0" [[package]] -category = "dev" -description = "Manage dynamic plugins for Python applications" name = "stevedore" +version = "3.3.0" +description = "Manage dynamic plugins for Python applications" +category = "dev" optional = false python-versions = ">=3.6" -version = "2.0.1" [package.dependencies] +importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""} pbr = ">=2.0.0,<2.1.0 || >2.1.0" [[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" category = "dev" -description = "a fork of Python 2 and 3 ast modules with type comment support" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] name = "typed-ast" +version = "1.4.2" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" optional = false python-versions = "*" -version = "1.4.1" [[package]] -category = "dev" -description = "Backported and Experimental Type Hints for Python 3.5+" name = "typing-extensions" +version = "3.7.4.3" +description = "Backported and Experimental Type Hints for Python 3.5+" +category = "main" optional = false python-versions = "*" -version = "3.7.4.2" [[package]] -category = "main" -description = "Ultra fast JSON encoder and decoder for Python" -marker = "sys_platform != \"win32\" and implementation_name == \"cpython\"" name = "ujson" +version = "4.0.2" +description = "Ultra fast JSON encoder and decoder for Python" +category = "main" optional = false -python-versions = ">=3.5" -version = "3.0.0" +python-versions = ">=3.6" [[package]] -category = "main" -description = "HTTP library with thread-safe connection pooling, file post, and more." name = "urllib3" +version = "1.26.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" -version = "1.25.9" [package.extras] brotli = ["brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0.14)", "ipaddress"] -socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] -category = "main" -description = "Fast implementation of asyncio event loop on top of libuv" -marker = "sys_platform != \"win32\" and implementation_name == \"cpython\"" name = "uvloop" -optional = false -python-versions = "*" version = "0.14.0" - -[[package]] -category = "dev" -description = "Measures the displayed width of unicode strings in a terminal" -name = "wcwidth" +description = "Fast implementation of asyncio event loop on top of libuv" +category = "main" optional = false python-versions = "*" -version = "0.2.5" [[package]] -category = "main" -description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" name = "websockets" +version = "8.1" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +category = "main" optional = false python-versions = ">=3.6.1" -version = "8.1" [[package]] -category = "main" -description = "Backport of pathlib-compatible object wrapper for zip files" -marker = "python_version < \"3.8\"" name = "zipp" +version = "3.4.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" optional = false python-versions = ">=3.6" -version = "3.1.0" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] -testing = ["jaraco.itertools", "func-timeout"] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [metadata] -content-hash = "a39ec53d1391a2d28c25401b7d101e6bb450b20caef692af15d134521e29610c" +lock-version = "1.1" python-versions = "^3.7" +content-hash = "ceca37a35537ab53e3e25016eb6b3433a42175801695d94088406151d28641b5" [metadata.files] aiofiles = [ - {file = "aiofiles-0.5.0-py3-none-any.whl", hash = "sha256:377fdf7815cc611870c59cbd07b68b180841d2a2b79812d8c218be02448c2acb"}, - {file = "aiofiles-0.5.0.tar.gz", hash = "sha256:98e6bcfd1b50f97db4980e182ddd509b7cc35909e903a8fe50d8849e02d815af"}, + {file = "aiofiles-0.6.0-py3-none-any.whl", hash = "sha256:bd3019af67f83b739f8e4053c6c0512a7f545b9a8d91aaeab55e6e0f9d123c27"}, + {file = "aiofiles-0.6.0.tar.gz", hash = "sha256:e0281b157d3d5d59d803e3f4557dcc9a3dff28a4dd4829a9ff478adae50ca092"}, ] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, ] attrs = [ - {file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"}, - {file = "attrs-19.3.0.tar.gz", hash = "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"}, + {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"}, + {file = "attrs-20.3.0.tar.gz", hash = 
"sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"}, ] bandit = [ - {file = "bandit-1.6.2-py2.py3-none-any.whl", hash = "sha256:336620e220cf2d3115877685e264477ff9d9abaeb0afe3dc7264f55fa17a3952"}, - {file = "bandit-1.6.2.tar.gz", hash = "sha256:41e75315853507aa145d62a78a2a6c5e3240fe14ee7c601459d0df9418196065"}, + {file = "bandit-1.7.0-py3-none-any.whl", hash = "sha256:216be4d044209fa06cf2a3e51b319769a51be8318140659719aa7a115c35ed07"}, + {file = "bandit-1.7.0.tar.gz", hash = "sha256:8a4c7415254d75df8ff3c3b15cfe9042ecee628a1e40b44c15a98890fbfc2608"}, ] certifi = [ - {file = "certifi-2020.6.20-py2.py3-none-any.whl", hash = "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41"}, - {file = "certifi-2020.6.20.tar.gz", hash = "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3"}, + {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, + {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, ] chardet = [ - {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"}, - {file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"}, + {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, + {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, ] colorama = [ - {file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"}, - {file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"}, + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] coverage = [ - {file = "coverage-5.1-cp27-cp27m-macosx_10_12_x86_64.whl", hash = "sha256:0cb4be7e784dcdc050fc58ef05b71aa8e89b7e6636b99967fadbdba694cf2b65"}, - {file = "coverage-5.1-cp27-cp27m-macosx_10_13_intel.whl", hash = "sha256:c317eaf5ff46a34305b202e73404f55f7389ef834b8dbf4da09b9b9b37f76dd2"}, - {file = "coverage-5.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b83835506dfc185a319031cf853fa4bb1b3974b1f913f5bb1a0f3d98bdcded04"}, - {file = "coverage-5.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5f2294dbf7875b991c381e3d5af2bcc3494d836affa52b809c91697449d0eda6"}, - {file = "coverage-5.1-cp27-cp27m-win32.whl", hash = "sha256:de807ae933cfb7f0c7d9d981a053772452217df2bf38e7e6267c9cbf9545a796"}, - {file = "coverage-5.1-cp27-cp27m-win_amd64.whl", hash = "sha256:bf9cb9a9fd8891e7efd2d44deb24b86d647394b9705b744ff6f8261e6f29a730"}, - {file = "coverage-5.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:acf3763ed01af8410fc36afea23707d4ea58ba7e86a8ee915dfb9ceff9ef69d0"}, - {file = "coverage-5.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:dec5202bfe6f672d4511086e125db035a52b00f1648d6407cc8e526912c0353a"}, - {file = "coverage-5.1-cp35-cp35m-macosx_10_12_x86_64.whl", hash = "sha256:7a5bdad4edec57b5fb8dae7d3ee58622d626fd3a0be0dfceda162a7035885ecf"}, - {file = "coverage-5.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:1601e480b9b99697a570cea7ef749e88123c04b92d84cedaa01e117436b4a0a9"}, - {file = 
"coverage-5.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:dbe8c6ae7534b5b024296464f387d57c13caa942f6d8e6e0346f27e509f0f768"}, - {file = "coverage-5.1-cp35-cp35m-win32.whl", hash = "sha256:a027ef0492ede1e03a8054e3c37b8def89a1e3c471482e9f046906ba4f2aafd2"}, - {file = "coverage-5.1-cp35-cp35m-win_amd64.whl", hash = "sha256:0e61d9803d5851849c24f78227939c701ced6704f337cad0a91e0972c51c1ee7"}, - {file = "coverage-5.1-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:2d27a3f742c98e5c6b461ee6ef7287400a1956c11421eb574d843d9ec1f772f0"}, - {file = "coverage-5.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:66460ab1599d3cf894bb6baee8c684788819b71a5dc1e8fa2ecc152e5d752019"}, - {file = "coverage-5.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5c542d1e62eece33c306d66fe0a5c4f7f7b3c08fecc46ead86d7916684b36d6c"}, - {file = "coverage-5.1-cp36-cp36m-win32.whl", hash = "sha256:2742c7515b9eb368718cd091bad1a1b44135cc72468c731302b3d641895b83d1"}, - {file = "coverage-5.1-cp36-cp36m-win_amd64.whl", hash = "sha256:dead2ddede4c7ba6cb3a721870f5141c97dc7d85a079edb4bd8d88c3ad5b20c7"}, - {file = "coverage-5.1-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:01333e1bd22c59713ba8a79f088b3955946e293114479bbfc2e37d522be03355"}, - {file = "coverage-5.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:e1ea316102ea1e1770724db01998d1603ed921c54a86a2efcb03428d5417e489"}, - {file = "coverage-5.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:adeb4c5b608574a3d647011af36f7586811a2c1197c861aedb548dd2453b41cd"}, - {file = "coverage-5.1-cp37-cp37m-win32.whl", hash = "sha256:782caea581a6e9ff75eccda79287daefd1d2631cc09d642b6ee2d6da21fc0a4e"}, - {file = "coverage-5.1-cp37-cp37m-win_amd64.whl", hash = "sha256:00f1d23f4336efc3b311ed0d807feb45098fc86dee1ca13b3d6768cdab187c8a"}, - {file = "coverage-5.1-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:402e1744733df483b93abbf209283898e9f0d67470707e3c7516d84f48524f55"}, - {file = "coverage-5.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:a3f3654d5734a3ece152636aad89f58afc9213c6520062db3978239db122f03c"}, - {file = "coverage-5.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6402bd2fdedabbdb63a316308142597534ea8e1895f4e7d8bf7476c5e8751fef"}, - {file = "coverage-5.1-cp38-cp38-win32.whl", hash = "sha256:8fa0cbc7ecad630e5b0f4f35b0f6ad419246b02bc750de7ac66db92667996d24"}, - {file = "coverage-5.1-cp38-cp38-win_amd64.whl", hash = "sha256:79a3cfd6346ce6c13145731d39db47b7a7b859c0272f02cdb89a3bdcbae233a0"}, - {file = "coverage-5.1-cp39-cp39-win32.whl", hash = "sha256:a82b92b04a23d3c8a581fc049228bafde988abacba397d57ce95fe95e0338ab4"}, - {file = "coverage-5.1-cp39-cp39-win_amd64.whl", hash = "sha256:bb28a7245de68bf29f6fb199545d072d1036a1917dca17a1e75bbb919e14ee8e"}, - {file = "coverage-5.1.tar.gz", hash = "sha256:f90bfc4ad18450c80b024036eaf91e4a246ae287701aaa88eaebebf150868052"}, + {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, + 
{file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, + {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, + {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, + {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, + {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, + {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, + {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, + {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, + {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, + {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, + {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, + {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", 
hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, + {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, + {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, + {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, + {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, + {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, + {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, + {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, + {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, + {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, + {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, + {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, ] flake8 = [ - {file = "flake8-3.8.3-py2.py3-none-any.whl", hash = "sha256:15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c"}, - {file = "flake8-3.8.3.tar.gz", hash = "sha256:f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208"}, + {file = "flake8-3.8.4-py2.py3-none-any.whl", hash = "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839"}, + {file = "flake8-3.8.4.tar.gz", hash = "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b"}, ] gitdb = [ {file = "gitdb-4.0.5-py3-none-any.whl", hash = 
"sha256:91f36bfb1ab7949b3b40e23736db18231bf7593edada2ba5c3a174a7b23657ac"}, {file = "gitdb-4.0.5.tar.gz", hash = "sha256:c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9"}, ] gitpython = [ - {file = "GitPython-3.1.3-py3-none-any.whl", hash = "sha256:ef1d60b01b5ce0040ad3ec20bc64f783362d41fa0822a2742d3586e1f49bb8ac"}, - {file = "GitPython-3.1.3.tar.gz", hash = "sha256:e107af4d873daed64648b4f4beb89f89f0cfbe3ef558fc7821ed2331c2f8da1a"}, + {file = "GitPython-3.1.14-py3-none-any.whl", hash = "sha256:3283ae2fba31c913d857e12e5ba5f9a7772bbc064ae2bb09efafa71b0dd4939b"}, + {file = "GitPython-3.1.14.tar.gz", hash = "sha256:be27633e7509e58391f10207cd32b2a6cf5b908f92d9cd30da2e514e1137af61"}, ] h11 = [ {file = "h11-0.9.0-py2.py3-none-any.whl", hash = "sha256:4bc6d6a1238b7615b266ada57e0618568066f57dd6fa967d1290ec9309b2f2f1"}, {file = "h11-0.9.0.tar.gz", hash = "sha256:33d4bca7be0fa039f4e84d50ab00531047e53d6ee8ffbc83501ea602c169cae1"}, ] -h2 = [ - {file = "h2-3.2.0-py2.py3-none-any.whl", hash = "sha256:61e0f6601fa709f35cdb730863b4e5ec7ad449792add80d1410d4174ed139af5"}, - {file = "h2-3.2.0.tar.gz", hash = "sha256:875f41ebd6f2c44781259005b157faed1a5031df3ae5aa7bcb4628a6c0782f14"}, -] -hpack = [ - {file = "hpack-3.0.0-py2.py3-none-any.whl", hash = "sha256:0edd79eda27a53ba5be2dfabf3b15780928a0dff6eb0c60a3d6767720e970c89"}, - {file = "hpack-3.0.0.tar.gz", hash = "sha256:8eec9c1f4bfae3408a3f30500261f7e6a65912dc138526ea054f9ad98892e9d2"}, -] -hstspreload = [ - {file = "hstspreload-2020.6.30-py3-none-any.whl", hash = "sha256:35db8d932228c2782bf0e3fdb143a54263238593f6df431458c89b006898e5f2"}, - {file = "hstspreload-2020.6.30.tar.gz", hash = "sha256:81225e82207ec316a774e5d130454327752853dfaf347b2bf4d21e524cc49efa"}, +httpcore = [ + {file = "httpcore-0.11.1-py3-none-any.whl", hash = "sha256:72cfaa461dbdc262943ff4c9abf5b195391a03cdcc152e636adb4239b15e77e1"}, + {file = "httpcore-0.11.1.tar.gz", hash = "sha256:a35dddd1f4cc34ff37788337ef507c0ad0276241ece6daf663ac9e77c0b87232"}, ] httptools = [ {file = "httptools-0.1.1-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:a2719e1d7a84bb131c4f1e0cb79705034b48de6ae486eb5297a139d6a3296dce"}, @@ -714,91 +708,116 @@ httptools = [ {file = "httptools-0.1.1.tar.gz", hash = "sha256:41b573cf33f64a8f8f3400d0a7faf48e1888582b6f6e02b82b9bd4f0bf7497ce"}, ] httpx = [ - {file = "httpx-0.11.1-py2.py3-none-any.whl", hash = "sha256:1d3893d3e4244c569764a6bae5c5a9fbbc4a6ec3825450b5696602af7a275576"}, - {file = "httpx-0.11.1.tar.gz", hash = "sha256:7d2bfb726eeed717953d15dddb22da9c2fcf48a4d70ba1456aa0a7faeda33cf7"}, -] -hyperframe = [ - {file = "hyperframe-5.2.0-py2.py3-none-any.whl", hash = "sha256:5187962cb16dcc078f23cb5a4b110098d546c3f41ff2d4038a9896893bbd0b40"}, - {file = "hyperframe-5.2.0.tar.gz", hash = "sha256:a9f5c17f2cc3c719b917c4f33ed1c61bd1f8dfac4b1bd23b7c80b3400971b41f"}, + {file = "httpx-0.15.4-py3-none-any.whl", hash = "sha256:7b3c07bfdcdadd92020dd4c07b15932abdcf1c898422a4e98de3d19b2223310b"}, + {file = "httpx-0.15.4.tar.gz", hash = "sha256:4c81dbf98a29cb4f51f415140df56542f9d4860798d713e336642e953cddd1db"}, ] idna = [ {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, ] importlib-metadata = [ - {file = "importlib_metadata-1.7.0-py2.py3-none-any.whl", hash = "sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070"}, - {file = "importlib_metadata-1.7.0.tar.gz", hash = 
"sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83"}, + {file = "importlib_metadata-3.7.2-py3-none-any.whl", hash = "sha256:407d13f55dc6f2a844e62325d18ad7019a436c4bfcaee34cda35f2be6e7c3e34"}, + {file = "importlib_metadata-3.7.2.tar.gz", hash = "sha256:18d5ff601069f98d5d605b6a4b50c18a34811d655c55548adc833e687289acde"}, +] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] jsonschema = [ {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, ] +kbase-jsonrpc11base = [] kbase-jsonrpcbase = [ - {file = "kbase_jsonrpcbase-0.3.0a5-py3-none-any.whl", hash = "sha256:1306bebf9af7f7911a5b5478eef60d40d7b9098d5467adaa06b17fe2780513e5"}, - {file = "kbase_jsonrpcbase-0.3.0a5.tar.gz", hash = "sha256:6723eccb8a7722a29e6ef99a13b33f33e9735438ee5e4a4d612d448c2820874d"}, + {file = "kbase_jsonrpcbase-0.3.0a6-py3-none-any.whl", hash = "sha256:775e1f8dbc157521c528a1b0bfa63cdae733dc3e8dcf743654c175ebb24bb6df"}, + {file = "kbase_jsonrpcbase-0.3.0a6.tar.gz", hash = "sha256:ad17753b3ff8bcaf2f686495387a4ff8c7d3bc2f268ce5a83afec48d1d57e690"}, ] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] -more-itertools = [ - {file = "more-itertools-8.4.0.tar.gz", hash = "sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5"}, - {file = "more_itertools-8.4.0-py3-none-any.whl", hash = "sha256:b78134b2063dd214000685165d81c154522c3ee0a1c0d4d113c80361c234c5a2"}, -] multidict = [ - {file = "multidict-4.7.6-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:275ca32383bc5d1894b6975bb4ca6a7ff16ab76fa622967625baeebcf8079000"}, - {file = "multidict-4.7.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:1ece5a3369835c20ed57adadc663400b5525904e53bae59ec854a5d36b39b21a"}, - {file = "multidict-4.7.6-cp35-cp35m-win32.whl", hash = "sha256:5141c13374e6b25fe6bf092052ab55c0c03d21bd66c94a0e3ae371d3e4d865a5"}, - {file = "multidict-4.7.6-cp35-cp35m-win_amd64.whl", hash = "sha256:9456e90649005ad40558f4cf51dbb842e32807df75146c6d940b6f5abb4a78f3"}, - {file = "multidict-4.7.6-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:e0d072ae0f2a179c375f67e3da300b47e1a83293c554450b29c900e50afaae87"}, - {file = "multidict-4.7.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3750f2205b800aac4bb03b5ae48025a64e474d2c6cc79547988ba1d4122a09e2"}, - {file = "multidict-4.7.6-cp36-cp36m-win32.whl", hash = "sha256:f07acae137b71af3bb548bd8da720956a3bc9f9a0b87733e0899226a2317aeb7"}, - {file = "multidict-4.7.6-cp36-cp36m-win_amd64.whl", hash = "sha256:6513728873f4326999429a8b00fc7ceddb2509b01d5fd3f3be7881a257b8d463"}, - {file = "multidict-4.7.6-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:feed85993dbdb1dbc29102f50bca65bdc68f2c0c8d352468c25b54874f23c39d"}, - {file = "multidict-4.7.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fcfbb44c59af3f8ea984de67ec7c306f618a3ec771c2843804069917a8f2e255"}, - {file = "multidict-4.7.6-cp37-cp37m-win32.whl", hash = 
"sha256:4538273208e7294b2659b1602490f4ed3ab1c8cf9dbdd817e0e9db8e64be2507"}, - {file = "multidict-4.7.6-cp37-cp37m-win_amd64.whl", hash = "sha256:d14842362ed4cf63751648e7672f7174c9818459d169231d03c56e84daf90b7c"}, - {file = "multidict-4.7.6-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:c026fe9a05130e44157b98fea3ab12969e5b60691a276150db9eda71710cd10b"}, - {file = "multidict-4.7.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:51a4d210404ac61d32dada00a50ea7ba412e6ea945bbe992e4d7a595276d2ec7"}, - {file = "multidict-4.7.6-cp38-cp38-win32.whl", hash = "sha256:5cf311a0f5ef80fe73e4f4c0f0998ec08f954a6ec72b746f3c179e37de1d210d"}, - {file = "multidict-4.7.6-cp38-cp38-win_amd64.whl", hash = "sha256:7388d2ef3c55a8ba80da62ecfafa06a1c097c18032a501ffd4cabbc52d7f2b19"}, - {file = "multidict-4.7.6.tar.gz", hash = "sha256:fbb77a75e529021e7c4a8d4e823d88ef4d23674a202be4f5addffc72cbb91430"}, + {file = "multidict-5.1.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224"}, + {file = "multidict-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26"}, + {file = "multidict-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6"}, + {file = "multidict-5.1.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37"}, + {file = "multidict-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5"}, + {file = "multidict-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632"}, + {file = "multidict-5.1.0-cp38-cp38-macosx_10_14_x86_64.whl", 
hash = "sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea"}, + {file = "multidict-5.1.0-cp38-cp38-win32.whl", hash = "sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656"}, + {file = "multidict-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3"}, + {file = "multidict-5.1.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda"}, + {file = "multidict-5.1.0-cp39-cp39-win32.whl", hash = "sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80"}, + {file = "multidict-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359"}, + {file = "multidict-5.1.0.tar.gz", hash = "sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5"}, ] mypy = [ - {file = "mypy-0.782-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:2c6cde8aa3426c1682d35190b59b71f661237d74b053822ea3d748e2c9578a7c"}, - {file = "mypy-0.782-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9c7a9a7ceb2871ba4bac1cf7217a7dd9ccd44c27c2950edbc6dc08530f32ad4e"}, - {file = "mypy-0.782-cp35-cp35m-win_amd64.whl", hash = "sha256:c05b9e4fb1d8a41d41dec8786c94f3b95d3c5f528298d769eb8e73d293abc48d"}, - {file = "mypy-0.782-cp36-cp36m-macosx_10_6_x86_64.whl", hash = "sha256:6731603dfe0ce4352c555c6284c6db0dc935b685e9ce2e4cf220abe1e14386fd"}, - {file = "mypy-0.782-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:f05644db6779387ccdb468cc47a44b4356fc2ffa9287135d05b70a98dc83b89a"}, - {file = "mypy-0.782-cp36-cp36m-win_amd64.whl", hash = "sha256:b7fbfabdbcc78c4f6fc4712544b9b0d6bf171069c6e0e3cb82440dd10ced3406"}, - {file = "mypy-0.782-cp37-cp37m-macosx_10_6_x86_64.whl", hash = 
"sha256:3fdda71c067d3ddfb21da4b80e2686b71e9e5c72cca65fa216d207a358827f86"}, - {file = "mypy-0.782-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d7df6eddb6054d21ca4d3c6249cae5578cb4602951fd2b6ee2f5510ffb098707"}, - {file = "mypy-0.782-cp37-cp37m-win_amd64.whl", hash = "sha256:a4a2cbcfc4cbf45cd126f531dedda8485671545b43107ded25ce952aac6fb308"}, - {file = "mypy-0.782-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6bb93479caa6619d21d6e7160c552c1193f6952f0668cdda2f851156e85186fc"}, - {file = "mypy-0.782-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:81c7908b94239c4010e16642c9102bfc958ab14e36048fa77d0be3289dda76ea"}, - {file = "mypy-0.782-cp38-cp38-win_amd64.whl", hash = "sha256:5dd13ff1f2a97f94540fd37a49e5d255950ebcdf446fb597463a40d0df3fac8b"}, - {file = "mypy-0.782-py3-none-any.whl", hash = "sha256:e0b61738ab504e656d1fe4ff0c0601387a5489ca122d55390ade31f9ca0e252d"}, - {file = "mypy-0.782.tar.gz", hash = "sha256:eff7d4a85e9eea55afa34888dfeaccde99e7520b51f867ac28a48492c0b1130c"}, + {file = "mypy-0.812-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:a26f8ec704e5a7423c8824d425086705e381b4f1dfdef6e3a1edab7ba174ec49"}, + {file = "mypy-0.812-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:28fb5479c494b1bab244620685e2eb3c3f988d71fd5d64cc753195e8ed53df7c"}, + {file = "mypy-0.812-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:9743c91088d396c1a5a3c9978354b61b0382b4e3c440ce83cf77994a43e8c521"}, + {file = "mypy-0.812-cp35-cp35m-win_amd64.whl", hash = "sha256:d7da2e1d5f558c37d6e8c1246f1aec1e7349e4913d8fb3cb289a35de573fe2eb"}, + {file = "mypy-0.812-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4eec37370483331d13514c3f55f446fc5248d6373e7029a29ecb7b7494851e7a"}, + {file = "mypy-0.812-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d65cc1df038ef55a99e617431f0553cd77763869eebdf9042403e16089fe746c"}, + {file = "mypy-0.812-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:61a3d5b97955422964be6b3baf05ff2ce7f26f52c85dd88db11d5e03e146a3a6"}, + {file = "mypy-0.812-cp36-cp36m-win_amd64.whl", hash = "sha256:25adde9b862f8f9aac9d2d11971f226bd4c8fbaa89fb76bdadb267ef22d10064"}, + {file = "mypy-0.812-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:552a815579aa1e995f39fd05dde6cd378e191b063f031f2acfe73ce9fb7f9e56"}, + {file = "mypy-0.812-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:499c798053cdebcaa916eef8cd733e5584b5909f789de856b482cd7d069bdad8"}, + {file = "mypy-0.812-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:5873888fff1c7cf5b71efbe80e0e73153fe9212fafdf8e44adfe4c20ec9f82d7"}, + {file = "mypy-0.812-cp37-cp37m-win_amd64.whl", hash = "sha256:9f94aac67a2045ec719ffe6111df543bac7874cee01f41928f6969756e030564"}, + {file = "mypy-0.812-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d23e0ea196702d918b60c8288561e722bf437d82cb7ef2edcd98cfa38905d506"}, + {file = "mypy-0.812-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:674e822aa665b9fd75130c6c5f5ed9564a38c6cea6a6432ce47eafb68ee578c5"}, + {file = "mypy-0.812-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:abf7e0c3cf117c44d9285cc6128856106183938c68fd4944763003decdcfeb66"}, + {file = "mypy-0.812-cp38-cp38-win_amd64.whl", hash = "sha256:0d0a87c0e7e3a9becdfbe936c981d32e5ee0ccda3e0f07e1ef2c3d1a817cf73e"}, + {file = "mypy-0.812-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7ce3175801d0ae5fdfa79b4f0cfed08807af4d075b402b7e294e6aa72af9aa2a"}, + {file = "mypy-0.812-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b09669bcda124e83708f34a94606e01b614fa71931d356c1f1a5297ba11f110a"}, + {file = "mypy-0.812-cp39-cp39-manylinux2010_x86_64.whl", 
hash = "sha256:33f159443db0829d16f0a8d83d94df3109bb6dd801975fe86bacb9bf71628e97"}, + {file = "mypy-0.812-cp39-cp39-win_amd64.whl", hash = "sha256:3f2aca7f68580dc2508289c729bd49ee929a436208d2b2b6aab15745a70a57df"}, + {file = "mypy-0.812-py3-none-any.whl", hash = "sha256:2f9b3407c58347a452fc0736861593e105139b905cca7d097e413453a1d650b4"}, + {file = "mypy-0.812.tar.gz", hash = "sha256:cd07039aa5df222037005b08fbbfd69b3ab0b0bd7a07d7906de75ae52c4e3119"}, ] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] packaging = [ - {file = "packaging-20.4-py2.py3-none-any.whl", hash = "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"}, - {file = "packaging-20.4.tar.gz", hash = "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8"}, + {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, + {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, ] pbr = [ - {file = "pbr-5.4.5-py2.py3-none-any.whl", hash = "sha256:579170e23f8e0c2f24b0de612f71f648eccb79fb1322c814ae6b3c07b5ba23e8"}, - {file = "pbr-5.4.5.tar.gz", hash = "sha256:07f558fece33b05caf857474a366dfcc00562bca13dd8b47b2b3e22d9f9bf55c"}, + {file = "pbr-5.5.1-py2.py3-none-any.whl", hash = "sha256:b236cde0ac9a6aedd5e3c34517b423cd4fd97ef723849da6b0d2231142d89c00"}, + {file = "pbr-5.5.1.tar.gz", hash = "sha256:5fad80b613c402d5b7df7bd84812548b2a61e9977387a80a5fc5c396492b13c9"}, ] pluggy = [ {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, ] py = [ - {file = "py-1.9.0-py2.py3-none-any.whl", hash = "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2"}, - {file = "py-1.9.0.tar.gz", hash = "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"}, + {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, + {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, ] pycodestyle = [ {file = "pycodestyle-2.6.0-py2.py3-none-any.whl", hash = "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367"}, @@ -813,111 +832,146 @@ pyparsing = [ {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, ] pyrsistent = [ - {file = "pyrsistent-0.16.0.tar.gz", hash = "sha256:28669905fe725965daa16184933676547c5bb40a5153055a8dee2a4bd7933ad3"}, + {file = "pyrsistent-0.17.3.tar.gz", hash = "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e"}, ] pytest = [ - {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"}, - {file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"}, + {file = "pytest-6.2.2-py3-none-any.whl", hash = "sha256:b574b57423e818210672e07ca1fa90aaf194a4f63f3ab909a2c67ebb22913839"}, + {file = "pytest-6.2.2.tar.gz", hash = "sha256:9d1edf9e7d0b84d72ea3dbcdfd22b35fb543a5e8f2a60092dd578936bf63d7f9"}, ] 
pytest-cov = [ - {file = "pytest-cov-2.10.0.tar.gz", hash = "sha256:1a629dc9f48e53512fcbfda6b07de490c374b0c83c55ff7a1720b3fccff0ac87"}, - {file = "pytest_cov-2.10.0-py2.py3-none-any.whl", hash = "sha256:6e6d18092dce6fad667cd7020deed816f858ad3b49d5b5e2b1cc1c97a4dba65c"}, + {file = "pytest-cov-2.11.1.tar.gz", hash = "sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7"}, + {file = "pytest_cov-2.11.1-py2.py3-none-any.whl", hash = "sha256:bdb9fdb0b85a7cc825269a4c56b48ccaa5c7e365054b6038772c32ddcdc969da"}, ] pyyaml = [ - {file = "PyYAML-5.3.1-cp27-cp27m-win32.whl", hash = "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f"}, - {file = "PyYAML-5.3.1-cp27-cp27m-win_amd64.whl", hash = "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76"}, - {file = "PyYAML-5.3.1-cp35-cp35m-win32.whl", hash = "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2"}, - {file = "PyYAML-5.3.1-cp35-cp35m-win_amd64.whl", hash = "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c"}, - {file = "PyYAML-5.3.1-cp36-cp36m-win32.whl", hash = "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2"}, - {file = "PyYAML-5.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648"}, - {file = "PyYAML-5.3.1-cp37-cp37m-win32.whl", hash = "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"}, - {file = "PyYAML-5.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf"}, - {file = "PyYAML-5.3.1-cp38-cp38-win32.whl", hash = "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97"}, - {file = "PyYAML-5.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee"}, - {file = "PyYAML-5.3.1.tar.gz", hash = "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"}, + {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, + {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, + {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, + {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, + {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541"}, + {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, + {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, + {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, + {file = 
"PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"}, + {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, + {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, + {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc"}, + {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, + {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, + {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6"}, + {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, + {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, + {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, ] requests = [ - {file = "requests-2.24.0-py2.py3-none-any.whl", hash = "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898"}, - {file = "requests-2.24.0.tar.gz", hash = "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b"}, + {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, + {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, ] responses = [ - {file = "responses-0.10.15-py2.py3-none-any.whl", hash = "sha256:af94d28cdfb48ded0ad82a5216616631543650f440334a693479b8991a6594a2"}, - {file = "responses-0.10.15.tar.gz", hash = "sha256:7bb697a5fedeb41d81e8b87f152d453d5cab42dcd1691b6a7d6097e94d33f373"}, + {file = "responses-0.12.1-py2.py3-none-any.whl", hash = "sha256:ef265bd3200bdef5ec17912fc64a23570ba23597fd54ca75c18650fa1699213d"}, + {file = "responses-0.12.1.tar.gz", hash = "sha256:2e5764325c6b624e42b428688f2111fea166af46623cb0127c05f6afb14d3457"}, ] rfc3986 = [ {file = 
"rfc3986-1.4.0-py2.py3-none-any.whl", hash = "sha256:af9147e9aceda37c91a05f4deb128d4b4b49d6b199775fd2d2927768abdc8f50"}, {file = "rfc3986-1.4.0.tar.gz", hash = "sha256:112398da31a3344dc25dbf477d8df6cb34f9278a94fee2625d89e4514be8bb9d"}, ] sanic = [ - {file = "sanic-20.6.3-py3-none-any.whl", hash = "sha256:202b75fbf334140cffe559f18772c08263ad97e3534cda3597bc7c3446311526"}, - {file = "sanic-20.6.3.tar.gz", hash = "sha256:30e83d9f677b609d6b8ccab7c9551ca7e9a5f19ac0579f5aa10199ab6d4138ed"}, + {file = "sanic-20.12.2-py3-none-any.whl", hash = "sha256:9f01a3ebfe4bf6c57e164bfa1b66c977cb47421e54ae91cc04173cfc711891ce"}, + {file = "sanic-20.12.2.tar.gz", hash = "sha256:b9d7e24cd293c126f6f4aaf26fcc3b11e35338b86e8c2693b71d05196e7b02b0"}, ] six = [ {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, ] smmap = [ - {file = "smmap-3.0.4-py2.py3-none-any.whl", hash = "sha256:54c44c197c819d5ef1991799a7e30b662d1e520f2ac75c9efbeb54a742214cf4"}, - {file = "smmap-3.0.4.tar.gz", hash = "sha256:9c98bbd1f9786d22f14b3d4126894d56befb835ec90cef151af566c7e19b5d24"}, + {file = "smmap-3.0.5-py2.py3-none-any.whl", hash = "sha256:7bfcf367828031dc893530a29cb35eb8c8f2d7c8f2d0989354d75d24c8573714"}, + {file = "smmap-3.0.5.tar.gz", hash = "sha256:84c2751ef3072d4f6b2785ec7ee40244c6f45eb934d9e543e2c51f1bd3d54c50"}, ] sniffio = [ - {file = "sniffio-1.1.0-py3-none-any.whl", hash = "sha256:20ed6d5b46f8ae136d00b9dcb807615d83ed82ceea6b2058cecb696765246da5"}, - {file = "sniffio-1.1.0.tar.gz", hash = "sha256:8e3810100f69fe0edd463d02ad407112542a11ffdc29f67db2bf3771afb87a21"}, + {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"}, + {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, ] stevedore = [ - {file = "stevedore-2.0.1-py3-none-any.whl", hash = "sha256:c4724f8d7b8f6be42130663855d01a9c2414d6046055b5a65ab58a0e38637688"}, - {file = "stevedore-2.0.1.tar.gz", hash = "sha256:609912b87df5ad338ff8e44d13eaad4f4170a65b79ae9cb0aa5632598994a1b7"}, + {file = "stevedore-3.3.0-py3-none-any.whl", hash = "sha256:50d7b78fbaf0d04cd62411188fa7eedcb03eb7f4c4b37005615ceebe582aa82a"}, + {file = "stevedore-3.3.0.tar.gz", hash = "sha256:3a5bbd0652bf552748871eaa73a4a8dc2899786bc497a2aa1fcb4dcdb0debeee"}, +] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] typed-ast = [ - {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"}, - {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"}, - {file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"}, - {file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"}, - {file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"}, - {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = 
"sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"}, - {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"}, - {file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"}, - {file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"}, - {file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"}, - {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"}, - {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"}, - {file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"}, - {file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"}, - {file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"}, - {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"}, - {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"}, - {file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"}, - {file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"}, - {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"}, - {file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"}, + {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70"}, + {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487"}, + {file = "typed_ast-1.4.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412"}, + {file = "typed_ast-1.4.2-cp35-cp35m-win32.whl", hash = "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400"}, + {file = "typed_ast-1.4.2-cp35-cp35m-win_amd64.whl", hash = "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606"}, + {file = "typed_ast-1.4.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64"}, + {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07"}, + {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc"}, + {file = "typed_ast-1.4.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a"}, + {file = "typed_ast-1.4.2-cp36-cp36m-win32.whl", hash = 
"sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151"}, + {file = "typed_ast-1.4.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3"}, + {file = "typed_ast-1.4.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41"}, + {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f"}, + {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581"}, + {file = "typed_ast-1.4.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37"}, + {file = "typed_ast-1.4.2-cp37-cp37m-win32.whl", hash = "sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd"}, + {file = "typed_ast-1.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496"}, + {file = "typed_ast-1.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc"}, + {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10"}, + {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea"}, + {file = "typed_ast-1.4.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787"}, + {file = "typed_ast-1.4.2-cp38-cp38-win32.whl", hash = "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2"}, + {file = "typed_ast-1.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937"}, + {file = "typed_ast-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1"}, + {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6"}, + {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166"}, + {file = "typed_ast-1.4.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d"}, + {file = "typed_ast-1.4.2-cp39-cp39-win32.whl", hash = "sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b"}, + {file = "typed_ast-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440"}, + {file = "typed_ast-1.4.2.tar.gz", hash = "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a"}, ] typing-extensions = [ - {file = "typing_extensions-3.7.4.2-py2-none-any.whl", hash = "sha256:f8d2bd89d25bc39dabe7d23df520442fa1d8969b82544370e03d88b5a591c392"}, - {file = "typing_extensions-3.7.4.2-py3-none-any.whl", hash = "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5"}, - {file = "typing_extensions-3.7.4.2.tar.gz", hash = "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae"}, + {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, + {file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = 
"sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"}, + {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, ] ujson = [ - {file = "ujson-3.0.0-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:0959a5b569e192459b492b007e3fd63d8f4b4bcb4f69dcddca850a9b9dfe3e7a"}, - {file = "ujson-3.0.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:154f778f0b028390067aaedce8399730d4f528a16a1c214fe4eeb9c4e4f51810"}, - {file = "ujson-3.0.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:019a17e7162f26e264f1645bb41630f7103d178c092ea4bb8f3b16126c3ea210"}, - {file = "ujson-3.0.0-cp35-cp35m-win_amd64.whl", hash = "sha256:670018d4ab4b0755a7234a9f4791723abcd0506c0eed33b2ed50579c4aff31f2"}, - {file = "ujson-3.0.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:634c206f4fb3be7e4523768c636d2dd41cb9c7130e2d219ef8305b8fb6f4838e"}, - {file = "ujson-3.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:3bd791d17a175c1c6566aeaec1755b58e3f021fe9bb62f10f02b656b299199f5"}, - {file = "ujson-3.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0379ffc7484b862a292e924c15ad5f1c5306d4271e2efd162144812afb08ff97"}, - {file = "ujson-3.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f40bb0d0cb534aad3e24884cf864bda7a71eb5984bd1da61d1711bbfb3be2c38"}, - {file = "ujson-3.0.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:0f33359908df32033195bfdd59ba2bfb90a23cb280ef9a0ba11e5013a53d7fd9"}, - {file = "ujson-3.0.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:bea2958c7b5bf4f191f0def751b6f7c8b208edb5f7277e21776329f2ca042385"}, - {file = "ujson-3.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f854702a9aff3a445f4a0b715d240f2a3d84014d8ae8aad05a982c7ffab12525"}, - {file = "ujson-3.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c04d253fec814657fd9f150ef2333dbd0bc6f46208355aa753a29e0696b7fa7e"}, - {file = "ujson-3.0.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:a32f2def62b10e8a19084d17d40363c4da1ac5f52d300a9e99d7efb49fe5f34a"}, - {file = "ujson-3.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:9c68557da3e3ad57e0105aceba0cce5f8f7cd07d207c3860e59c0b3044532830"}, - {file = "ujson-3.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:0e2352b60c4ac4fc75b723435faf36ef5e7f3bfb988adb4d589b5e0e6e1d90aa"}, - {file = "ujson-3.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:c841a6450d64c24c64cbcca429bab22cdb6daef5eaddfdfebe798a5e9e5aff4c"}, - {file = "ujson-3.0.0.tar.gz", hash = "sha256:e0199849d61cc6418f94d52a314c6a27524d65e82174d2a043fb718f73d1520d"}, + {file = "ujson-4.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:e390df0dcc7897ffb98e17eae1f4c442c39c91814c298ad84d935a3c5c7a32fa"}, + {file = "ujson-4.0.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:84b1dca0d53b0a8d58835f72ea2894e4d6cf7a5dd8f520ab4cbd698c81e49737"}, + {file = "ujson-4.0.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:91396a585ba51f84dc71c8da60cdc86de6b60ba0272c389b6482020a1fac9394"}, + {file = "ujson-4.0.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:eb6b25a7670c7537a5998e695fa62ff13c7f9c33faf82927adf4daa460d5f62e"}, + {file = "ujson-4.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f8aded54c2bc554ce20b397f72101737dd61ee7b81c771684a7dd7805e6cca0c"}, + {file = "ujson-4.0.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:30962467c36ff6de6161d784cd2a6aac1097f0128b522d6e9291678e34fb2b47"}, + {file = "ujson-4.0.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:fc51e545d65689c398161f07fd405104956ec27f22453de85898fa088b2cd4bb"}, + {file = 
"ujson-4.0.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e6e90330670c78e727d6637bb5a215d3e093d8e3570d439fd4922942f88da361"}, + {file = "ujson-4.0.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:5e1636b94c7f1f59a8ead4c8a7bab1b12cc52d4c21ababa295ffec56b445fd2a"}, + {file = "ujson-4.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:e2cadeb0ddc98e3963bea266cc5b884e5d77d73adf807f0bda9eca64d1c509d5"}, + {file = "ujson-4.0.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:a214ba5a21dad71a43c0f5aef917cd56a2d70bc974d845be211c66b6742a471c"}, + {file = "ujson-4.0.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:0190d26c0e990c17ad072ec8593647218fe1c675d11089cd3d1440175b568967"}, + {file = "ujson-4.0.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:f273a875c0b42c2a019c337631bc1907f6fdfbc84210cc0d1fff0e2019bbfaec"}, + {file = "ujson-4.0.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:d3a87888c40b5bfcf69b4030427cd666893e826e82cc8608d1ba8b4b5e04ea99"}, + {file = "ujson-4.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:7333e8bc45ea28c74ae26157eacaed5e5629dbada32e0103c23eb368f93af108"}, + {file = "ujson-4.0.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b3a6dcc660220539aa718bcc9dbd6dedf2a01d19c875d1033f028f212e36d6bb"}, + {file = "ujson-4.0.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:0ea07fe57f9157118ca689e7f6db72759395b99121c0ff038d2e38649c626fb1"}, + {file = "ujson-4.0.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4d6d061563470cac889c0a9fd367013a5dbd8efc36ad01ab3e67a57e56cad720"}, + {file = "ujson-4.0.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b5c70704962cf93ec6ea3271a47d952b75ae1980d6c56b8496cec2a722075939"}, + {file = "ujson-4.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:aad6d92f4d71e37ea70e966500f1951ecd065edca3a70d3861b37b176dd6702c"}, + {file = "ujson-4.0.2.tar.gz", hash = "sha256:c615a9e9e378a7383b756b7e7a73c38b22aeb8967a8bfbffd4741f7ffd043c4d"}, ] urllib3 = [ - {file = "urllib3-1.25.9-py2.py3-none-any.whl", hash = "sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115"}, - {file = "urllib3-1.25.9.tar.gz", hash = "sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527"}, + {file = "urllib3-1.26.3-py2.py3-none-any.whl", hash = "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80"}, + {file = "urllib3-1.26.3.tar.gz", hash = "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73"}, ] uvloop = [ {file = "uvloop-0.14.0-cp35-cp35m-macosx_10_11_x86_64.whl", hash = "sha256:08b109f0213af392150e2fe6f81d33261bb5ce968a288eb698aad4f46eb711bd"}, @@ -930,10 +984,6 @@ uvloop = [ {file = "uvloop-0.14.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4315d2ec3ca393dd5bc0b0089d23101276778c304d42faff5dc4579cb6caef09"}, {file = "uvloop-0.14.0.tar.gz", hash = "sha256:123ac9c0c7dd71464f58f1b4ee0bbd81285d96cdda8bc3519281b8973e3a461e"}, ] -wcwidth = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, -] websockets = [ {file = "websockets-8.1-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:3762791ab8b38948f0c4d281c8b2ddfa99b7e510e46bd8dfa942a5fff621068c"}, {file = "websockets-8.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:3db87421956f1b0779a7564915875ba774295cc86e81bc671631379371af1170"}, @@ -959,6 +1009,6 @@ websockets = [ {file = "websockets-8.1.tar.gz", hash = 
"sha256:5c65d2da8c6bce0fca2528f69f44b2f977e06954c8512a952222cea50dad430f"}, ] zipp = [ - {file = "zipp-3.1.0-py3-none-any.whl", hash = "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b"}, - {file = "zipp-3.1.0.tar.gz", hash = "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"}, + {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, + {file = "zipp-3.4.1.tar.gz", hash = "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76"}, ] diff --git a/pyproject.toml b/pyproject.toml index 465f0ca..ed3dbd2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,21 +7,23 @@ license = "MIT" [tool.poetry.dependencies] python = "^3.7" -sanic = "20.6.3" -requests = "2.24.0" +sanic = "20.12.2" +requests = "2.25.1" jsonschema = "3.2.0" -pyyaml = "5.3.1" -kbase-jsonrpcbase = "0.3.0a5" +pyyaml = "5.4.1" +kbase-jsonrpcbase = "0.3.0a6" +kbase-jsonrpc11base = {git="https://github.com/kbaseIncubator/kbase-jsonrpc11base", branch="v0.1.6"} + [tool.poetry.dev-dependencies] -mypy = "^0.782" -bandit = "^1.6.2" -mccabe = "^0.6.1" -flake8 = "^3.8.3" -coverage = "^5.1" -pytest = "^5.4.3" -pytest-cov = "^2.10.0" -responses = "^0.10.15" +mypy = "0.812" +bandit = "1.7.0" +mccabe = "0.6.1" +flake8 = "3.8.4" +coverage = "5.5" +pytest = "6.2.2" +pytest-cov = "2.11.1" +responses = "0.12.1" [build-system] requires = ["poetry>=0.12"] diff --git a/rpc-schema.yaml b/rpc-schema.yaml index fdbd71e..071e0ba 100644 --- a/rpc-schema.yaml +++ b/rpc-schema.yaml @@ -71,7 +71,7 @@ definitions: examples: - [[field1, asc], [field2, desc]] type: array - default: ["_score", "desc"] + default: [["_score", "desc"]] items: type: array minLength: 2 diff --git a/scripts/build-integration-test-images.sh b/scripts/build-integration-test-images.sh new file mode 100644 index 0000000..79ad57b --- /dev/null +++ b/scripts/build-integration-test-images.sh @@ -0,0 +1,3 @@ +cd tests/integration/docker +docker-compose build +cd ../../.. \ No newline at end of file diff --git a/scripts/run-dev-server.sh b/scripts/run-dev-server.sh new file mode 100644 index 0000000..e274878 --- /dev/null +++ b/scripts/run-dev-server.sh @@ -0,0 +1,3 @@ +cd tests/integration/docker +docker-compose up +cd ../../.. \ No newline at end of file diff --git a/scripts/run_integration_tests b/scripts/run_integration_tests index 0eb5ee4..51f34c4 100755 --- a/scripts/run_integration_tests +++ b/scripts/run_integration_tests @@ -2,13 +2,6 @@ set -e set -o xtrace -cleanup () { - echo "Stopping container.." - docker stop $(docker ps -aq) -} - -cleanup -trap cleanup EXIT path=${@:-"tests/integration"} export PYTHONPATH=. 
poetry run pytest -vv -s $path diff --git a/scripts/run_tests b/scripts/run_tests index bdd5234..982f93c 100755 --- a/scripts/run_tests +++ b/scripts/run_tests @@ -8,7 +8,7 @@ export WORKSPACE_URL="http://localhost:5555/ws" path=${@:-"tests/unit"} -poetry run flake8 +poetry run flake8 src tests poetry run mypy --ignore-missing-imports src/**/*.py poetry run bandit -r src diff --git a/src/es_client/query.py b/src/es_client/query.py index 0bd4960..65d4be3 100644 --- a/src/es_client/query.py +++ b/src/es_client/query.py @@ -21,40 +21,38 @@ def search(params, meta): ES 7 search query documentation: https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl.html """ - user_query = params.get('query') - authorized_ws_ids = [] - if not params.get('only_public') and meta['auth']: - # Fetch the workspace IDs that the user can read - # Used for simple access control - authorized_ws_ids = ws_auth(meta['auth']) - # Get the index name(s) to include and exclude (used in the URL below) - index_name_str = _construct_index_name(params) - # We insert the user's query as a "must" entry + + # The query object, which we build up in steps below query = {'bool': {}} # type: dict + + # Fetch the workspace IDs that the user can read. + # Used for access control and also to ensure that workspaces which are + # inaccessible, but have not yet been updated in search, are still filtered out. + authorized_ws_ids = ws_auth( + meta['auth'], + params.get('only_public', False), + params.get('only_private', False)) + + query['bool']['filter'] = [ + {'terms': {'access_group': authorized_ws_ids}} + ] + + # We insert the user's query as a "must" entry + user_query = params.get('query') if user_query: query['bool']['must'] = user_query - # Our access control query is then inserted under a "filter" depending on options: - if params.get('only_public'): - # Public workspaces only; most efficient - query['bool']['filter'] = {'term': {'is_public': True}} - elif params.get('only_private'): - # Private workspaces only - query['bool']['filter'] = [ - {'term': {'is_public': False}}, - {'terms': {'access_group': authorized_ws_ids}} - ] - else: - # Find all documents, whether private or public - query['bool']['filter'] = { - 'bool': { - 'should': [ - {'term': {'is_public': True}}, - {'terms': {'access_group': authorized_ws_ids}} - ] - } - } + + # Get the index name(s) to include and exclude (used in the URL below) + index_name_str = _construct_index_name(params) + + # Make a query request to elasticsearch url = config['elasticsearch_url'] + '/' + index_name_str + '/_search' + + # TODO: address the performance settings below: + # - 3m for timeout seems excessive, and many other elements of the + # search process may have 1m timeouts; perhaps default to a lower limit, and + # allow a parameter to set the timeout to an arbitrary value + # - the "allow_expensive_queries" setting has been disabled, why?
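For orientation, a minimal sketch of the query assembled above, assuming a hypothetical caller whose readable workspaces are 1, 42, and 77 and who supplied a simple match query (all values here are illustrative):

# Illustrative values only: the real workspace IDs come from ws_auth() and the
# 'must' clause is whatever Elasticsearch query the caller passed in params['query'].
authorized_ws_ids = [1, 42, 77]
user_query = {'match': {'agg_fields': 'coliform'}}
query = {
    'bool': {
        'filter': [
            {'terms': {'access_group': authorized_ws_ids}},
        ],
        'must': user_query,
    }
}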
options = { 'query': query, 'size': 0 if params.get('count') else params.get('size', 10), @@ -63,31 +61,41 @@ def search(params, meta): # Disallow expensive queries, such as joins, to prevent any denial of service # 'search': {'allow_expensive_queries': False}, } + if not params.get('count') and params.get('size', 10) > 0 and not params.get('track_total_hits'): options['terminate_after'] = 10000 + # User-supplied aggregations if params.get('aggs'): options['aggs'] = params['aggs'] + # User-supplied sorting rules if params.get('sort'): options['sort'] = params['sort'] + # User-supplied source filters if params.get('source'): options['_source'] = params.get('source') + # Search results highlighting if params.get('highlight'): - options['highlight'] = {'fields': params['highlight']} + options['highlight'] = params['highlight'] + if params.get('track_total_hits'): options['track_total_hits'] = params.get('track_total_hits') + headers = {'Content-Type': 'application/json'} + # Allows index exclusion; otherwise there is an error params = {'allow_no_indices': 'true'} + resp = requests.post(url, data=json.dumps(options), params=params, headers=headers) + if not resp.ok: _handle_es_err(resp) + resp_json = resp.json() - result = _handle_response(resp_json) - return result + return _handle_response(resp_json) def _handle_es_err(resp): diff --git a/src/exceptions.py b/src/exceptions.py index 93a9444..406e246 100644 --- a/src/exceptions.py +++ b/src/exceptions.py @@ -1,34 +1,70 @@ +from src.utils.obj_utils import get_path +# TODO: we should use the jsonrpc11base library's base exception for +# server errors class ResponseError(Exception): - def __init__(self, code=-32000, message='Server error', status=400): + def __init__(self, code=-32000, message='Server error'): self.message = message - self.code = code - self.status = status + self.jsonrpc_code = code -class UnknownType(ResponseError): +class AuthError(ResponseError): - def __init__(self, message): - super().__init__(code=-32005, message=message) + def __init__(self, auth_resp_json, resp_text): + # Extract the error message from the auth service's RPC response + msg = get_path(auth_resp_json, ['error', 'message']) + if msg is None: + # Fall back to the full response body + msg = resp_text + super().__init__(code=-32001, message=msg) -class ElasticsearchError(ResponseError): +class UnknownIndex(ResponseError): def __init__(self, message): - msg = f"Elasticsearch request error:\n{message}" - super().__init__(code=-32003, message=msg) + super().__init__(code=-32002, message=message) -class UnknownIndex(ResponseError): +class ElasticsearchError(ResponseError): def __init__(self, message): - super().__init__(code=-32002, message=message) + msg = f"Elasticsearch request error:\n{message}" + super().__init__(code=-32003, message=msg) class UserProfileError(ResponseError): def __init__(self, url, resp_text): - msg = "User profile service error:\nResponse: {resp_text}\nURL: {url}" + self.url = url + self.resp_text = resp_text + msg = f"User profile service error:\nResponse: {resp_text}\nURL: {url}" + super().__init__(code=-32004, message=msg) + + +class UnknownType(ResponseError): + + def __init__(self, message): + super().__init__(code=-32005, message=message) + + +class NoAccessGroupError(ResponseError): + """ + Raised when a search result does not contain an "access_group" + key, which should be impossible.
+ """ + + def __init__(self): + message = 'A search document does not contain an access group' + super().__init__(code=-32006, message=message) + + +class NoUserProfileError(ResponseError): + """ + Raised when a username does not have an associated user profile. + """ + + def __init__(self, username): + message = f'A user profile could not be found for "{username}"' + super().__init__(code=-32007, message=message) diff --git a/src/search1_conversion/convert_params.py b/src/search1_conversion/convert_params.py index d6d48d5..40e62ba 100644 --- a/src/search1_conversion/convert_params.py +++ b/src/search1_conversion/convert_params.py @@ -33,14 +33,9 @@ entirety. Longer fields are shown as snippets preceded or followed by "...". """ -from src.utils.config import config -from src.utils.obj_utils import get_any -from src.exceptions import ResponseError -# Unversioned feature index name/alias, (eg "genome_features") -_FEATURES_UNVERSIONED = config['global']['genome_features_current_index_name'] -# Versioned feature index name (eg "genome_features_2") -_GENOME_FEATURES_IDX_NAME = config['global']['latest_versions'][_FEATURES_UNVERSIONED] +from src.utils.obj_utils import get_any +from jsonrpc11base.errors import InvalidParamsError # Mapping of special sorting properties names from the Java API to search2 key names _SORT_PROP_MAPPING = { @@ -58,7 +53,8 @@ def search_objects(params): Convert parameters from the "search_objects" RPC method into an Elasticsearch query. """ query = _get_search_params(params) - if params.get('include_highlight'): + post_proc = params.get('post_processing', {}) + if post_proc.get('include_highlight') == 1: # We need a special highlight query so that the main query does not generate # highlights for bits of the query which are not user-generated. highlight_query = {'bool': {}} @@ -67,7 +63,10 @@ def search_objects(params): # Match full text for any field in the objects highlight_query['bool']['must'] = [{ 'match': { - 'agg_fields': match_filter['full_text_in_all'] + 'agg_fields': { + 'query': match_filter['full_text_in_all'], + 'operator': 'AND', + } } }] # Note that search_objects, being used by both the legacy and current @@ -95,7 +94,6 @@ def search_types(params): with_all_history - ignored output: type_to_count - dict where keys are type names and vals are counts - search_time - int - total time performing search This method constructs the same search parameters as `search_objects`, but aggregates results based on `obj_type_name`. """ @@ -115,17 +113,12 @@ def get_objects(params): Convert params from the "get_objects" RPC method into an Elasticsearch query. Retrieve a list of objects based on their upas. params: - guids - list of string - KBase IDs (upas) to fetch - post_processing - object of post-query filters (see PostProcessing def at top of this module) + ids - list of string - Search document ids to fetch; ids are in a specific + format for object indexes: "WS:::" output: - objects - list of ObjectData - see the ObjectData type description in the module docstring above. - search_time - int - time it took to perform the search on ES - access_group_narrative_info - dict of {access_group_id: narrative_info} - - Information about the workspaces in which the objects in the - results reside. This data only applies to workspace objects. 
+ query - elasticsearch query for document ids specified in the params argument """ - query = {'query': {'terms': {'_id': params['guids']}}} - return query + return {'query': {'terms': {'_id': params['ids']}}} def _get_search_params(params): @@ -135,24 +128,54 @@ def _get_search_params(params): match_filter = params.get('match_filter', {}) # Base query object for ES. Will get mutated and expanded below. # query = {'bool': {'must': [], 'must_not': [], 'should': []}} # type: dict - query = {'bool': {}} # type: dict + query = {'bool': { + 'must': [], + 'filter': { + 'bool': {} + } + }} # type: dict + + # Provides for full text search if match_filter.get('full_text_in_all'): # Match full text for any field in the objects - query['bool']['must'] = [] - query['bool']['must'].append({'match': {'agg_fields': match_filter['full_text_in_all']}}) + terms = match_filter.get('full_text_in_all') + query['bool']['must'].append({ + 'match': { + 'agg_fields': { + 'query': terms, + 'operator': 'AND' + } + } + }) + + # Search by object name, precisely, so more of a filter. if match_filter.get('object_name'): - query['bool']['must'] = query['bool'].get('must', []) - query['bool']['must'].append({'match': {'obj_name': str(match_filter['object_name'])}}) + query['bool']['must'].append({ + 'match': { + 'obj_name': str(match_filter['object_name']) + } + }) + + # Search by timestamp range if match_filter.get('timestamp') is not None: ts = match_filter['timestamp'] min_ts = ts.get('min_date') max_ts = ts.get('max_date') if min_ts is not None and max_ts is not None and min_ts < max_ts: - query['bool']['must'] = query['bool'].get('must', []) - query['bool']['must'].append({'range': {'timestamp': {'gte': min_ts, 'lte': max_ts}}}) + query['bool']['must'].append({ + 'range': { + 'timestamp': {'gte': min_ts, 'lte': max_ts} + } + }) else: - raise ResponseError(code=-32602, message="Invalid timestamp range in match_filter/timestamp") - # Handle a search on tags, which corresponds to the generic `tags` field in all indexes. + raise InvalidParamsError( + message="Invalid timestamp range in match_filter/timestamp") + + # Handle a search on tags, which corresponds to the generic `tags` field in all + # indexes. + # search_tags is populated on a workspace to indicate the type of workspace. + # Currently + # supported are "narrative", "refseq", and "noindex" if match_filter.get('source_tags'): # If source_tags_blacklist is `1`, then we are **excluding** these tags. blacklist_tags = bool(match_filter.get('source_tags_blacklist')) @@ -162,26 +185,52 @@ def _get_search_params(params): if blacklist_tags: query['bool']['must_not'] = tag_query else: - query['bool']['must'] = query['bool'].get('must', []) query['bool']['must'] += tag_query + # Handle match_filter/lookupInKeys query = _handle_lookup_in_keys(match_filter, query) + # Handle filtering by object type object_types = params.get('object_types', []) if object_types: # For this fake type, we search on the specific index instead (see lower down). - type_blacklist = ['GenomeFeature'] - query['bool']['should'] = [ + query['bool']['filter']['bool']['should'] = [ {'term': {'obj_type_name': obj_type}} for obj_type in object_types - if obj_type not in type_blacklist ] + + # Translate with_private and with_public to only_private and only_public. 
+ access_filter = params.get('access_filter', {}) + with_private = access_filter.get('with_private') + with_public = access_filter.get('with_public') + if with_private is None and with_public is None: + only_public = False + only_private = False + else: + with_private = bool(with_private) + with_public = bool(with_public) + if with_private: + if with_public: + only_public = False + only_private = False + else: + only_public = False + only_private = True + elif with_public: + only_public = True + only_private = False + else: + # Error condition + raise InvalidParamsError( + message='May not specify no private data and no public data' + ) + # Handle sorting options if 'sorting_rules' not in params: params['sorting_rules'] = [{ - "property": "timestamp", - "is_object_property": 0, - "ascending": 1 + "property": "timestamp", + "is_object_property": 0, + "ascending": 1 }] sort = [] # type: list for sort_rule in params['sorting_rules']: @@ -192,36 +241,48 @@ def _get_search_params(params): if prop in _SORT_PROP_MAPPING: prop = _SORT_PROP_MAPPING[sort_rule['property']] else: - msg = f"Invalid non-object sorting property '{prop}'" - raise ResponseError(code=-32602, message=msg) + raise InvalidParamsError( + message=f"Invalid non-object sorting property '{prop}'" + ) order = 'asc' if ascending else 'desc' sort.append({prop: {'order': order}}) + pagination = params.get('pagination', {}) - access_filter = params.get('access_filter', {}) - with_private = bool(access_filter.get('with_private')) - with_public = bool(access_filter.get('with_public')) + + # remove unused elements from query + if len(query['bool']['filter']['bool']) == 0: + del query['bool']['filter']['bool'] + + if len(query['bool']['filter']) == 0: + del query['bool']['filter'] + + if len(query['bool']['must']) == 0: + del query['bool']['must'] + # Get excluded index names (handles `exclude_subobjects`) search_params = { 'query': query, 'size': pagination.get('count', 20), 'from': pagination.get('start', 0), 'sort': sort, - 'public_only': not with_private and with_public, - 'private_only': not with_public and with_private + 'only_public': only_public, + 'only_private': only_private, + 'track_total_hits': True } - if 'GenomeFeature' in object_types: - search_params['indexes'] = [_GENOME_FEATURES_IDX_NAME] + return search_params def _handle_lookup_in_keys(match_filter, query): """ Handle the match_filter/lookup_in_keys option from the legacy API. - This allows the user to pass a number of field names and term or range values for filtering. + This allows the user to pass a number of field names and term or range values for + filtering. 
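For instance, a hypothetical lookup_in_keys value (field names and bounds are invented) mixing an exact value with an integer range; the handler below turns each entry into a per-field query clause, with min/max bounds becoming gte/lte range bounds:

lookup_in_keys = {
    'scientific_name': {'value': 'Escherichia coli'},            # exact value match
    'genome_length': {'min_int': 1000000, 'max_int': 9000000},   # becomes a range clause
}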
""" if not match_filter.get('lookup_in_keys'): return query - # This will be a dict where each key is a field name and each val is a MatchValue type + # This will be a dict where each key is a field name and each val is a MatchValue + # type lookup_in_keys = match_filter['lookup_in_keys'] for (key, match_value) in lookup_in_keys.items(): # match_value will be a dict with one of these keys set: @@ -245,6 +306,5 @@ def _handle_lookup_in_keys(match_filter, query): if range_max is not None: query_clause['range'][key]['lte'] = range_max if query_clause: - query['bool']['must'] = query['bool'].get('must', []) query['bool']['must'].append(query_clause) return query diff --git a/src/search1_conversion/convert_result.py b/src/search1_conversion/convert_result.py index 383987a..5e864ee 100644 --- a/src/search1_conversion/convert_result.py +++ b/src/search1_conversion/convert_result.py @@ -1,26 +1,55 @@ -import re -from typing import Optional - from src.utils.config import config -from src.utils.formatting import iso8601_to_epoch +from src.utils.formatting import iso8601_to_epoch_ms from src.utils.user_profiles import get_user_profiles -from src.utils.workspace import get_workspace_info, get_object_info -import src.es_client as es_client +from src.utils.workspace import get_workspace_info +from src.exceptions import NoAccessGroupError, NoUserProfileError + +# TODO: The structure of the ES docs and of the API's result +# data should be documented in detail outside of this file, with a +# reference here. -# Mappings from search2 document fields to search1 fields: -_KEY_MAPPING = { +# Mappings from search2 document fields to search1 fields. +_GLOBAL_DOC_KEY_MAPPING = { 'obj_name': 'object_name', - 'access_group': 'access_group', - 'obj_id': 'obj_id', - 'version': 'version', - 'timestamp': 'timestamp', - 'obj_type_name': 'type', - # 'obj_type_version': 'type_ver', - 'creator': 'creator' + 'access_group': 'workspace_id', + 'obj_id': 'object_id', + 'version': 'object_version', + 'obj_type_module': 'workspace_type_module', + 'obj_type_name': 'workspace_type_name', + 'obj_type_version': 'workspace_type_version', + 'timestamp': 'modified_at' } +# These keys are copied over literally without renaming +# keys or transformation +_GLOBAL_DOC_KEY_COPYING = [ + 'creator', + 'copied' +] + +# These keys are copied from the result "hit", not "hit.doc" as +# above. +_GLOBAL_HIT_KEY_COPYING = [ + 'id' +] + +# These keys are to be neither mapped nor copied, but when copying the rest of the +# doc fields into the data field, should be excluded, or omitted. +_GLOBAL_DOC_KEY_EXCLUSION = [ + 'is_public', + 'shared_users', + 'tags', + 'index_runner_ver' +] -def search_objects(params: dict, results: dict, meta: dict): +# Similar to excluded fields, these fields are transformed and copied in code below +# (see the "# Transforms" comment) and should be ignored when copying into the data field. 
+_GLOBAL_DOC_KEY_TRANSFORMS = [ + 'creation_date' +] + + +def search_objects(params: dict, results: dict, ctx: dict): """ Convert Elasticsearch results into the RPC results conforming to the "search_objects" method @@ -34,30 +63,33 @@ def search_objects(params: dict, results: dict, meta: dict): 'search_time': results['search_time'], 'objects': objects, } - _add_access_group_info(ret, results, meta, post_processing) - _add_objects_and_info(ret, results, meta, post_processing) + _add_access_group_info(ret, results, ctx, post_processing) + _add_objects_and_info(ret, results, ctx, post_processing) + return ret -def search_types(params, results, meta): +def search_types(results: dict): """ - Convert Elasticsearch results into RPC results conforming to the spec for - the "search_types" method. + Convert Elasticsearch results into RPC results conforming to the + "search_types" method. """ - # Now we need to convert the ES result format into the API format + # Convert the ES result format into the API format search_time = results['search_time'] - buckets = results['aggregations']['type_count']['counts'] - counts_dict = {} # type: dict - for count_obj in buckets: - counts_dict[count_obj['key']] = counts_dict.get(count_obj['key'], 0) - counts_dict[count_obj['key']] += count_obj['count'] + type_counts = results['aggregations']['type_count']['counts'] + type_to_count = {} # type: dict + + for type_count in type_counts: + key = type_count['key'] + count = type_count['count'] + type_to_count[key] = count return { - 'type_to_count': counts_dict, + 'type_to_count': type_to_count, 'search_time': int(search_time) } -def get_objects(params, results, meta): +def get_objects(params, results, ctx): """ Convert Elasticsearch results into RPC results conforming to the spec for the "get_objects" method. @@ -66,8 +98,8 @@ def get_objects(params, results, meta): ret = { 'search_time': results['search_time'], } - _add_access_group_info(ret, results, meta, post_processing) - _add_objects_and_info(ret, results, meta, post_processing) + _add_access_group_info(ret, results, ctx, post_processing) + _add_objects_and_info(ret, results, ctx, post_processing) return ret @@ -84,24 +116,21 @@ def _get_post_processing(params: dict) -> dict: return pp -def _add_objects_and_info(ret: dict, search_results: dict, meta: dict, post_processing: dict): +def _add_objects_and_info(ret: dict, search_results: dict, ctx: dict, post_processing: dict): """ - Populate the fields for `objects` and `objects_info`. + Populate the fields for `objects`. Args: ret: final method result object (mutated) search_results: return value from es_client.query.search - meta: RPC meta object (contains auth token) + ctx: RPC context object (contains auth token) post_processing: some query options pulled from the RPC method params """ objects = _get_object_data_from_search_results(search_results, post_processing) ret['objects'] = objects - infos = _get_object_infos(objects, meta) - if infos is not None: - ret['objects_info'] = infos -def _add_access_group_info(ret: dict, search_results: dict, meta: dict, post_processing: dict): +def _add_access_group_info(ret: dict, search_results: dict, ctx: dict, post_processing: dict): """ Populate the fields for `access_group_narrative_info` and/or `access_groups_info` depending on keys from the `post_processing` field. 
@@ -110,85 +139,82 @@ def _add_access_group_info(ret: dict, search_results: dict, meta: dict, post_pro Args: ret: final method result object (mutated) search_results: return value from es_client.query.search - meta: RPC meta object (contains auth token) + ctx: RPC context object (contains auth token) post_processing: some query options pulled from the RPC method params """ fetch_narratives = post_processing.get('add_narrative_info') == 1 fetch_ws_infos = post_processing.get('add_access_group_info') == 1 if fetch_narratives or fetch_ws_infos: - (ws_infos, narrative_infos) = _fetch_narrative_info(search_results, meta) + (ws_infos, narrative_infos) = _fetch_narrative_info(search_results, ctx) if fetch_narratives: ret['access_group_narrative_info'] = narrative_infos if fetch_ws_infos: ret['access_groups_info'] = ws_infos -def _fetch_narrative_info(results, meta): +def _fetch_narrative_info(es_result, ctx): """ - For each result object, we construct a single bulk query to ES that fetches - the narrative data. We then construct that data into a "narrative_info" - tuple, which contains: (narrative_name, object_id, time_last_saved, - owner_username, owner_displayname) Returns a dictionary of workspace_id - mapped to the narrative_info tuple above. + Returns two mappings of workspaces, each keyed on the workspace id: + - a subset of workspace info as returned by the workspace service: + (id, name, owner, save_date, max_objid, user_perm, global_perm, + lockstat, metadata) + - a subset of narrative info for workspaces which are narratives, a tuple + of selected values: + (narrative title, object id, workspace modification timestamp, + owner username, owner realname) + + The reason for the duplication is historical, not intentional design. + One day we will rectify this.
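As a rough illustration of the two mappings described above (IDs, names, and profile values are made up):

# Workspace info tuple:
# (id, name, owner, save_date, max_objid, user_perm, global_perm, lockstat, metadata)
ws_infos = {
    '42': [42, 'my_workspace', 'alice', '2021-04-20T12:00:00+0000', 7,
           'a', 'n', 'unlocked',
           {'narrative': '1', 'narrative_nice_name': 'My Narrative'}],
}
# Narrative info tuple: (title, object id, modified-at in epoch ms, owner, owner realname)
narr_infos = {
    '42': ['My Narrative', 1, 1618920000000, 'alice', 'Alice Aliceson'],
}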
- This also returns a dictionary of workspace infos for each object: (id, name, owner, save_date, max_objid, user_perm, global_perm, lockstat, metadata) """ - hit_docs = [hit['doc'] for hit in results['hits']] - workspace_ids = [] + hit_docs = [hit['doc'] for hit in es_result['hits']] + workspace_ids = set() ws_infos = {} owners = set() + + # Get workspace info for all unique workspaces in the search + # results for hit_doc in hit_docs: if 'access_group' not in hit_doc: - continue + raise NoAccessGroupError() workspace_id = hit_doc['access_group'] - workspace_ids.append(workspace_id) - workspace_info = get_workspace_info(workspace_id, meta['auth']) + workspace_ids.add(workspace_id) + + if len(workspace_ids) == 0: + return {}, {} + + for workspace_id in workspace_ids: + workspace_info = get_workspace_info(workspace_id, ctx['auth']) if len(workspace_info) > 2: owners.add(workspace_info[2]) ws_infos[str(workspace_id)] = workspace_info - if len(workspace_ids) == 0: - return ({}, {}) - # Get profile for all owners - user_profiles = get_user_profiles(list(owners), meta['auth']) - user_profile_map = {profile['user']['username']: profile for profile in user_profiles} - narrative_index_name = config['global']['ws_type_to_indexes']['KBaseNarrative.Narrative'] - # TODO move this code into es_client.fetch_narratives - # ES query params - search_params: dict = { - 'indexes': [narrative_index_name], - 'size': len(workspace_ids) - } - # Filter by workspace ID - matches = [ - {'match': {'access_group': wsid}} - for wsid in workspace_ids - ] - search_params['query'] = { - 'bool': {'should': matches} - } - # Make the query for narratives on ES - search_results = es_client.search(search_params, meta) + + # Get profile for all owners in the search results + owner_list = list(owners) + user_profiles = get_user_profiles(owner_list, ctx['auth']) + user_profile_map = {} + for index, profile in enumerate(user_profiles): + if profile is None: + raise NoUserProfileError(owner_list[index]) + username = profile['user']['username'] + user_profile_map[username] = profile + # Get all the source document objects for each narrative result - narrative_hits = [hit['doc'] for hit in search_results['hits']] narr_infos = {} - for narr in narrative_hits: - _id = narr['access_group'] - if _id not in ws_infos: - continue - [workspace_id, workspace_name, owner, moddate, - max_objid, user_permission, global_permission, - lockstat, ws_metadata] = ws_infos[str(_id)] - if owner in user_profile_map: - # See type in legacy-schema.yaml/narrativeInfo - narr_infos[str(_id)] = [ - narr.get('narrative_title', ''), - narr.get('obj_id'), - iso8601_to_epoch(moddate), # Save date as an epoch + for ws_info in ws_infos.values(): + [workspace_id, _, owner, moddate, _, _, _, _, ws_metadata] = ws_info + user_profile = user_profile_map.get(owner) + real_name = user_profile['user']['realname'] + if 'narrative' in ws_metadata: + narr_infos[str(workspace_id)] = [ + ws_metadata.get('narrative_nice_name', ''), + int(ws_metadata.get('narrative')), + iso8601_to_epoch_ms(moddate), owner, - user_profile_map[owner]['user']['realname'], + real_name ] - return (ws_infos, narr_infos) + return ws_infos, narr_infos def _get_object_data_from_search_results(search_results, post_processing): @@ -199,77 +225,87 @@ def _get_object_data_from_search_results(search_results, post_processing): """ # TODO post_processing/skip_info,skip_keys,skip_data -- look at results in current api # TODO post_processing/ids_only -- look at results in current api + object_data = [] # type: 
list # Keys found in every ws object - for result in search_results['hits']: - source = result['doc'] + for hit in search_results['hits']: + doc = hit['doc'] obj: dict = {} - for (search2_key, search1_key) in _KEY_MAPPING.items(): - obj[search1_key] = source.get(search2_key) - # The nested 'data' is all object-specific, so exclude all global keys - obj_data = {key: source[key] for key in source if key not in _KEY_MAPPING} - if post_processing.get('skip_data') != 1: - obj['data'] = obj_data - if post_processing.get('skip_keys') != 1: - obj['key_props'] = obj_data - obj['guid'] = _get_guid_from_doc(result) - obj['kbase_id'] = obj['guid'].strip('WS:') - idx_pieces = result['index'].split(config['prefix_delimiter']) + + # Copy fields from the "hit" to the result "object". + for key in _GLOBAL_HIT_KEY_COPYING: + obj[key] = hit.get(key) + + # Simple key mapping from the doc to the object. + # The mapping transforms the raw keys from the ES result into + # friendlier keys expected by the API. + # Defined at top of file. + global_doc_keys = [] + for (search2_key, search1_key) in _GLOBAL_DOC_KEY_MAPPING.items(): + global_doc_keys.append(search2_key) + obj[search1_key] = doc.get(search2_key) + + # Even simpler key mapping - no key substitution + for key in _GLOBAL_DOC_KEY_COPYING: + global_doc_keys.append(key) + obj[key] = doc.get(key) + + # Transforms + obj['created_at'] = iso8601_to_epoch_ms((doc['creation_date'])) + + # The index name from the external pov is unqualified and + # unversioned; it is equivalent to the index alias, and + # symmetric with any parameters which limit searches by + # index. + # The form of object indexes is: + # NAMESPACE.INDEXNAME_VERSION + # (why different separators for prefix and suffix?) + # e.g. search2.genome_2 + # We are interested in the INDEXNAME and VERSION, + # although there is no need for clients to know the version + # it may be useful for diagnostics. + idx_pieces = hit['index'].split(config['suffix_delimiter']) idx_name = idx_pieces[0] + + # TODO: we should not default to 0, but rather raise an + # error. All indexes involved should be namespaced. idx_ver = int(idx_pieces[1] or 0) if len(idx_pieces) == 2 else 0 - # Set to a string obj['index_name'] = idx_name - obj['type_ver'] = idx_ver - # For the UI, make the type field "GenomeFeature" instead of "Genome". - if 'genome_feature_type' in source: - obj['type'] = 'GenomeFeature' - # Set defaults for required fields in objects/data - # Set some more top-level data manually that we use in the UI - if post_processing.get('include_highlight') == 1: - highlight = result.get('highlight', {}) - transformed_highlight = {} - for key, value in highlight.items(): - transformed_highlight[_KEY_MAPPING.get(key, key)] = value - obj['highlight'] = transformed_highlight + obj['index_version'] = idx_ver + + # Funny Business # Always set object_name as a string type + # TODO: how can this ever be missing? It is simply impossible, every + # object has a name and a type. obj['object_name'] = obj.get('object_name') or '' - obj['type'] = obj.get('type') or '' - object_data.append(obj) - return object_data + obj['workspace_type_name'] = obj.get('workspace_type_name') or '' + # The nested 'data' is all object-specific, so exclude all global keys + # The indexed doc mixes global keys and index-specific ones. + # The search1 api separated them, so this transformation respects that. 
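For example, given a hypothetical genome document (all field values invented), the key mapping and copying above would produce an API object shaped roughly as shown in the trailing comment:

# Hypothetical indexed doc: global fields plus one index-specific field
doc = {
    'obj_name': 'my_genome',
    'access_group': 42,
    'obj_id': 7,
    'version': 1,
    'obj_type_module': 'KBaseGenomes',
    'obj_type_name': 'Genome',
    'obj_type_version': '1.0',
    'timestamp': 1618920000000,
    'creation_date': '2021-04-20T12:00:00+0000',
    'creator': 'alice',
    'scientific_name': 'Escherichia coli',  # index-specific, ends up under 'data'
}
# Roughly maps to:
# {'object_name': 'my_genome', 'workspace_id': 42, 'object_id': 7,
#  'object_version': 1, 'workspace_type_module': 'KBaseGenomes',
#  'workspace_type_name': 'Genome', 'workspace_type_version': '1.0',
#  'modified_at': 1618920000000, 'created_at': 1618920000000,
#  'creator': 'alice', 'data': {'scientific_name': 'Escherichia coli'}, ...}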
+ obj_data = {key: doc[key] for key in doc if key not in global_doc_keys + and key not in _GLOBAL_DOC_KEY_EXCLUSION + and key not in _GLOBAL_DOC_KEY_TRANSFORMS} -def _get_object_infos(objects: list, meta: dict) -> Optional[dict]: - """ - Args: - objects: results from _get_object_data_from_search_results - post_processing: The field pulled from the RPC params - meta: rpc meta object with 'auth' key - """ - if len(objects) == 0: - return None - refs = {obj['kbase_id'] for obj in objects} - infos = get_object_info(refs, meta['auth']) - return { - f"{info[6]}/{info[0]}/{info[4]}": info - for info in infos - } + if post_processing.get('skip_data') != 1: + obj['data'] = obj_data + # Highlights are mappings of key to a formatted string + # derived from the field with "hit" terms highlighted with + # html. + # These fields may be any field in the indexed doc, which + # mixes global and index-specific fields. + # We need to transform the keys, if the GLOBAL_KEY_MAPPING + # so deems; otherwise we use the keys directly. + # TODO: improvements needed here; not all search terms are highlighted + # as a result of this transform, which results in a confusing message + # on the front end. + if post_processing.get('include_highlight') == 1: + highlight = hit.get('highlight', {}) + transformed_highlight = {} + for key, value in highlight.items(): + transformed_highlight[_GLOBAL_DOC_KEY_MAPPING.get(key, key)] = value + obj['highlight'] = transformed_highlight -def _get_guid_from_doc(doc): - """ - Convert from our guid format 'WS::1:2:3' into the legacy format 'WS:1/2/3' - """ - # TODO this only works on the WS namespace should take into account the - # namespace name - # Remove the first namespace - _id = doc['id'].replace('WS::', '') - # Remove any secondary namespace - _id = re.sub(r'::..::.+', '', _id) - # Replace colon delimiters with slashes - _id = _id.replace(':', '/') - # Add a single-colon delimited workspace namespace - _id = 'WS:' + _id - # Append the object version - ver = str(doc.get('obj_type_version', 1)) - _id = _id + '/' + ver - return _id + object_data.append(obj) + return object_data diff --git a/src/search1_rpc/errors.py b/src/search1_rpc/errors.py new file mode 100644 index 0000000..1e9ecf3 --- /dev/null +++ b/src/search1_rpc/errors.py @@ -0,0 +1,75 @@ +from jsonrpc11base.errors import APIError +from src import exceptions + + +class UnknownTypeError(APIError): + code = 1000 + message = 'Unknown type' + + def __init__(self, message): + self.error = { + 'message': message + } + + +class AuthorizationError(APIError): + code = 2000 + message = 'Auth error' + + def __init__(self, message): + self.error = { + 'message': message + } + + +class UnknownIndexError(APIError): + code = 3000 + message = 'Unknown index' + + def __init__(self, message): + self.error = { + 'message': message + } + + +class ElasticsearchServerError(APIError): + code = 4000 + message = 'Elasticsearch server error' + + def __init__(self, message): + self.error = { + 'message': message + } + +# def __init__(self, url, resp_text): +# msg = f"User profile service error:\nResponse: {resp_text}\nURL: {url}" +# super().__init__(code=-32004, message=msg) + + +class UserProfileServiceError(APIError): + code = 50000 + message = 'User profile service error' + + def __init__(self, url, resp_text): + self.error = { + 'url': url, + 'resp_text': resp_text + } + + def __str__(self): + return f"{self.message}\nResponse: {self.error['resp_text']}\nURL: {self.error['url']}" + + +def trap_error(fun): + try: + return fun() + except 
exceptions.UnknownType as ut: + raise UnknownTypeError(ut.message) + except exceptions.AuthError as ae: + raise AuthorizationError(ae.message) + except exceptions.ElasticsearchError as ee: + raise ElasticsearchServerError(ee.message) + except exceptions.UnknownIndex as ue: + raise UnknownIndexError(ue.message) + except exceptions.UserProfileError as upe: + raise UserProfileServiceError(upe.url, upe.resp_text) diff --git a/src/search1_rpc/schemas/KBaseSearchEngine.get_objects.params.json b/src/search1_rpc/schemas/KBaseSearchEngine.get_objects.params.json new file mode 100644 index 0000000..27876a3 --- /dev/null +++ b/src/search1_rpc/schemas/KBaseSearchEngine.get_objects.params.json @@ -0,0 +1,24 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "array", + "minItems": 1, + "maxItems": 1, + "items": { + "type": "object", + "required": [ + "ids" + ], + "additionalProperties": false, + "properties": { + "ids": { + "type": "array", + "items": { + "type": "string" + } + }, + "post_processing": { + "$ref": "types.json#/definitions/postProcessing" + } + } + } +} \ No newline at end of file diff --git a/src/search1_rpc/schemas/KBaseSearchEngine.get_objects.result.json b/src/search1_rpc/schemas/KBaseSearchEngine.get_objects.result.json new file mode 100644 index 0000000..0d497a1 --- /dev/null +++ b/src/search1_rpc/schemas/KBaseSearchEngine.get_objects.result.json @@ -0,0 +1,43 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "array", + "minItems": 1, + "maxItems": 1, + "items": { + "type": "object", + "required": [ + "objects", + "search_time" + ], + "additionalProperties": false, + "properties": { + "objects": { + "type": "array", + "items": { + "$ref": "types.json#/definitions/searchResultHit" + } + }, + "search_time": { + "type": "integer" + }, + "access_groups_info": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^[1-9][0-9]*$": { + "$ref": "types.json#/definitions/workspaceInfo" + } + } + }, + "access_group_narrative_info": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^[1-9][0-9]*$": { + "$ref": "types.json#/definitions/narrativeInfo" + } + } + } + } + } +} \ No newline at end of file diff --git a/src/search1_rpc/schemas/KBaseSearchEngine.search_objects.params.json b/src/search1_rpc/schemas/KBaseSearchEngine.search_objects.params.json new file mode 100644 index 0000000..b7f5d94 --- /dev/null +++ b/src/search1_rpc/schemas/KBaseSearchEngine.search_objects.params.json @@ -0,0 +1,55 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "array", + "minItems": 1, + "maxItems": 1, + "items": { + "type": "object", + "required": [ + "match_filter" + ], + "additionalProperties": false, + "properties": { + "match_filter": { + "$ref": "types.json#/definitions/matchFilter" + }, + "access_filter": { + "$ref": "types.json#/definitions/accessFilter" + }, + "object_types": { + "type": "array", + "items": { + "type": "string" + } + }, + "pagination": { + "start": { + "type": "integer" + }, + "count": { + "type": "integer" + } + }, + "post_processing": { + "$ref": "types.json#/definitions/postProcessing" + }, + "sorting_rules": { + "type": "array", + "items": { + "type": "object", + "properties": { + "is_object_property": { + "$ref": "types.json#/definitions/sdk_boolean" + }, + "property": { + "type": "string" + }, + "ascending": { + "$ref": "types.json#/definitions/sdk_boolean" + } + } + } + } + } + } +} \ No newline at end of file diff --git 
a/src/search1_rpc/schemas/KBaseSearchEngine.search_objects.result.json b/src/search1_rpc/schemas/KBaseSearchEngine.search_objects.result.json new file mode 100644 index 0000000..e70ae1b --- /dev/null +++ b/src/search1_rpc/schemas/KBaseSearchEngine.search_objects.result.json @@ -0,0 +1,64 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "array", + "minItems": 1, + "maxItems": 1, + "items": { + "type": "object", + "required": [ + "objects", + "total", + "search_time" + ], + "additionalProperties": false, + "properties": { + "objects": { + "type": "array", + "items": { + "$ref": "types.json#/definitions/searchResultHit" + } + }, + "total": { + "type": "integer" + }, + "search_time": { + "type": "integer" + }, + "pagination": { + "type": "object", + "properties": { + "start": { + "type": "integer" + }, + "count": { + "type": "integer" + } + } + }, + "sorting_rules": { + "type": "array", + "items": { + "$ref": "types.json#/definitions/sortingRule" + } + }, + "access_groups_info": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^[1-9][0-9]*$": { + "$ref": "types.json#/definitions/workspaceInfo" + } + } + }, + "access_group_narrative_info": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^[1-9][0-9]*$": { + "$ref": "types.json#/definitions/narrativeInfo" + } + } + } + } + } +} \ No newline at end of file diff --git a/src/search1_rpc/schemas/KBaseSearchEngine.search_types.params.json b/src/search1_rpc/schemas/KBaseSearchEngine.search_types.params.json new file mode 100644 index 0000000..5e5b432 --- /dev/null +++ b/src/search1_rpc/schemas/KBaseSearchEngine.search_types.params.json @@ -0,0 +1,27 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "array", + "minItems": 1, + "maxItems": 1, + "items": { + "type": "object", + "required": [ + "match_filter" + ], + "additionalProperties": false, + "properties": { + "object_types": { + "type": "array", + "items": { + "type": "string" + } + }, + "match_filter": { + "$ref": "types.json#/definitions/matchFilter" + }, + "access_filter": { + "$ref": "types.json#/definitions/accessFilter" + } + } + } +} \ No newline at end of file diff --git a/src/search1_rpc/schemas/KBaseSearchEngine.search_types.result.json b/src/search1_rpc/schemas/KBaseSearchEngine.search_types.result.json new file mode 100644 index 0000000..1b35eaf --- /dev/null +++ b/src/search1_rpc/schemas/KBaseSearchEngine.search_types.result.json @@ -0,0 +1,23 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "array", + "minItems": 1, + "maxItems": 1, + "items": { + "title": "Legacy Search Types Method", + "type": "object", + "required": [ + "type_to_count", + "search_time" + ], + "additionalProperties": false, + "properties": { + "type_to_count": { + "$ref": "types.json#/definitions/typeCounts" + }, + "search_time": { + "type": "integer" + } + } + } +} \ No newline at end of file diff --git a/src/search1_rpc/schemas/types.json b/src/search1_rpc/schemas/types.json new file mode 100644 index 0000000..0464a59 --- /dev/null +++ b/src/search1_rpc/schemas/types.json @@ -0,0 +1,634 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Base types for legacy search methods", + "definitions": { + "sdk_boolean": { + "type": "integer", + "enum": [ + 0, + 1 + ] + }, + "tag": { + "type": "string", + "enum": [ + "narrative", + "refdata", + "noindex" + ] + }, + "data": { + "title": "Search result object data", + "type": "object" + }, + "searchResultHit": { + 
"type": "object", + "title": "Workspace object search result", + "required": [ + "id", + "workspace_id", + "object_id", + "object_version", + "object_name", + "workspace_type_module", + "workspace_type_name", + "workspace_type_version", + "modified_at", + "created_at", + "creator", + "index_name", + "index_version", + "data" + ], + "properties": { + "id": { + "type": "string" + }, + "workspace_id": { + "type": "integer" + }, + "object_id": { + "type": "integer" + }, + "object_version": { + "type": "integer" + }, + "object_name": { + "type": "string" + }, + "workspace_type_module": { + "type": "string" + }, + "workspace_type_name": { + "type": "string" + }, + "workspace_type_version": { + "type": "string" + }, + "modified_at": { + "type": "integer" + }, + "created_at": { + "type": "integer" + }, + "creator": { + "type": "string" + }, + "index_name": { + "type": "string" + }, + "index_version": { + "type": "integer" + }, + "highlight": { + "$ref": "#/definitions/highlight" + }, + "data": { + "$ref": "#/definitions/data" + } + } + }, + "searchResult": { + "type": "object", + "properties": { + "objects": { + "type": "array", + "items": { + "$ref": "#/definitions/searchResultHit" + } + }, + "access_groups_info": { + "type": "object", + "patternProperties": { + "^[1-9][0-9]*$": { + "$ref": "#/definitions/workspaceInfo" + } + } + }, + "access_group_narrative_info": { + "type": "object", + "patternProperties": { + "^[1-9][0-9]*$": { + "$ref": "#/definitions/narrativeInfo" + } + } + }, + "total": { + "type": "integer" + }, + "search_time": { + "type": "integer" + }, + "sortingRules": { + "type": "array", + "items": { + "$ref": "#/definitions/sortingRule" + } + } + } + }, + "sortingRule": { + "type": "object", + "additionalProperties": false, + "required": [ + "property" + ], + "properties": { + "is_object_property": { + "$ref": "#/definitions/sdk_boolean" + }, + "property": { + "type": "string" + }, + "ascending": { + "$ref": "#/definitions/sdk_boolean" + } + } + }, + "narrativeInfo": { + "type": "array", + "items": [{ + "type": "string", + "title": "Narrative Name" + }, + { + "type": "integer", + "title": "Narrative ID" + }, + { + "type": "integer", + "title": "Time Last Saved (Epoch)" + }, + { + "type": "string", + "title": "Owner User Name" + }, + { + "type": "string", + "title": "Owner Display Name" + } + ] + }, + "userBrief": { + "type": "object", + "additionalProperties": false, + "properties": { + "username": { + "type": "string" + }, + "realname": { + "type": "string" + } + } + }, + "workspaceInfo": { + "type": "array", + "items": [{ + "title": "Workspace Id", + "type": "integer" + }, + { + "title": "Workspace Name", + "type": "string" + }, + { + "title": "Owner", + "type": "string" + }, + { + "title": "Save date", + "type": "string" + }, + { + "title": "Maximum object id", + "type": "integer" + }, + { + "title": "User permission", + "type": "string" + }, + { + "title": "Global Permission", + "type": "string" + }, + { + "title": "Lock Status", + "type": "string" + }, + { + "title": "Metadata", + "$ref": "#/definitions/metadata" + } + ] + }, + "metadata": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "workspaceInfoDict": { + "type": "object", + "additionalProperties": false, + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "owner": { + "type": "string" + }, + "modification_date": { + "type": "string" + }, + "max_object_id": { + "type": "integer" + }, + "user_permission": { + "type": "string" + }, + 
"global_permission": { + "type": "string" + }, + "lock_status": { + "type": "string" + }, + "metadata": { + "$ref": "#/definitions/metadata" + } + } + }, + "objectInfoDict": { + "type": "object", + "additionalProperties": false, + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "version": { + "type": "integer" + }, + "type": { + "type": "string" + }, + "saved_date": { + "type": "string" + }, + "saved_by": { + "type": "string" + }, + "workspace_id": { + "type": "integer" + }, + "checksum": { + "type": "string" + }, + "size": { + "type": "integer" + }, + "metadata": { + "$ref": "#/definitions/metadata" + } + } + }, + "workspaceInfoNice": { + "type": "object", + "additionalProperties": false, + "properties": { + "permission": { + "type": "string" + }, + "is_public": { + "type": "boolean" + }, + "modified_at": { + "type": "integer" + }, + "owner": { + "$ref": "#/definitions/userBrief" + } + } + }, + "highlight": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "workspaceType": { + "type": "string", + "enum": [ + "narrative", + "refdata", + "workspace" + ] + }, + "narrativeWorkspaceInfo": { + "type": "object", + "additionalProperties": false, + "properties": { + "title": { + "type": "string" + } + } + }, + "refdataWorkspaceInfo": { + "type": "object", + "additionalProperties": false, + "properties": { + "title": { + "type": "string" + }, + "source": { + "type": "string" + } + } + }, + "otherWorkspaceInfo": { + "type": "object", + "additionalProperties": false, + "properties": { + "title": { + "type": "string" + } + } + }, + "typeCounts": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^[A-Z][a-zA-Z]*$": { + "type": "integer" + } + } + }, + "postProcessing": { + "type": "object", + "additionalProperties": false, + "properties": { + "ids_only": { + "$ref": "#/definitions/sdk_boolean" + }, + "skip_info": { + "$ref": "#/definitions/sdk_boolean" + }, + "skip_keys": { + "$ref": "#/definitions/sdk_boolean" + }, + "skip_data": { + "$ref": "#/definitions/sdk_boolean" + }, + "include_highlight": { + "$ref": "#/definitions/sdk_boolean" + }, + "add_narrative_info": { + "$ref": "#/definitions/sdk_boolean" + }, + "add_access_group_info": { + "$ref": "#/definitions/sdk_boolean" + } + } + }, + "objectRef": { + "type": "object", + "additionalProperties": false, + "properties": { + "ref": { + "type": "string" + }, + "workspace_id": { + "type": "integer" + }, + "object_id": { + "type": "integer" + }, + "version": { + "type": "integer" + } + } + }, + "accessFilter": { + "type": "object", + "allOf": [{ + "not": { + "required": [], + "properties": { + "with_private": { + "const": 0 + }, + "with_public": { + "const": 0 + } + } + } + }, + { + "required": [], + "properties": { + "with_private": { + "$ref": "#/definitions/sdk_boolean" + }, + "with_public": { + "$ref": "#/definitions/sdk_boolean" + } + } + } + ] + }, + "matchFilter": { + "type": "object", + "required": [], + "additionalProperties": false, + "properties": { + "full_text_in_all": { + "type": "string" + }, + "timestamp": { + "$ref": "#/definitions/keyMatchItem" + }, + "lookup_in_keys": { + "$ref": "#/definitions/keyMatch" + }, + "exclude_subobjects": { + "$ref": "#/definitions/sdk_boolean" + }, + "source_tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "source_tags_blacklist": { + "type": "integer" + } + } + }, + "keyMatch": { + "type": "object", + "additionalProperties": false, + 
"patternProperties": { + "^\\w+$": { + "$ref": "#/definitions/keyMatchItem" + } + } + }, + "keyMatchItem": { + "oneOf": [{ + "type": "object", + "additionalProperties": false, + "required": [ + "value" + ], + "properties": { + "value": { + "type": "string" + } + } + }, + { + "type": "object", + "additionalProperties": false, + "required": [ + "string_value" + ], + "properties": { + "string_value": { + "type": "string" + } + } + }, + { + "type": "object", + "additionalProperties": false, + "required": [ + "int_value" + ], + "properties": { + "int_value": { + "type": "integer" + } + } + }, + { + "type": "object", + "additionalProperties": false, + "required": [ + "double_value" + ], + "properties": { + "double_value": { + "type": "number" + } + } + }, + { + "type": "object", + "additionalProperties": false, + "required": [ + "bool_value" + ], + "properties": { + "bool_value": { + "type": "boolean" + } + } + }, + { + "type": "object", + "additionalProperties": false, + "anyOf": [{ + "required": [ + "min_int" + ] + }, + { + "required": [ + "max_int" + ] + }, + { + "required": [ + "min_int", + "max_int" + ] + } + ], + "properties": { + "min_int": { + "type": "integer" + }, + "max_int": { + "type": "integer" + } + } + }, + { + "type": "object", + "additionalProperties": false, + "anyOf": [{ + "required": [ + "min_double" + ] + }, + { + "required": [ + "max_double" + ] + }, + { + "required": [ + "min_double", + "max_double" + ] + } + ], + "properties": { + "min_double": { + "type": "number" + }, + "max_double": { + "type": "number" + } + } + }, + { + "type": "object", + "additionalProperties": false, + "anyOf": [{ + "required": [ + "min_date" + ] + }, + { + "required": [ + "max_date" + ] + }, + { + "required": [ + "min_date", + "max_date" + ] + } + ], + "properties": { + "min_boolean": { + "type": "boolean" + }, + "max_boolean": { + "type": "boolean" + } + } + } + ] + } + } +} \ No newline at end of file diff --git a/src/search1_rpc/service.py b/src/search1_rpc/service.py index d3e4a78..2abdf27 100644 --- a/src/search1_rpc/service.py +++ b/src/search1_rpc/service.py @@ -1,5 +1,5 @@ """ -JSON-RPC 2.0 service for the legacy API +JSON-RPC 1.1 service for the legacy API All methods follow this workflow: - convert RPC params into Elasticsearch query @@ -10,23 +10,37 @@ - search1_conversion.convert_params - es_client.search - search1_conversion.convert_result + +Note that the methods implement KBase's convention for JSON-RPC 1.1, +in which the request `params` are an array of one element, usually containing +an object, each property of which is considered a `param`, and in which the +results are also wrapped in an array of one element. 
""" -import jsonrpcbase -import time -from src.es_client import search +import time +import os +from jsonrpc11base import JSONRPCService +from jsonrpc11base.service_description import ServiceDescription +from src.es_client.query import search from src.search1_conversion import convert_params, convert_result -from src.utils.config import config from src.utils.logger import logger +from src.search1_rpc.errors import trap_error + +# A JSON-RPC 1.1 service description +description = ServiceDescription( + 'The KBase Legacy Search API', + 'https://github.com/kbase/search_api2/src/search1_rpc/schemas', + summary='This is the legacy search interface to the KBase search service', + version='1.0' +) + +SCHEMA_DIR = os.path.join(os.path.dirname(__file__), 'schemas') -service = jsonrpcbase.JSONRPCService( - info={ - 'title': 'Search API (legacy endpoints)', - 'description': 'Search endpoints for the legacy API', - 'version': config['app_version'], - }, - schema='legacy-schema.yaml', - development=config['dev'], +service = JSONRPCService( + description=description, + schema_dir=SCHEMA_DIR, + validate_params=True, + validate_result=True ) @@ -36,33 +50,31 @@ def get_objects(params, meta): params = params[0] start = time.time() query = convert_params.get_objects(params) - result = convert_result.get_objects(params, search(query, meta), meta) + search_result = trap_error(lambda: search(query, meta)) + result = convert_result.get_objects(params, search_result, meta) logger.debug(f'Finished get_objects in {time.time() - start}s') - # KBase convention is to return result in a singleton list return [result] def search_objects(params, meta): - # KBase convention is to wrap params in an array if isinstance(params, list) and len(params) == 1: params = params[0] start = time.time() query = convert_params.search_objects(params) - result = convert_result.search_objects(params, search(query, meta), meta) + search_result = trap_error(lambda: search(query, meta)) + result = convert_result.search_objects(params, search_result, meta) logger.debug(f'Finished search_objects in {time.time() - start}s') - # KBase convention is to return result in a singleton list return [result] def search_types(params, meta): - # KBase convention is to wrap params in an array if isinstance(params, list) and len(params) == 1: params = params[0] start = time.time() query = convert_params.search_types(params) - result = convert_result.search_types(params, search(query, meta), meta) + search_result = trap_error(lambda: search(query, meta)) + result = convert_result.search_types(search_result) logger.debug(f'Finished search_types in {time.time() - start}s') - # KBase convention is to return result in a singleton list return [result] diff --git a/src/search2_conversion/convert_params.py b/src/search2_conversion/convert_params.py index b35249d..c29aee4 100644 --- a/src/search2_conversion/convert_params.py +++ b/src/search2_conversion/convert_params.py @@ -39,7 +39,7 @@ def search_workspace(params, meta): if "search" in params: # Elasticsearch Simple Query String query = params["search"]["query"] - fields = params["search"]["fields"] + fields = params["search"].get("fields", ['agg_fields']) converted["query"]["bool"]["must"].append({ "simple_query_string": { "fields": fields, @@ -50,6 +50,9 @@ def search_workspace(params, meta): if "filters" in params: converted_query = _convert_filters(params['filters']) converted["query"]["bool"]["must"].append(converted_query) + paging = params.get('paging', {}) + converted['from'] = paging.get('offset', 0) + 
converted['size'] = paging.get('length', 10) return converted diff --git a/src/search2_rpc/service.py b/src/search2_rpc/service.py index 3a7dbfb..0a62c5d 100644 --- a/src/search2_rpc/service.py +++ b/src/search2_rpc/service.py @@ -50,6 +50,10 @@ def show_config(params, meta): """ Display publicly readable configuration settings for this server. Be sure to add new entries here explicitly so that nothing is shown unintentionally. + + Hidden keys: + - index_prefix_delimiter: should always be "."; should never be overridden + - index_suffix_delimiter: should always be "_"; should never be overridden """ exposed_keys = ['dev', 'elasticsearch_url', 'workspace_url', 'index_prefix', 'global', 'workers', 'user_profile_url'] diff --git a/src/server/__main__.py b/src/server/__main__.py index 539abaf..f30a9a4 100644 --- a/src/server/__main__.py +++ b/src/server/__main__.py @@ -1,17 +1,29 @@ """The main entrypoint for running the Flask server.""" import json import sanic +import time import traceback from src.search1_rpc import service as legacy_service from src.search2_rpc import service as rpc_service from src.utils.config import config from src.utils.logger import logger +from src.utils.obj_utils import get_path from src.utils.wait_for_service import wait_for_service app = sanic.Sanic(name='search2') +# Mapping of JSON-RPC status code to HTTP response status +_ERR_STATUS = { + -32000: 500, # Server error + -34001: 401, # Unauthorized + -32005: 404, # Type not found + -32002: 404, # Index not found +} + +# TODO: services should not implement CORS - it should be handled +# by the services proxy @app.middleware('request') async def cors_options(request): """Handle a CORS OPTIONS request.""" @@ -31,17 +43,23 @@ async def root(request): """Handle JSON RPC methods.""" auth = request.headers.get('Authorization') body = _convert_rpc_formats(request.body) - result = rpc_service.call(body, {'auth': auth}) - return sanic.response.text(result, content_type='application/json') + result = rpc_service.call_py(body, {'auth': auth}) + status = _get_status_code(result) + return sanic.response.json(result, status=status) -@app.route('/legacy', methods=['POST', 'GET', 'OPTIONS']) +@app.route('/legacy', methods=None) async def legacy(request): """Handle legacy-formatted requests that are intended for the previous Java api.""" + # Manually handle these, so as not to inflame sanic into handling + # unhandled method errors. + if request.method != 'POST': + return sanic.response.raw(b'', status=405) auth = request.headers.get('Authorization') - body = _convert_rpc_formats(request.body) - result = legacy_service.call(body, {'auth': auth}) - return sanic.response.text(result, content_type='application/json') + result = legacy_service.call(request.body, {'auth': auth}) + return sanic.response.raw( + bytes(result, 'utf-8'), + headers={'content-type': 'application/json'}) @app.middleware('response') @@ -63,6 +81,8 @@ async def any_exception(request, err): Handle any unexpected server error. Theoretically, this should never be reached. JSONRPCBase will handle method error responses. + TODO: This assumes JSON-RPC 2.0 for all calls handled by this server; + yet the legacy api is JSON-RPC 1.1. 
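For example (a sketch only; the error object is hypothetical), an unknown-index RPC error would be mapped to a 404 by the _get_status_code helper defined below:

rpc_result = {
    'jsonrpc': '2.0',
    'id': 1,
    'error': {'code': -32002, 'message': 'Unknown index'},
}
# _get_status_code(rpc_result) looks up -32002 in _ERR_STATUS above and returns 404;
# unknown error codes fall back to 400, and a result without an 'error' key yields 200.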
""" traceback.print_exc() return sanic.response.json({ @@ -97,8 +117,21 @@ def _convert_rpc_formats(body: str): if 'version' not in data and 'jsonrpc' not in data: data['jsonrpc'] = '2.0' if 'id' not in data: - data['id'] = '0' - return json.dumps(data) + data['id'] = int(time.time() * 1000) + return data + + +def _get_status_code(result: dict) -> int: + """ + Create an HTTP status code from a JSON-RPC response + Technically, JSON-RPC could ignore HTTP status codes. But for the sake of + usability and convenience, we return non-2xx status codes when there is an + error. + """ + error_code = get_path(result, ['error', 'code']) + if error_code is not None: + return _ERR_STATUS.get(error_code, 400) + return 200 # Wait for dependencies to start diff --git a/src/utils/config.py b/src/utils/config.py index f1f8f6d..f5bae4f 100644 --- a/src/utils/config.py +++ b/src/utils/config.py @@ -7,18 +7,23 @@ def init_config(): """ Initialize configuration data for the whole app """ + # TODO: it might be better to NOT default to testing configuration, + # but rather explicitly set the test environment. + # Reason? A failure to configure one of these in prod could lead to + # confusing failure conditions. ws_url = os.environ.get('WORKSPACE_URL', 'https://ci.kbase.us/services/ws').strip('/') es_url = os.environ.get('ELASTICSEARCH_URL', 'http://localhost:9200').strip('/') index_prefix = os.environ.get('INDEX_PREFIX', 'test') - prefix_delimiter = os.environ.get('INDEX_PREFIX_DELIMITER', '_') + prefix_delimiter = os.environ.get('INDEX_PREFIX_DELIMITER', '.') + suffix_delimiter = os.environ.get('INDEX_SUFFIX_DELIMITER', '_') config_url = os.environ.get( 'GLOBAL_CONFIG_URL', 'https://github.com/kbase/index_runner_spec/releases/latest/download/config.yaml' ) user_profile_url = os.environ.get( 'USER_PROFILE_URL', - 'https://ci.kbase.us/services/user_profile/rpc' - ).strip('/') + 'https://ci.kbase.us/services/user_profile/rpc/' + ) # Load the global configuration release (non-environment specific, public config) allowed_protocols = ('https://', 'http://', 'file://') matches_protocol = (config_url.startswith(prot) for prot in allowed_protocols) @@ -34,6 +39,7 @@ def init_config(): 'elasticsearch_url': es_url, 'index_prefix': index_prefix, 'prefix_delimiter': prefix_delimiter, + 'suffix_delimiter': suffix_delimiter, 'workspace_url': ws_url, 'user_profile_url': user_profile_url, 'workers': int(os.environ.get('WORKERS', 8)), diff --git a/src/utils/formatting.py b/src/utils/formatting.py index cbe955b..bafec12 100644 --- a/src/utils/formatting.py +++ b/src/utils/formatting.py @@ -1,5 +1,5 @@ from datetime import datetime -def iso8601_to_epoch(time_string): - return round(datetime.strptime(time_string, '%Y-%m-%dT%H:%M:%S%z').timestamp()) +def iso8601_to_epoch_ms(time_string): + return round(datetime.strptime(time_string, '%Y-%m-%dT%H:%M:%S%z').timestamp() * 1000) diff --git a/src/utils/logger.py b/src/utils/logger.py index 17c64c3..37e63fa 100644 --- a/src/utils/logger.py +++ b/src/utils/logger.py @@ -8,11 +8,11 @@ def init_logger(): Initialize log settings. Mutates the `logger` object. 
""" logging.getLogger("urllib3").setLevel(logging.WARNING) - logger = logging.getLogger('search2') + search2_logger = logging.getLogger('search2') # Set the log level level = os.environ.get('LOGLEVEL', 'DEBUG').upper() - logger.setLevel(level) - logger.propagate = False # Don't print duplicate messages + search2_logger.setLevel(level) + search2_logger.propagate = False # Don't print duplicate messages logging.basicConfig(level=level) # Create the formatter fmt = "%(asctime)s %(levelname)-8s %(message)s (%(filename)s:%(lineno)s)" @@ -21,9 +21,17 @@ def init_logger(): # Stdout stdout_handler = logging.StreamHandler(sys.stdout) stdout_handler.setFormatter(formatter) - logger.addHandler(stdout_handler) - print(f'Logger and level: {logger}') - return logger + search2_logger.addHandler(stdout_handler) + print(f'Logger and level: {search2_logger}') + print(f''' +** To see more or less logging information, adjust the +** log level by setting the LOGLEVEL environment variable +** to one of: +** CRITICAL ERROR WARNING INFO DEBUG NOTSET +** It is currently set to: +** {level} +''') + return search2_logger logger = init_logger() diff --git a/src/utils/user_profiles.py b/src/utils/user_profiles.py index 9ffe2e9..cc2b894 100644 --- a/src/utils/user_profiles.py +++ b/src/utils/user_profiles.py @@ -5,13 +5,11 @@ from src.exceptions import UserProfileError -def get_user_profiles(usernames: list, auth_token): +def get_user_profiles(usernames: list, auth_token=None): """ Get a list of workspace IDs that the given username is allowed to access in the workspace. """ - if not auth_token: - return [] # anonymous users url = config['user_profile_url'] # TODO session cache this # Make a request to the workspace using the user's auth token to find their readable workspace IDs @@ -20,7 +18,9 @@ def get_user_profiles(usernames: list, auth_token): 'version': '1.1', 'params': [usernames] } - headers = {'Authorization': auth_token} + headers = {} + if auth_token is not None: + headers['Authorization'] = auth_token resp = requests.post( url=url, data=json.dumps(payload), diff --git a/src/utils/wait_for_service.py b/src/utils/wait_for_service.py index b783e6a..6eff310 100644 --- a/src/utils/wait_for_service.py +++ b/src/utils/wait_for_service.py @@ -3,18 +3,22 @@ from src.utils.logger import logger +DEFAULT_TIMEOUT = 180 +WAIT_POLL_INTERVAL = 5 -def wait_for_service(url, name, timeout=180): + +def wait_for_service(url, name, timeout=DEFAULT_TIMEOUT): start = time.time() while True: logger.info(f'Attempting to connect to {name} at {url}') try: - requests.get(url).raise_for_status() + requests.get(url, timeout=timeout).raise_for_status() logger.info(f'{name} is online!') break except Exception: logger.info(f'Waiting for {name} at {url}') - if time.time() - start > timeout: - logger.error(f'Unable to connect to {name} at {url}') + total_elapsed = time.time() - start + if total_elapsed > timeout: + logger.error(f'Unable to connect to {name} at {url} after {total_elapsed} seconds') exit(1) - time.sleep(5) + time.sleep(WAIT_POLL_INTERVAL) diff --git a/src/utils/workspace.py b/src/utils/workspace.py index 2a3b57a..754096d 100644 --- a/src/utils/workspace.py +++ b/src/utils/workspace.py @@ -6,47 +6,46 @@ from typing import Optional from src.utils.config import config -from src.exceptions import ResponseError +from src.exceptions import AuthError -def ws_auth(auth_token): +def ws_auth(auth_token, only_public=False, only_private=False): """ Get a list of workspace IDs that the given username is allowed to access in the workspace. 
""" - if not auth_token: - return [] # anonymous users # TODO session cache this # Make a request to the workspace using the user's auth token to find their # readable workspace IDs - params = {'perm': 'r'} + params = { + 'perm': 'r' + } + + if only_public: + if only_private: + raise Exception('Only one of "only_public" or "only_private" may be set') + params['onlyGlobal'] = 1 + params['excludeGlobal'] = 0 + elif only_private: + params['onlyGlobal'] = 0 + params['excludeGlobal'] = 1 + else: + params['onlyGlobal'] = 0 + params['excludeGlobal'] = 0 + result = _req('list_workspace_ids', params, auth_token) - return result['workspaces'] + return result.get('workspaces', []) + result.get('pub', []) -def get_workspace_info(workspace_id, auth_token): +def get_workspace_info(workspace_id, auth_token=None): """ - Given a list of workspace ids, return the associated workspace info for each one + Given a workspace id, return the associated workspace info """ - if not auth_token: - # TODO are we sure we want this? Doesn't make a lot of sense - return [] # anonymous users # TODO session cache this - # Make a request to the workspace using the user's auth token to find their - # readable workspace IDs params = {'id': workspace_id} return _req('get_workspace_info', params, auth_token) -def get_object_info(refs, token=None): - params = { - 'includeMetadata': 1, - 'objects': [{'ref': ref} for ref in refs], - } - result = _req('get_object_info3', params, token) - return result['infos'] - - def _req(method: str, params: dict, token: Optional[str]): """Make a generic workspace http/rpc request""" payload = { @@ -55,18 +54,23 @@ def _req(method: str, params: dict, token: Optional[str]): 'id': 0, 'params': [params], } - headers = {'Authorization': token} + + headers = {} + if token is not None: + headers['Authorization'] = token + resp = requests.post( url=config['workspace_url'], headers=headers, data=json.dumps(payload), ) + resp_json = None result = None try: result = resp.json().get('result') + resp_json = resp.json() except json.decoder.JSONDecodeError: pass if not resp.ok or not result or len(result) == 0: - msg = f"Authorization failed with response:\n{resp.text}" - raise ResponseError(code=-32001, message=msg, status=403) + raise AuthError(resp_json, resp.text) return result[0] diff --git a/tests/helpers/common.py b/tests/helpers/common.py new file mode 100644 index 0000000..a3174ca --- /dev/null +++ b/tests/helpers/common.py @@ -0,0 +1,94 @@ +import time + + +def wait_for_line(fname, predicate, timeout=10, line_count=1): + f = open(fname, 'r') + started = time.time() + times = 0 + while True: + if (time.time() - started) > timeout: + f.close() + return False + line = f.readline() + if not line or not line.endswith('\n'): + time.sleep(0.1) + continue + if predicate(line): + times = times + 1 + if times >= line_count: + f.close() + return True + + +def assert_jsonrpc20_result(actual, expected): + assert actual['jsonrpc'] == '2.0' + assert actual['id'] == expected['id'] + assert 'result' in actual + result = actual['result'] + assert isinstance(result, list) + assert 'error' not in actual + return result + + +def assert_jsonrpc20_error(actual, expected): + assert actual['jsonrpc'] == '2.0' + assert actual['id'] == expected['id'] + assert 'result' not in actual + assert 'error' in actual + error = actual['error'] + assert isinstance(error, dict) + return error + + +def assert_jsonrpc11_result(actual, expected): + assert actual['version'] == '1.1' + if 'id' in actual: + assert actual['id'] == expected['id'] 
+ assert 'result' in actual + result = actual['result'] + assert isinstance(result, list) + assert 'error' not in actual + assert len(result) == 1 + return result[0] + + +def assert_jsonrpc11_error(actual, expected): + assert actual['version'] == '1.1' + if 'id' in actual: + assert actual['id'] == expected['id'] + assert 'result' not in actual + assert 'error' in actual + error = actual['error'] + assert isinstance(error, dict) + return error + + +def equal(d1, d2, path=[]): + if isinstance(d1, dict): + if isinstance(d2, dict): + d1_keys = set(d1.keys()) + d2_keys = set(d2.keys()) + if len(d1_keys.difference(d2_keys)) > 0: + return [False, path] + for key in d1_keys: + path.append(key) + if not equal(d1[key], d2[key], path): + return [False, path] + path.pop() + + return [True, path] + else: + return [False, path] + elif isinstance(d1, list): + if isinstance(d2, list): + if len(d1) != len(d2): + return [False, path] + i = 0 + for d1value, d2value in zip(d1, d2): + i += 1 + path.append(i) + if not equal(d1value, d2value, path): + return [False, path] + return [True, path] + else: + return [d2 == d1, path] diff --git a/tests/helpers/init_elasticsearch.py b/tests/helpers/init_elasticsearch.py index 59aaf9b..dea80f0 100644 --- a/tests/helpers/init_elasticsearch.py +++ b/tests/helpers/init_elasticsearch.py @@ -20,15 +20,22 @@ # Simple run once semaphore _COMPLETED = False +# +# For the test docs, note the workspace must match the doc's idea of permissions. +# See data.py for the workspace definitions in which: +# 0 - public workspace, refdata +# 1 - public workspace, narrative +# 100 - private workspace, narrative +# 101 - private, inaccessible workspace, narrative test_docs = [ - # Public doc - {'name': 'public-doc1', 'access_group': '1', 'is_public': True, 'timestamp': 10}, - # Public doc - {'name': 'public-doc2', 'access_group': '99', 'is_public': True, 'timestamp': 12}, + # Public doc, refdata + {'name': 'public-doc1', 'access_group': '0', 'is_public': True, 'timestamp': 10}, + # Public doc, narrative + {'name': 'public-doc2', 'access_group': '1', 'is_public': True, 'timestamp': 12}, # Private but accessible doc - {'name': 'private-doc1', 'is_public': False, 'access_group': '1', 'timestamp': 7}, + {'name': 'private-doc1', 'is_public': False, 'access_group': '100', 'timestamp': 7}, # Private but inaccessible doc - {'name': 'private2-doc1', 'is_public': False, 'access_group': '99', 'timestamp': 9}, + {'name': 'private-doc2', 'is_public': False, 'access_group': '101', 'timestamp': 9}, ] narrative_docs = [ diff --git a/tests/helpers/integration_setup.py b/tests/helpers/integration_setup.py index 14ffc4d..c43b350 100644 --- a/tests/helpers/integration_setup.py +++ b/tests/helpers/integration_setup.py @@ -1,37 +1,95 @@ -import os import subprocess +import signal +from src.utils.wait_for_service import wait_for_service +from src.utils.logger import logger +import json +import os +import requests +from . 
import common +from .common import assert_jsonrpc11_result, equal + +container_process = None +container_out = None +container_err = None +stop_timeout = 30 + + +def start_service(app_url): + global container_process + global container_out + global container_err + + # Build and start the app using docker-compose + cwd = 'tests/integration/docker' + logger.info(f'Running docker-compose file in "{cwd}"') + cmd = "docker-compose --no-ansi up" + logger.info(f'Running command:\n{cmd}') + container_out = open("container.out", "w") + container_err = open("container.err", "w") + container_process = subprocess.Popen(cmd, shell=True, + stdout=container_out, + stderr=container_err, + cwd=cwd) + wait_for_service(app_url, "search2") + + +def stop_service(): + global container_process + global container_out + global container_err + + if container_process is not None: + logger.info('Stopping container') + container_process.send_signal(signal.SIGTERM) + logger.info('Waiting until service has stopped...') + + if not common.wait_for_line("container.err", + lambda line: 'Stopping' in line and 'done' in line, + timeout=stop_timeout, + line_count=1): + raise Exception(f'Container did not stop in the alloted time of {stop_timeout} seconds') + logger.info('...stopped!') + + if container_err is not None: + container_err.close() + + if container_out is not None: + container_out.close() + + +def load_data_file(method, name): + """Load the json test data file with the given name from ./data/legacy """ + file_path = os.path.join(os.path.dirname(__file__), '../integration/data/legacy', method, name) + logger.info(f'loading data file from "{file_path}"') + with open(file_path) as f: + return json.load(f) + + +def do_rpc(url, request_data, response_data): + """Send the given jsonrpc request, do basic jsonrpc 1.1 compliance check.""" + resp = requests.post( + url=url, + headers={'Authorization': os.environ['WS_TOKEN']}, + data=json.dumps(request_data), + ) + return assert_jsonrpc11_result(resp.json(), response_data) + + +def assert_equal_results(actual_result, expected_result): + """Asserts that the actual results match expected; omits non-deterministic fields""" + for key in ['pagination', 'sorting_rules', 'total', 'objects']: + assert equal(actual_result[key], expected_result[key]) + + # Optional keys (may be enabled, or not) + # We check if the specified keys are expected, and matching, + # or present, and expected. + for key in ['access_group_narrative_info', 'access_group_narrative_info']: + # here we check only if it is in our expected result. + if key in expected_result: + assert equal(actual_result[key], expected_result[key]) -def _build(img_name): - """Build the test image. This blocks until finished""" - cmd = f"docker build . 
-t {img_name}" - print(f'Running command:\n{cmd}') - proc = subprocess.Popen(cmd, shell=True) - proc.wait() - - -def setup(): - if os.environ.get("SKIP_DOCKER"): - return - ES_URL = os.environ.get("ES_URL", "http://localhost:9500") - # TOKEN = os.environ["TOKEN"] - INDEX_PREFIX = os.environ.get('INDEX_PREFIX', 'search2') - USER_PROFILE_URL = os.environ.get('USER_PROFILE_URL', 'https://ci.kbase.us/services/user_profile') - WS_URL = os.environ.get('WS_URL', 'https://ci.kbase.us/services/ws') - IMAGE_NAME = "kbase/search2" - # Build and start the app using docker - _build(IMAGE_NAME) - cmd = f""" - docker run -e ELASTICSEARCH_URL={ES_URL} \\ - -e WORKSPACE_URL={WS_URL} \\ - -e USER_PROFILE_URL={USER_PROFILE_URL} \\ - -e WORKERS=2 \\ - -e INDEX_PREFIX={INDEX_PREFIX} \\ - -e INDEX_PREFIX_DELIMITER=. \\ - -e DEVELOPMENT=1 \\ - -p 5000:5000 \\ - --network host \\ - {IMAGE_NAME} - """ - print(f'Running command:\n{cmd}') - subprocess.Popen(cmd, shell=True) + for key in ['access_group_narrative_info', 'access_group_narrative_info']: + # but here we check if is in the actual result. + if key in actual_result: + assert equal(actual_result[key], expected_result[key]) diff --git a/tests/helpers/unit_setup.py b/tests/helpers/unit_setup.py new file mode 100644 index 0000000..773c843 --- /dev/null +++ b/tests/helpers/unit_setup.py @@ -0,0 +1,57 @@ +import subprocess +import signal +from src.utils.wait_for_service import wait_for_service +from src.utils.logger import logger +from . import common +import json +import os + + +container_process = None +container_out = None +container_err = None +stop_timeout = 30 + + +def load_data_file(name): + """Load the json test data file with the given name from ./data/legacy """ + file_path = os.path.join(os.path.dirname(__file__), '../unit/data', name) + logger.info(f'loading data file from "{file_path}"') + with open(file_path) as f: + return json.load(f) + + +def start_service(wait_for_url, wait_for_name): + global container_process + global container_out + global container_err + + cmd = "docker-compose --no-ansi up" + logger.info(f'Running command:\n{cmd}') + container_out = open("container.out", "w") + container_err = open("container.err", "w") + container_process = subprocess.Popen(cmd, shell=True, stdout=container_out, stderr=container_err) + wait_for_service(wait_for_url, wait_for_name) + + +def stop_service(): + global container_process + global container_out + global container_err + + if container_process is not None: + logger.info('Stopping container') + container_process.send_signal(signal.SIGTERM) + logger.info('Waiting until service has stopped...') + if not common.wait_for_line("container.err", + lambda line: 'Stopping' in line and 'done' in line, + timeout=stop_timeout, + line_count=2): + logger.warning(f'Container did not stop in the alotted time of {stop_timeout} seconds') + logger.info('...stopped!') + + if container_err is not None: + container_err.close() + + if container_out is not None: + container_out.close() diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py new file mode 100644 index 0000000..670c619 --- /dev/null +++ b/tests/integration/conftest.py @@ -0,0 +1,21 @@ +# content of a/conftest.py +import pytest +import os +from tests.helpers.integration_setup import ( + start_service, + stop_service +) + + +DEFAULT_APP_URL = 'http://localhost:5000' + +APP_URL = os.environ.get("APP_URL", DEFAULT_APP_URL) + + +@pytest.fixture(scope="session") +def service(): + if APP_URL == DEFAULT_APP_URL: + start_service(APP_URL) + yield 
{'app_url': APP_URL} + if APP_URL == DEFAULT_APP_URL: + stop_service() diff --git a/tests/integration/data.py b/tests/integration/data.py index d4d0b06..12f16a7 100644 --- a/tests/integration/data.py +++ b/tests/integration/data.py @@ -136,7 +136,6 @@ "ascending": 1 }], "objects": [], - "objects_info": [], "total": 6509, "search_time": 1918, "access_group_narrative_info": {}, @@ -273,7 +272,7 @@ "object_name": "kb|g.2231", "timestamp": 1453530416321, "type": "GenomeFeature", - "type_ver": 1, + "index_version": 1, "creator": "kbasetest", "mod": "KBase Search", "parent_data": { @@ -313,33 +312,46 @@ "search_time": 3183, "access_groups_info": { "4258": [4258, "KBasePublicGenomesV5", "kbasetest", "2017-02-02T06:31:09+0000", 36987, "n", "r", "unlocked", {}] - }, - "objects_info": { - "4258/25234/1": [ - 25234, - "kb|g.28454", - "KBaseGenomes.Genome-8.0", - "2016-01-23T10:20:08+0000", - 1, - "kbasetest", - 4258, - "KBasePublicGenomesV5", - "f0f46269f7491407f8377cf5969c0cd9", - 6317976, - { - "GC content": "39.3381390370841", - "Genetic code": "11", - "Taxonomy": "Bacteria; Firmicutes; Clostridia; Clostridiales; Eubacteriaceae; Acetobacterium; Acetobacterium woodii DSM 1030", # noqa - "Source ID": "931626.3", - "Size": "4044777", - "Number features": "3832", - "Number contigs": "1", - "Domain": "Bacteria", - "Source": "KBase Central Store", - "Name": "Acetobacterium woodii DSM 1030", - "MD5": "31c52c7713b9ba638580e9d1812558e5" - } - ] } }] } + +# Basic ecoli search example with all metadtaa +search_request6 = { + "params": [{ + "match_filter": { + "full_text_in_all": "coli", + "exclude_subobjects": 1, + "source_tags": ["refdata", "noindex"], + "source_tags_blacklist": 1 + }, + "pagination": { + "start": 0, + "count": 20 + }, + "post_processing": { + "ids_only": 0, + "skip_info": 0, + "skip_keys": 0, + "skip_data": 0, + "include_highlight": 1, + "add_narrative_info": 1 + }, + "access_filter": { + "with_private": 1, + "with_public": 1 + }, + "sorting_rules": [{ + "is_object_property": 0, + "property": "access_group_id", + "ascending": 0 + }, { + "is_object_property": 0, + "property": "type", + "ascending": 1 + }] + }], + "method": "KBaseSearchEngine.search_objects", + "version": "1.1", + "id": "4564119057768642" +} diff --git a/tests/integration/data/legacy/search_objects/case-01-request.json b/tests/integration/data/legacy/search_objects/case-01-request.json new file mode 100644 index 0000000..066d09b --- /dev/null +++ b/tests/integration/data/legacy/search_objects/case-01-request.json @@ -0,0 +1,38 @@ +{ + "params": [{ + "match_filter": { + "full_text_in_all": "ecoli_2contigs_orig", + "exclude_subobjects": 1, + "source_tags": ["refdata", "noindex"], + "source_tags_blacklist": 1 + }, + "pagination": { + "start": 0, + "count": 20 + }, + "post_processing": { + "ids_only": 0, + "skip_info": 0, + "skip_keys": 0, + "skip_data": 0, + "include_highlight": 1, + "add_narrative_info": 1 + }, + "access_filter": { + "with_private": 0, + "with_public": 1 + }, + "sorting_rules": [{ + "is_object_property": 0, + "property": "access_group_id", + "ascending": 0 + }, { + "is_object_property": 0, + "property": "type", + "ascending": 1 + }] + }], + "method": "KBaseSearchEngine.search_objects", + "version": "1.1", + "id": "5092263263143479" +} \ No newline at end of file diff --git a/tests/integration/data/legacy/search_objects/case-01-response.json b/tests/integration/data/legacy/search_objects/case-01-response.json new file mode 100644 index 0000000..70c10bf --- /dev/null +++ 
b/tests/integration/data/legacy/search_objects/case-01-response.json @@ -0,0 +1,312 @@ +{ + "object_version": "1.1", + "result": [{ + "pagination": { + "start": 0, + "count": 20 + }, + "sorting_rules": [{ + "is_object_property": 0, + "property": "access_group_id", + "ascending": 0 + }, { + "is_object_property": 0, + "property": "type", + "ascending": 1 + }], + "total": 4, + "search_time": 793, + "objects": [{ + "object_name": "ecoli_2contigs_orig", + "workspace_id": 45320, + "object_id": 4, + "object_version": 4, + "timestamp": 1574124911386, + "workspace_type_name": "Genome", + "creator": "jayrbolton", + "data": { + "genome_id": "ecoli_2contigs_orig", + "scientific_name": "Escherichia coli str. K-12 substr. MG1655", + "publication_titles": ["A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "The complete genome sequence of Escherichia coli K-12", "Direct Submission", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "A more accurate sequence comparison between genomes of Escherichia coli K12 W3110 and MG1655 strains", "Workshop on Annotation of Escherichia coli K-12", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "ASAP: Escherichia coli K-12 strain MG1655 version m56", "Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction"], + "publication_authors": ["Blattner,F.R. and Plunkett,G. III.", "Arnaud,M., Berlyn,M.K.B., Blattner,F.R., Galperin,M.Y., Glasner,J.D., Horiuchi,T., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Riley,M., Rudd,K.E., Serres,M.H., Thomas,G.H. and Wanner,B.L.", "Blattner,F.R., Plunkett,G. III, Bloch,C.A., Perna,N.T., Burland,V., Riley,M., Collado-Vides,J., Glasner,J.D., Rode,C.K., Mayhew,G.F., Gregor,J., Davis,N.W., Kirkpatrick,H.A., Goeden,M.A., Rose,D.J., Mau,B. and Shao,Y.", "Plunkett,G. III.", "Perna,N.T.", "Hayashi,K., Morooka,N., Yamamoto,Y., Fujita,K., Isono,K., Choi,S., Ohtsubo,E., Baba,T., Wanner,B.L., Mori,H. and Horiuchi,T.", "Hayashi,K., Morooka,N., Mori,H. and Horiuchi,T.", "Riley,M., Abe,T., Arnaud,M.B., Berlyn,M.K., Blattner,F.R., Chaudhuri,R.R., Glasner,J.D., Horiuchi,T., Keseler,I.M., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Rudd,K.E., Serres,M.H., Thomas,G.H., Thomson,N.R., Wishart,D. and Wanner,B.L.", "Glasner,J.D., Perna,N.T., Plunkett,G. III, Anderson,B.D., Bockhorst,J., Hu,J.C., Riley,M., Rudd,K.E. 
and Serres,M.H.", "Rudd,K.E."], + "size": 4641772, + "num_contigs": 2, + "genome_type": null, + "gc_content": 0.5079, + "taxonomy": "Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli; Escherichia coli K-12", + "mean_contig_length": 2320886.0, + "external_origination_date": "08-Aug-2016", + "original_source_file_name": "GCF_000005845.2_ASM584v2_altered_genomic.gbff", + "cds_count": 4319, + "feature_count": 4319, + "mrna_count": 0, + "non_coding_feature_count": 773, + "assembly_ref": "45273:1:1", + "source_id": "NC_000913", + "feature_counts": { + "CDS": 4319, + "gene": 4498, + "misc_feature": 11, + "mobile_element": 49, + "ncRNA": 65, + "non_coding_features": 773, + "non_coding_genes": 179, + "protein_encoding_gene": 4319, + "rRNA": 22, + "rep_origin": 1, + "repeat_region": 355, + "tRNA": 89, + "tmRNA": 2 + }, + "source": "Genbank", + "warnings": [], + "shared_users": ["jayrbolton"], + "creation_date": "2019-11-30T17:37:45+0000", + "is_public": true, + "copied": "45273/2/1", + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::45320:4", + "kbase_id": "45320/4/1", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "genome_id": ["ecoli_2contigs_orig"], + "object_name": ["ecoli_2contigs_orig"] + } + }, { + "object_name": "Narrative.1574286995467", + "workspace_id": 45320, + "object_id": 1, + "object_version": 13, + "timestamp": 1586801969406, + "workspace_type_name": "Narrative", + "creator": "jayrbolton", + "data": { + "narrative_title": "Refdata import", + "is_narratorial": false, + "data_objects": [{ + "name": "ecoli_2contigs_orig", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "small_genbank__test_batch_sub_dir_1", + "obj_type": "KBaseGenomes.Genome-15.1" + }, { + "name": "test_import_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "test_import", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "GCF_002287175.1_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "GCF_002287175.1", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "GCF_000302455.1_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "GCF_000302455.1", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "GCF_000762265.1_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "GCF_000762265.1", + "obj_type": "KBaseGenomes.Genome-17.0" + }], + "owner": "jayrbolton", + "modified_at": 1586801969000, + "cells": [{ + "desc": "Load RefSeq genomes from NCBI to KBase", + "cell_type": "kbase_app" + }, { + "desc": "List RefSeq genomes in KBase", + "cell_type": "kbase_app" + }, { + "desc": "List RefSeq genomes in NCBI databases", + "cell_type": "kbase_app" + }], + "total_cells": 3, + "static_narrative_saved": null, + "static_narrative_ref": null, + "shared_users": ["jayrbolton"], + "creation_date": "2019-11-20T21:56:35+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "4.0", + "obj_type_module": "KBaseNarrative", + "index_runner_ver": "1.9.17" + }, + "id": "WS::45320:1", + "kbase_id": "45320/1/1", + "index_name": "narrative_2", + "index_version": 0, + "highlight": { + "data_objects.name": ["ecoli_2contigs_orig"] + } + }, { + "object_name": "ecoli_2contigs_orig", + "workspace_id": 33192, + "object_id": 36, + "object_version": 1, + "timestamp": 1574185738033, + "workspace_type_name": 
"Genome", + "creator": "jayrbolton", + "data": { + "genome_id": "ecoli_2contigs_orig", + "scientific_name": "Escherichia coli str. K-12 substr. MG1655", + "publication_titles": ["Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "Workshop on Annotation of Escherichia coli K-12", "Direct Submission", "ASAP: Escherichia coli K-12 strain MG1655 version m56", "The complete genome sequence of Escherichia coli K-12", "A more accurate sequence comparison between genomes of Escherichia coli K12 W3110 and MG1655 strains"], + "publication_authors": ["Arnaud,M., Berlyn,M.K.B., Blattner,F.R., Galperin,M.Y., Glasner,J.D., Horiuchi,T., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Riley,M., Rudd,K.E., Serres,M.H., Thomas,G.H. and Wanner,B.L.", "Riley,M., Abe,T., Arnaud,M.B., Berlyn,M.K., Blattner,F.R., Chaudhuri,R.R., Glasner,J.D., Horiuchi,T., Keseler,I.M., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Rudd,K.E., Serres,M.H., Thomas,G.H., Thomson,N.R., Wishart,D. and Wanner,B.L.", "Glasner,J.D., Perna,N.T., Plunkett,G. III, Anderson,B.D., Bockhorst,J., Hu,J.C., Riley,M., Rudd,K.E. and Serres,M.H.", "Rudd,K.E.", "Hayashi,K., Morooka,N., Mori,H. and Horiuchi,T.", "Hayashi,K., Morooka,N., Yamamoto,Y., Fujita,K., Isono,K., Choi,S., Ohtsubo,E., Baba,T., Wanner,B.L., Mori,H. and Horiuchi,T.", "Blattner,F.R., Plunkett,G. III, Bloch,C.A., Perna,N.T., Burland,V., Riley,M., Collado-Vides,J., Glasner,J.D., Rode,C.K., Mayhew,G.F., Gregor,J., Davis,N.W., Kirkpatrick,H.A., Goeden,M.A., Rose,D.J., Mau,B. and Shao,Y.", "Perna,N.T.", "Blattner,F.R. and Plunkett,G. III.", "Plunkett,G. 
III."], + "size": 4641772, + "num_contigs": 2, + "genome_type": null, + "gc_content": 0.5079, + "taxonomy": "Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli; Escherichia coli K-12", + "mean_contig_length": 2320886.0, + "external_origination_date": "08-Aug-2016", + "original_source_file_name": "GCF_000005845.2_ASM584v2_altered_genomic.gbff", + "cds_count": 4319, + "feature_count": 4319, + "mrna_count": 0, + "non_coding_feature_count": 773, + "assembly_ref": "45290:1:1", + "source_id": "NC_000913", + "feature_counts": { + "CDS": 4319, + "gene": 4498, + "misc_feature": 11, + "mobile_element": 49, + "ncRNA": 65, + "non_coding_features": 773, + "non_coding_genes": 179, + "protein_encoding_gene": 4319, + "rRNA": 22, + "rep_origin": 1, + "repeat_region": 355, + "tRNA": 89, + "tmRNA": 2 + }, + "source": "Genbank", + "warnings": [], + "shared_users": ["jayrbolton"], + "creation_date": "2020-09-08T21:00:43+0000", + "is_public": true, + "copied": "45290/2/1", + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::33192:36", + "kbase_id": "33192/36/1", + "index_name": "genome", + "index_version": 2, + "highlight": { + "genome_id": ["ecoli_2contigs_orig"], + "object_name": ["ecoli_2contigs_orig"] + } + }, { + "object_name": "Narrative.1528306445083", + "workspace_id": 33192, + "object_id": 1, + "object_version": 57, + "timestamp": 1597187547970, + "workspace_type_name": "Narrative", + "creator": "jayrbolton", + "data": { + "narrative_title": "Test fiesta", + "is_narratorial": false, + "data_objects": [{ + "name": "MEGAHIT.contigss", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "rhodobacter-genome", + "obj_type": "KBaseGenomes.Genome-12.3" + }, { + "name": "Rhodobacter_sphaeroides_2.4.1", + "obj_type": "KBaseGenomes.Genome-7.0" + }, { + "name": "rhodobacter-metabolic-model", + "obj_type": "KBaseFBA.FBAModel-14.0" + }, { + "name": "rhodobacter-metabolic-model.gf.0", + "obj_type": "KBaseFBA.FBA-13.0" + }, { + "name": "MEGAHIT.contigsss", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "rhodobacter.art.q20.int.PE.reads_alignment", + "obj_type": "KBaseRNASeq.RNASeqAlignment-9.1" + }, { + "name": "Ecoli_go_term", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "Candidatus_Nealsonbacteria_bacterium_RBG_13_37_56.gbk_genome_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "GCF_000302455.1_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "GCF_000302455.1", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "GCF_002287175.1_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "GCF_002287175.1", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "GCF_004368345.1", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "GCF_004368345.1_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "ecoli_2contigs_orig", + "obj_type": "KBaseGenomes.Genome-17.0" + }], + "owner": "jayrbolton", + "modified_at": 1599598844000, + "cells": [{ + "desc": "", + "cell_type": "code_cell" + }, { + "desc": "Align Reads using Bowtie2 v2.3.2", + "cell_type": "kbase_app" + }, { + "desc": "Echo test", + "cell_type": "kbase_app" + }], + "total_cells": 3, + "static_narrative_saved": "1597187531703", + "static_narrative_ref": "/33192/56/", + "shared_users": ["jayrbolton"], + "creation_date": 
"2018-06-06T17:34:05+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "4.0", + "obj_type_module": "KBaseNarrative", + "index_runner_ver": "1.9.17" + }, + "id": "WS::33192:1", + "kbase_id": "33192/1/1", + "index_name": "narrative_2", + "index_version": 0, + "highlight": { + "data_objects.name": ["ecoli_2contigs_orig"] + } + }], + "access_group_narrative_info": { + "45320": ["Refdata import", 1, 1586801969000, "jayrbolton", "Jay Bolton"], + "33192": ["Test fiesta", 1, 1599598844000, "jayrbolton", "Jay Bolton"] + } + }], + "id": "5092263263143479" +} \ No newline at end of file diff --git a/tests/integration/data/legacy/search_objects/case-02-request.json b/tests/integration/data/legacy/search_objects/case-02-request.json new file mode 100644 index 0000000..9ba9364 --- /dev/null +++ b/tests/integration/data/legacy/search_objects/case-02-request.json @@ -0,0 +1,34 @@ +{ + "params": [{ + "match_filter": { + "full_text_in_all": "genome", + "exclude_subobjects": 1 + }, + "pagination": { + "start": 0, + "count": 15 + }, + "post_processing": { + "ids_only": 0, + "skip_info": 0, + "skip_keys": 0, + "skip_data": 0, + "include_highlight": 1, + "add_narrative_info": 1, + "add_access_group_info": 1 + }, + "access_filter": { + "with_private": 1, + "with_public": 1 + }, + "sorting_rules": [{ + "property": "timestamp", + "ascending": 0, + "is_object_property": 0 + }], + "object_types": ["Narrative", "Assembly", "Genome", "PairedEndLibrary", "Pangenome", "Tree", "Media"] + }], + "method": "KBaseSearchEngine.search_objects", + "version": "1.1", + "id": "0008033925138730691" +} \ No newline at end of file diff --git a/tests/integration/data/legacy/search_objects/case-02-response.json b/tests/integration/data/legacy/search_objects/case-02-response.json new file mode 100644 index 0000000..0795394 --- /dev/null +++ b/tests/integration/data/legacy/search_objects/case-02-response.json @@ -0,0 +1,1307 @@ +{ + "id": "0008033925138730691", + "version": "1.1", + "result": [{ + "pagination": { + "start": 0, + "count": 15 + }, + "sorting_rules": [{ + "property": "timestamp", + "ascending": 0, + "is_object_property": 0 + }], + "total": 18011, + "search_time": 2445, + "objects": [{ + "object_name": "Narrative.1594582422207", + "workspace_id": 52407, + "object_id": 1, + "workspace_version": 65, + "timestamp": 1614657640817, + "workspace_type_name": "Narrative", + "creator": "zimingy", + "data": { + "narrative_title": "KBase Test Data", + "is_narratorial": false, + "data_objects": [{ + "name": "small.interlaced_reads", + "obj_type": "KBaseFile.PairedEndLibrary-2.2" + }, { + "name": "metagenome.gff_metagenome.assembly.fa_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "KBase_derived_16_paired_trim_MEGAHIT.contigs.fa_genome.gff_genome.assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "KBase_derived_16_paired_trim_MEGAHIT.contigs.fa_genome.gff_genome", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "archaea_test.fa_assembly.fa_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "3300011599_1.fa_assembly.fa_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "Listeria_monocytogenes_000196035.assembly.fa_assembly.fa_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "KBase_derived_GCF_002287175.1.gbff_genome_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "KBase_derived_GCF_002287175.1.gbff_genome", 
+ "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "metagenome_test_annotated.assembly.fa_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "Pseudomonas_stutzeri_RCH2.assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "test_metagenome", + "obj_type": "KBaseMetagenomes.AnnotatedMetagenomeAssembly-1.0" + }, { + "name": "metagenome_badabing.assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "metagenome_badabing", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "metagenome_badabing.assembly.fa_metagenome.assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "metagenome_badabing.assembly.fa_metagenome", + "obj_type": "KBaseMetagenomes.AnnotatedMetagenomeAssembly-1.0" + }], + "owner": "scanon", + "modified_at": 1614658102000, + "cells": [{ + "desc": "KBase Test Data\nThis narrative is just a place holder for test data. \nIf you add data to this narrative, you need to also add the same data with the exact same name in each of the other deploys.\n", + "cell_type": "markdown" + }, { + "desc": "Name: metagenome_badabing (MetagenomeAPI)\nThis is an assembly data set. The source can be found here...\nBerkeley: /kbase/testdata/metagenomebadabing.GFF_FASTA.zip\n", + "cell_type": "markdown" + }, { + "desc": "Import GFF3/FASTA file as Genome from Staging Area", + "cell_type": "kbase_app" + }, { + "desc": "Import GFF3/FASTA file as Annotated Metagenome Assembly from Staging Area", + "cell_type": "kbase_app" + }, { + "desc": "kbaseGenomeView", + "cell_type": "widget" + }, { + "desc": "Name: PseudomonasstutzeriRCH2.assembly (MetagemoneUtils)\nThis is an assembly data set. The source can be found here...\nBerkeley: /kbase/testdata/Pseudomonasstutzeri_RCH2.assembly.FASTA.zip\n", + "cell_type": "markdown" + }, { + "desc": "Import FASTA File as Assembly from Staging Area", + "cell_type": "kbase_app" + }, { + "desc": "Name: metagenometestannotated.assembly.fa_assembly (MetaGenomeAPI)\nThis is an assembly data set. The source can be found here...\nBerkeley: /kbase/testdata/metagenometest_annotated.assembly.FASTA.zip\n", + "cell_type": "markdown" + }, { + "desc": "Name: KBasederivedGCF002287175.1.gbffgenome (GenomeFileUtil)\nThis is a genome data set. The source can be found here...\nBerkeley: /kbase/testdata/GCF002287175.1.GENBANK.zip\n", + "cell_type": "markdown" + }, { + "desc": "Import GenBank File as Genome from Staging Area", + "cell_type": "kbase_app" + }, { + "desc": "Upload of a small interlaced read library\nlocation: ftp://ftp.kbase.us/test_data/small.inter.fq\n", + "cell_type": "markdown" + }, { + "desc": "Import FASTQ/SRA File as Reads from Staging Area", + "cell_type": "kbase_app" + }, { + "desc": "Name: archaeatest.faassembly.fa_assembly(AessemblyUtil)\nThis is a assembly data set. The source can be found here...\nBerkeley: /kbase/testdata/archaeatest.fa_assembly.FASTA.zip\n", + "cell_type": "markdown" + }, { + "desc": "Name: Listeriamonocytogenes000196035.assembly.faassembly.faassembly(AessemblyUtil)\nThis is a assembly data set. The source can be found here:\nBerkeley: /kbase/testdata/Listeriamonocytogenes000196035.assembly.faassembly.FASTA.zip\n", + "cell_type": "markdown" + }, { + "desc": "Name: KBasederived16pairedtrimMEGAHIT.contigs.fagenome.gff_genome(AessemblyUtil)\nThis is a genome data set. 
The source can be found here...\nBerkeley: /kbase/testdata/16pairedtrimMEGAHIT.contigs.fagenome.GFF__FASTA.zip\n", + "cell_type": "markdown" + }, { + "desc": "KBase_derived_16_paired_trim_MEGAHIT.contigs.fa_genome.gff_genome", + "cell_type": "data" + }, { + "desc": "Name: 33000115991.faassembly.fa_assembly(AessemblyUtil)\nThis is a assembly data set. The source can be found here:\nBerkeley: /kbase/testdata/33000115991.fa_assembly.FASTA.zip\n", + "cell_type": "markdown" + }, { + "desc": "Name: metagenome.gffmetagenome.assembly.faassembly(AessemblyUtil)\nThis is a assembly data set. The source can be found here:\nBerkeley: /kbase/testdata/metagenome.gffmetagenome.assembly.fa_assembly\n", + "cell_type": "markdown" + }], + "total_cells": 26, + "static_narrative_saved": null, + "static_narrative_ref": null, + "shared_users": ["zimingy", "scanon"], + "creation_date": "2020-07-12T19:33:42+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "4.0", + "obj_type_module": "KBaseNarrative", + "index_runner_ver": "1.9.17" + }, + "guid": "WS:52407/1/1", + "kbase_id": "52407/1/1", + "index_name": "narrative_2", + "index_version": 0, + "highlight": { + "cells.desc": ["Import GFF3/FASTA file as Genome from Staging Area", "Name: KBasederivedGCF002287175.1.gbffgenome (GenomeFileUtil)\nThis is a genome data set.", "Import GenBank File as Genome from Staging Area", "Name: KBasederived16pairedtrimMEGAHIT.contigs.fagenome.gff_genome(AessemblyUtil)\nThis is a genome data"], + "workspace_type_name": ["Narrative"] + } + }, { + "object_name": "metagenome_badabing", + "workspace_id": 52407, + "object_id": 41, + "workspace_version": 1, + "timestamp": 1614655779259, + "workspace_type_name": "Genome", + "creator": "zimingy", + "data": { + "genome_id": "metagenome_badabing", + "scientific_name": "unknown_taxon", + "publication_titles": [], + "publication_authors": [], + "size": 70257324, + "num_contigs": 87272, + "genome_type": "draft isolate", + "gc_content": 0.63534, + "taxonomy": "Unconfirmed Organism", + "mean_contig_length": 805.0385461545513, + "external_origination_date": null, + "original_source_file_name": null, + "cds_count": 131854, + "feature_count": 131854, + "mrna_count": 0, + "non_coding_feature_count": 1733, + "assembly_ref": "52407:40:1", + "source_id": "unknown", + "feature_counts": { + "CDS": 131854, + "gene": 131854, + "non_coding_features": 1733, + "protein_encoding_gene": 131854, + "rRNA": 287, + "repeat_region": 422, + "tRNA": 1024 + }, + "source": "Other", + "warnings": ["Genome molecule_type SingleLetterAlphabet is not expected for domain Unknown.", "SUSPECT: This genome has 131854 genes that needed to be spoofed for existing parentless CDS.", "Unable to determine organism taxonomy"], + "shared_users": ["zimingy", "scanon"], + "creation_date": "2021-03-02T03:30:28+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "guid": "WS:52407/41/1", + "kbase_id": "52407/41/1", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "warnings": ["Genome molecule_type SingleLetterAlphabet is not expected for domain Unknown.", "SUSPECT: This genome has 131854 genes that needed to be spoofed for existing parentless CDS."], + "workspace_type_name": ["Genome"] + } + }, { + "object_name": "Narrative.1484955375553", + "workspace_id": 16325, + "object_id": 1, + "workspace_version": 30, + "timestamp": 1614644544954, + "workspace_type_name": 
"Narrative", + "creator": "gaprice", + "data": { + "narrative_title": "QUASTtest", + "is_narratorial": false, + "data_objects": [{ + "name": "foobarbaz", + "obj_type": "KBaseGenomes.ContigSet-3.0" + }, { + "name": "MEGAHIT.contigs", + "obj_type": "KBaseGenomeAnnotations.Assembly-4.1" + }], + "owner": "gaprice", + "modified_at": 1614644658000, + "cells": [{ + "desc": "QUAST", + "cell_type": "kbase_app" + }, { + "desc": "Export Data Object To Staging Area", + "cell_type": "kbase_app" + }, { + "desc": "Assemble Reads with MEGAHIT", + "cell_type": "kbase_app" + }, { + "desc": "QUAST - Quality Assessment Tool for Genome Assemblies", + "cell_type": "kbase_app" + }, { + "desc": "Assess Quality of Assemblies with QUAST - v4.4", + "cell_type": "kbase_app" + }], + "total_cells": 9, + "static_narrative_saved": null, + "static_narrative_ref": null, + "shared_users": ["kbasetest", "swwang", "eapearson", "jrbolton", "janaka", "janakakbase", "thomasoniii", "gaprice"], + "creation_date": "2017-01-20T23:36:22+0000", + "is_public": false, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "4.0", + "obj_type_module": "KBaseNarrative", + "index_runner_ver": "1.9.17" + }, + "guid": "WS:16325/1/1", + "kbase_id": "16325/1/1", + "index_name": "narrative_2", + "index_version": 0, + "highlight": { + "cells.desc": ["QUAST - Quality Assessment Tool for Genome Assemblies"], + "workspace_type_name": ["Narrative"] + } + }, { + "object_name": "Narrative.1587165662023", + "workspace_id": 51911, + "object_id": 16, + "workspace_version": 50, + "timestamp": 1613962939430, + "workspace_type_name": "Narrative", + "creator": "dylan", + "data": { + "narrative_title": "CI - BLAST tests on AMA", + "is_narratorial": false, + "data_objects": [{ + "name": "HL1M-N_Prodigal_Rfam-FILTER-2000-NOFUNC.AMA.assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "HL1M-N_Prodigal_Rfam-FILTER-2000-NOFUNC.AMA", + "obj_type": "KBaseMetagenomes.AnnotatedMetagenomeAssembly-1.0" + }, { + "name": "ama_genes.gff_metagenome.assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "ama_genes.gff_metagenome", + "obj_type": "KBaseMetagenomes.AnnotatedMetagenomeAssembly-1.0" + }, { + "name": "ama_original_gene_calls-Prokka.AMA.assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "ama_fresh_gene_calls-Prokka.AMA.assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "ama_fresh_gene_calls-Prokka.AMA", + "obj_type": "KBaseMetagenomes.AnnotatedMetagenomeAssembly-1.0" + }, { + "name": "ama_original_gene_calls-Prokka.AMA", + "obj_type": "KBaseMetagenomes.AnnotatedMetagenomeAssembly-1.0" + }, { + "name": "foo-import_test2.AMA.assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "foo-import_test2.AMA", + "obj_type": "KBaseMetagenomes.AnnotatedMetagenomeAssembly-1.0" + }, { + "name": "query-foo-BLASTn.FS.Seq", + "obj_type": "KBaseSequences.SequenceSet-1.1" + }, { + "name": "foo-BLASTn.FS", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "query-foo-BLASTp.FS.Seq", + "obj_type": "KBaseSequences.SequenceSet-1.1" + }, { + "name": "foo-BLASTp.FS", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "Compost_HQ_Bins.AMA", + "obj_type": "KBaseMetagenomes.AnnotatedMetagenomeAssembly-1.0" + }, { + "name": "Bin.012-Prokka.Genome", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "Bin.035-Prokka.Genome", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "Bin.001-Prokka.Genome", + "obj_type": 
"KBaseGenomes.Genome-17.0" + }, { + "name": "Bin.003-Prokka.Genome", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "Bin.004-Prokka.Genome", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "Bin.062-Prokka.Genome", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "Bin.032-Prokka.Genome", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "Compost_MAGs.GenomeSet", + "obj_type": "KBaseSearch.GenomeSet-3.0" + }, { + "name": "query-Compost_MAGs-AMA-rpoB.FeatureSet.Seq", + "obj_type": "KBaseSequences.SequenceSet-1.1" + }, { + "name": "query-Compost_MAGs-genomes-rpoB.FeatureSet.Seq", + "obj_type": "KBaseSequences.SequenceSet-1.1" + }, { + "name": "Compost_MAGs-genomes-rpoB.FeatureSet", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "Compost_MAGs-AMA-rpoB.FeatureSet", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "Genome_plus_AMA-rpoB.FeatureSet", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "Compost_MAGs.GenomeSet-BLASTp_Search.FeatureSet", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "Compost_HQ_Bins.AMA-BLASTp_Search.FeatureSet", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "BLASTp_Search.FeatureSet", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "Compost_MAGs.GenomeSet-run2-BLASTp_Search.FeatureSet", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "Compost_MAGs.GenomeSet-run3-BLASTp_Search.FeatureSet", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "Compost_MAGs.GenomeSet-run4-BLASTp_Search.FeatureSet", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "Compost_MAGs.GenomeSet-run5-BLASTp_Search.FeatureSet", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "Compost_HQ_Bins.AMA-run6-BLASTp_Search.FeatureSet", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "Compost_MAGs.GenomeSet-run6-BLASTp_Search.FeatureSet", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "run6-BLASTp_Search.FeatureSet", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "Compost_HQ_Bins.AMA-run7-BLASTp_Search.FeatureSet", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "Compost_MAGs.GenomeSet-run7-BLASTp_Search.FeatureSet", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "run7-BLASTp_Search.FeatureSet", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }], + "owner": "dylan", + "modified_at": 1613962939000, + "cells": [{ + "desc": "Batch Create Genome Set - v1.2.0", + "cell_type": "kbase_app" + }, { + "desc": "ama_fresh_gene_calls-Prokka.AMA", + "cell_type": "data" + }, { + "desc": "HL1M-N_Prodigal_Rfam-FILTER-2000-NOFUNC.AMA", + "cell_type": "data" + }, { + "desc": "BLASTn nuc-nuc Search - v2.10.1", + "cell_type": "kbase_app" + }, { + "desc": "BLASTp prot-prot Search - v2.10.1", + "cell_type": "kbase_app" + }, { + "desc": "foo-BLASTp.FS", + "cell_type": "data" + }, { + "desc": "Compost_MAGs-genomes-rpoB.FeatureSet", + "cell_type": "data" + }, { + "desc": "Compost_MAGs-AMA-rpoB.FeatureSet", + "cell_type": "data" + }, { + "desc": "Merge FeatureSets - v1.0.1", + "cell_type": "kbase_app" + }, { + "desc": "kbaseReportView", + "cell_type": "widget" + }, { + "desc": "Genome_plus_AMA-rpoB.FeatureSet", + "cell_type": "data" + }, { + "desc": "BLASTp prot-prot Search - v2.11.0", + "cell_type": "kbase_app" + }], + "total_cells": 14, + "static_narrative_saved": null, + "static_narrative_ref": null, + "shared_users": ["eapearson", "dylan", 
"kbaseuitest"], + "creation_date": "2020-07-04T04:27:55+0000", + "is_public": false, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "4.0", + "obj_type_module": "KBaseNarrative", + "index_runner_ver": "1.9.17" + }, + "guid": "WS:51911/16/1", + "kbase_id": "51911/16/1", + "index_name": "narrative_2", + "index_version": 0, + "highlight": { + "cells.desc": ["Batch Create Genome Set - v1.2.0"], + "workspace_type_name": ["Narrative"] + } + }, { + "object_name": "Narrative.1605571868207", + "workspace_id": 57559, + "object_id": 11, + "workspace_version": 107, + "timestamp": 1613410653476, + "workspace_type_name": "Narrative", + "creator": "ialarmedalien", + "data": { + "narrative_title": "Many cell types", + "is_narratorial": false, + "data_objects": [{ + "name": "reads_test3_contigs", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "MyReads", + "obj_type": "KBaseSets.ReadsSet-1.2" + }, { + "name": "HierachicalClusters", + "obj_type": "KBaseFeatureValues.FeatureClusters-1.0" + }, { + "name": "KMeansClusters", + "obj_type": "KBaseFeatureValues.FeatureClusters-1.0" + }, { + "name": "Shewanella_Impute", + "obj_type": "KBaseFeatureValues.ExpressionMatrix-2.0" + }, { + "name": "MyReadsSet98", + "obj_type": "KBaseSets.ReadsSet-1.2" + }, { + "name": "CombinedReads", + "obj_type": "KBaseSets.ReadsSet-1.2" + }, { + "name": "combinatedReads", + "obj_type": "KBaseSets.ReadsSet-1.2" + }], + "owner": "ialarmedalien", + "modified_at": 1613410653000, + "cells": [{ + "desc": "", + "cell_type": "code_cell" + }, { + "desc": "advancedViewCell\n", + "cell_type": "markdown" + }, { + "desc": "from biokbase.narrative.jobs.appmanager import AppManager\nAppManager().run_local_app_advanced(\n \"eapearson_customInputDev/validate_custom_input\",\n {\n \"param1\": \"\",\n \"param2\": None\n},\n None,\n tag=\"dev\",\n cell_id=\"33ad1655-e44e-45f2-812a-64ec19d9d0a0\",\n run_id=\"29391eac-21ac-4b26-88a6-a905e7be5c57\"\n)", + "cell_type": "code_cell" + }, { + "desc": "appCell2\n", + "cell_type": "markdown" + }, { + "desc": "app in batch mode\n", + "cell_type": "markdown" + }, { + "desc": "App Sleep", + "cell_type": "kbase_app" + }, { + "desc": "Cancelled whilst queueing\n", + "cell_type": "markdown" + }, { + "desc": "Cancelled whilst running\n", + "cell_type": "markdown" + }, { + "desc": "Generate an App Error", + "cell_type": "kbase_app" + }, { + "desc": "Assess Genome Quality with CheckM - v1.0.8", + "cell_type": "kbase_app" + }, { + "desc": "Align Reads using Bowtie2 v2.3.2", + "cell_type": "kbase_app" + }, { + "desc": "Run Templatomatic", + "cell_type": "kbase_app" + }, { + "desc": "bulkImportCell\n", + "cell_type": "markdown" + }, { + "desc": "print('show code settings: code shown')\nprint('python is shit')\n", + "cell_type": "code_cell" + }, { + "desc": "print('show code settings: code hidden')", + "cell_type": "code_cell" + }, { + "desc": "dataCell\n", + "cell_type": "markdown" + }, { + "desc": "Shewanella_Impute", + "cell_type": "data" + }, { + "desc": "editorCell\n", + "cell_type": "markdown" + }, { + "desc": "markdown cell\nThis is a markdown cell in all its glory!\n", + "cell_type": "markdown" + }, { + "desc": "outputCell\n", + "cell_type": "markdown" + }, { + "desc": "kbaseReportView", + "cell_type": "widget" + }, { + "desc": "viewCell\n", + "cell_type": "markdown" + }], + "total_cells": 29, + "static_narrative_saved": null, + "static_narrative_ref": null, + "shared_users": ["ialarmedalien"], + "creation_date": "2020-12-10T17:15:02+0000", + "is_public": true, + "copied": null, + 
"tags": ["narrative"], + "obj_type_version": "4.0", + "obj_type_module": "KBaseNarrative", + "index_runner_ver": "1.9.17" + }, + "guid": "WS:57559/11/1", + "kbase_id": "57559/11/1", + "index_name": "narrative_2", + "index_version": 0, + "highlight": { + "cells.desc": ["Assess Genome Quality with CheckM - v1.0.8"], + "workspace_type_name": ["Narrative"] + } + }, { + "object_name": "Narrative.1610463857540", + "workspace_id": 58624, + "object_id": 1, + "workspace_version": 44, + "timestamp": 1612897762108, + "workspace_type_name": "Narrative", + "creator": "zimingy", + "data": { + "narrative_title": "kbase test data", + "is_narratorial": false, + "data_objects": [{ + "name": "3300011599_1.fa_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "16_paired_trim_MEGAHIT.contigs.fa_genome.assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "16_paired_trim_MEGAHIT.contigs.fa_genome", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "metagenome.gff_metagenome.assembly.fa_metagenome.assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "metagenome.gff_metagenome.assembly.fa_metagenome", + "obj_type": "KBaseMetagenomes.AnnotatedMetagenomeAssembly-1.0" + }, { + "name": "archaea_test.fa_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "Listeria_monocytogenes_000196035.assembly.fa_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "metagenome_test_annotated.assembly.fa_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "gene_ontology", + "obj_type": "KBaseOntology.OntologyDictionary-2.0" + }, { + "name": "metagenome_test_annotated.assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "metagenome_badabing.assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "metagenome_badabing", + "obj_type": "KBaseMetagenomes.AnnotatedMetagenomeAssembly-1.0" + }, { + "name": "GCF_002287175.1", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "metagenome_badabing.assembly.fa_metagenome.assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "metagenome_badabing.assembly.fa_metagenome", + "obj_type": "KBaseMetagenomes.AnnotatedMetagenomeAssembly-1.0" + }, { + "name": "Rhodobacter_CACIA_14H1_contigs", + "obj_type": "KBaseGenomes.ContigSet-2.0" + }, { + "name": "Rhodobacter_CACIA_14H1_contigs.fa_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }], + "owner": "zimingy", + "modified_at": 1614194641000, + "cells": [{ + "desc": "This is the test narrative for AssemblyUtil tests\n", + "cell_type": "markdown" + }, { + "desc": "Import FASTA File as Assembly from Staging Area", + "cell_type": "kbase_app" + }, { + "desc": "Import GFF3/FASTA file as Genome from Staging Area", + "cell_type": "kbase_app" + }, { + "desc": "Rhodobacter_CACIA_14H1_contigs", + "cell_type": "data" + }, { + "desc": "Import GFF3/FASTA file as Annotated Metagenome Assembly from Staging Area", + "cell_type": "kbase_app" + }, { + "desc": "kbaseGenomeView", + "cell_type": "widget" + }], + "total_cells": 16, + "static_narrative_saved": null, + "static_narrative_ref": null, + "shared_users": ["zimingy"], + "creation_date": "2021-01-12T15:04:17+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "4.0", + "obj_type_module": "KBaseNarrative", + "index_runner_ver": "1.9.17" + }, + "guid": "WS:58624/1/1", + "kbase_id": "58624/1/1", + "index_name": 
"narrative_2", + "index_version": 0, + "highlight": { + "cells.desc": ["Import GFF3/FASTA file as Genome from Staging Area"], + "workspace_type_name": ["Narrative"] + } + }, { + "object_name": "Rhodobacter_sphaeroides_2.4.1_KBase.RAST", + "workspace_id": 59262, + "object_id": 4, + "workspace_version": 1, + "timestamp": 1611981183498, + "workspace_type_name": "Genome", + "creator": "eapearson", + "data": { + "non_coding_feature_count": 67, + "copied": null, + "genome_id": "Rhodobacter_sphaeroides_2.4.1_KBase.RAST", + "taxonomy": "Bacteria; Proteobacteria; Alphaproteobacteria; Rhodobacterales; Rhodobacteraceae; Rhodobacter; Rhodobacter sphaeroides 2.4.1", + "source": "NCBI", + "publication_titles": [], + "num_contigs": 7, + "assembly_ref": "16:12:1", + "mean_contig_length": 657568.1428571428, + "obj_type_module": "KBaseGenomes", + "feature_count": 4280, + "genome_type": null, + "gc_content": 0.688, + "cds_count": 4280, + "shared_users": ["eaptest30", "eapearson", "*"], + "warnings": ["Genome molecule_type Unknown is not expected for domain Bacteria.", "Unable to determine organism taxonomy"], + "external_origination_date": null, + "mrna_count": 4280, + "scientific_name": "Rhodobacter sphaeroides 2.4.1", + "creation_date": "2021-01-30T04:33:06+0000", + "obj_type_version": "17.0", + "index_runner_ver": "1.9.17", + "tags": ["narrative"], + "original_source_file_name": null, + "size": 4602977, + "publication_authors": [], + "is_public": true, + "feature_counts": { + "CDS": 4280, + "non_coding_features": 67, + "mRNA": 4280, + "protein_encoding_gene": 4280 + }, + "source_id": "NCBI" + }, + "guid": "WS:59262/4/1", + "kbase_id": "59262/4/1", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "warnings": ["Genome molecule_type Unknown is not expected for domain Bacteria."], + "workspace_type_name": ["Genome"] + } + }, { + "object_name": "KBase_derived_GCF_002287175.1.gbff_genome", + "workspace_id": 52407, + "object_id": 29, + "workspace_version": 1, + "timestamp": 1611940994702, + "workspace_type_name": "Genome", + "creator": "zimingy", + "data": { + "genome_id": "KBase_derived_GCF_002287175.1.gbff_genome", + "scientific_name": "Methanobacterium bryantii", + "publication_titles": ["Genomic analysis of methanogenic archaea reveals a shift towards energy conservation", "Direct Submission"], + "publication_authors": ["Gilmore,S.P., Henske,J.K., Sexton,J.A., Solomon,K.V., Seppala,S., Yoo,J.I., Huyett,L.M., Pressman,A., Cogan,J.Z., Kivenson,V., Peng,X., Tan,Y., Valentine,D.L. and O'Malley,M.A.", "Gilmore,S.P., Henske,J.K., Sexton,J.A., Solomon,K.V., Huyett,L.M., Yoo,J., Pressman,A., Cogan,Z., Valentine,D.L., O'Malley,M.A. 
and Tan,Y."], + "size": 3466370, + "num_contigs": 41, + "genome_type": "draft isolate", + "gc_content": 0.33192, + "taxonomy": "Unconfirmed Organism", + "mean_contig_length": 84545.60975609756, + "external_origination_date": "28-Jan-2021", + "original_source_file_name": "KBase_derived_GCF_002287175.1.gbff", + "cds_count": 3246, + "feature_count": 3246, + "mrna_count": 0, + "non_coding_feature_count": 99, + "assembly_ref": "52407:28:1", + "source_id": "NZ_LMVM01000001", + "feature_counts": { + "CDS": 3246, + "gene": 3293, + "misc_feature": 3, + "ncRNA": 2, + "non_coding_features": 99, + "non_coding_genes": 47, + "protein_encoding_gene": 3246, + "rRNA": 5, + "regulatory": 1, + "repeat_region": 1, + "tRNA": 40 + }, + "source": "RefSeq", + "warnings": ["Unable to determine organism taxonomy"], + "shared_users": ["zimingy", "scanon"], + "creation_date": "2021-01-29T17:23:17+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "guid": "WS:52407/29/1", + "kbase_id": "52407/29/1", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "workspace_type_name": ["Genome"] + } + }, { + "object_name": "GCF_002287175.1_ASM228717v1_genomic.gbff_genome_2", + "workspace_id": 52407, + "object_id": 26, + "workspace_version": 1, + "timestamp": 1611933804041, + "workspace_type_name": "Genome", + "creator": "zimingy", + "data": { + "genome_id": "GCF_002287175.1_ASM228717v1_genomic.gbff_genome_2", + "scientific_name": "Methanobacterium bryantii", + "publication_titles": ["Genomic analysis of methanogenic archaea reveals a shift towards energy conservation", "Direct Submission"], + "publication_authors": ["Gilmore,S.P., Henske,J.K., Sexton,J.A., Solomon,K.V., Huyett,L.M., Yoo,J., Pressman,A., Cogan,Z., Valentine,D.L., O'Malley,M.A. and Tan,Y.", "Gilmore,S.P., Henske,J.K., Sexton,J.A., Solomon,K.V., Seppala,S., Yoo,J.I., Huyett,L.M., Pressman,A., Cogan,J.Z., Kivenson,V., Peng,X., Tan,Y., Valentine,D.L. 
and O'Malley,M.A."], + "size": 3466370, + "num_contigs": 41, + "genome_type": "draft isolate", + "gc_content": 0.33192, + "taxonomy": "Unconfirmed Organism", + "mean_contig_length": 84545.60975609756, + "external_origination_date": "31-Jul-2019", + "original_source_file_name": "GCF_002287175.1_ASM228717v1_genomic.gbff", + "cds_count": 3246, + "feature_count": 3246, + "mrna_count": 0, + "non_coding_feature_count": 99, + "assembly_ref": "52407:25:1", + "source_id": "NZ_LMVM01000001", + "feature_counts": { + "CDS": 3246, + "gene": 3293, + "misc_feature": 3, + "ncRNA": 2, + "non_coding_features": 99, + "non_coding_genes": 47, + "protein_encoding_gene": 3246, + "rRNA": 5, + "regulatory": 1, + "repeat_region": 1, + "tRNA": 40 + }, + "source": "RefSeq", + "warnings": ["Unable to determine organism taxonomy"], + "shared_users": ["zimingy", "scanon"], + "creation_date": "2021-01-29T15:23:27+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "guid": "WS:52407/26/1", + "kbase_id": "52407/26/1", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "workspace_type_name": ["Genome"] + } + }, { + "object_name": "GCF_002287175.1_ASM228717v1_genomic.gbff_genome", + "workspace_id": 52407, + "object_id": 23, + "workspace_version": 1, + "timestamp": 1611933097634, + "workspace_type_name": "Genome", + "creator": "zimingy", + "data": { + "genome_id": "GCF_002287175.1_ASM228717v1_genomic.gbff_genome", + "scientific_name": "Methanobacterium bryantii", + "publication_titles": ["Genomic analysis of methanogenic archaea reveals a shift towards energy conservation", "Direct Submission"], + "publication_authors": ["Gilmore,S.P., Henske,J.K., Sexton,J.A., Solomon,K.V., Huyett,L.M., Yoo,J., Pressman,A., Cogan,Z., Valentine,D.L., O'Malley,M.A. and Tan,Y.", "Gilmore,S.P., Henske,J.K., Sexton,J.A., Solomon,K.V., Seppala,S., Yoo,J.I., Huyett,L.M., Pressman,A., Cogan,J.Z., Kivenson,V., Peng,X., Tan,Y., Valentine,D.L. 
and O'Malley,M.A."], + "size": 3466370, + "num_contigs": 41, + "genome_type": "draft isolate", + "gc_content": 0.33192, + "taxonomy": "Unconfirmed Organism", + "mean_contig_length": 84545.60975609756, + "external_origination_date": "28-Jan-2021", + "original_source_file_name": "KBase_derived_GCF_002287175.1.gbff", + "cds_count": 3246, + "feature_count": 3246, + "mrna_count": 0, + "non_coding_feature_count": 99, + "assembly_ref": "52407:22:1", + "source_id": "NZ_LMVM01000001", + "feature_counts": { + "CDS": 3246, + "gene": 3293, + "misc_feature": 3, + "ncRNA": 2, + "non_coding_features": 99, + "non_coding_genes": 47, + "protein_encoding_gene": 3246, + "rRNA": 5, + "regulatory": 1, + "repeat_region": 1, + "tRNA": 40 + }, + "source": "RefSeq", + "warnings": ["Unable to determine organism taxonomy"], + "shared_users": ["zimingy", "scanon"], + "creation_date": "2021-01-29T15:11:40+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "guid": "WS:52407/23/1", + "kbase_id": "52407/23/1", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "workspace_type_name": ["Genome"] + } + }, { + "object_name": "GCF_002287175.1_assembly.fa_genome", + "workspace_id": 52407, + "object_id": 19, + "workspace_version": 1, + "timestamp": 1611856021670, + "workspace_type_name": "Genome", + "creator": "zimingy", + "data": { + "genome_id": "GCF_002287175.1_assembly.fa_genome", + "scientific_name": "unknown_taxon", + "publication_titles": [], + "publication_authors": [], + "size": 3466370, + "num_contigs": 41, + "genome_type": "draft isolate", + "gc_content": 0.33192, + "taxonomy": "Unconfirmed Organism", + "mean_contig_length": 84545.60975609756, + "external_origination_date": null, + "original_source_file_name": null, + "cds_count": 3246, + "feature_count": 3246, + "mrna_count": 0, + "non_coding_feature_count": 99, + "assembly_ref": "52407:18:1", + "source_id": "unknown", + "feature_counts": { + "CDS": 3246, + "gene": 3293, + "misc_feature": 3, + "ncRNA": 2, + "non_coding_features": 99, + "non_coding_gene": 47, + "protein_encoding_gene": 3246, + "rRNA": 5, + "regulatory": 1, + "repeat_region": 1, + "tRNA": 40 + }, + "source": "Other", + "warnings": ["Genome molecule_type SingleLetterAlphabet is not expected for domain Unknown.", "Unable to determine organism taxonomy"], + "shared_users": ["zimingy", "scanon"], + "creation_date": "2021-01-28T17:47:03+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "guid": "WS:52407/19/1", + "kbase_id": "52407/19/1", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "warnings": ["Genome molecule_type SingleLetterAlphabet is not expected for domain Unknown."], + "workspace_type_name": ["Genome"] + } + }, { + "object_name": "GCF_002287175.1", + "workspace_id": 52407, + "object_id": 21, + "workspace_version": 1, + "timestamp": 1611847515429, + "workspace_type_name": "Genome", + "creator": "zimingy", + "data": { + "genome_id": "GCF_002287175.1", + "scientific_name": "Methanobacterium bryantii", + "publication_titles": ["Direct Submission", "Genomic analysis of methanogenic archaea reveals a shift towards energy conservation"], + "publication_authors": ["Gilmore,S.P., Henske,J.K., Sexton,J.A., Solomon,K.V., Seppala,S., Yoo,J.I., Huyett,L.M., Pressman,A., Cogan,J.Z., Kivenson,V., Peng,X., Tan,Y., 
Valentine,D.L. and O'Malley,M.A.", "Gilmore,S.P., Henske,J.K., Sexton,J.A., Solomon,K.V., Huyett,L.M., Yoo,J., Pressman,A., Cogan,Z., Valentine,D.L., O'Malley,M.A. and Tan,Y."], + "size": 3466370, + "num_contigs": 41, + "genome_type": null, + "gc_content": 0.33192, + "taxonomy": "Archaea; Euryarchaeota; Methanomada group; Methanobacteria; Methanobacteriales; Methanobacteriaceae; Methanobacterium", + "mean_contig_length": 84545.60975609756, + "external_origination_date": "31-Jul-2019", + "original_source_file_name": "GCF_002287175.1_ASM228717v1_genomic.gbff", + "cds_count": 3246, + "feature_count": 3246, + "mrna_count": 0, + "non_coding_feature_count": 99, + "assembly_ref": "33192:32:1", + "source_id": "NZ_LMVM01000001", + "feature_counts": { + "CDS": 3246, + "gene": 3293, + "misc_feature": 3, + "ncRNA": 2, + "non_coding_features": 99, + "non_coding_genes": 47, + "protein_encoding_gene": 3246, + "rRNA": 5, + "regulatory": 1, + "repeat_region": 1, + "tRNA": 40 + }, + "source": "RefSeq", + "warnings": [], + "shared_users": ["zimingy", "scanon"], + "creation_date": "2021-01-28T20:06:09+0000", + "is_public": true, + "copied": "58624/31/20", + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "guid": "WS:52407/21/1", + "kbase_id": "52407/21/1", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "workspace_type_name": ["Genome"] + } + }, { + "object_name": "GCF_002287175.1", + "workspace_id": 58624, + "object_id": 31, + "workspace_version": 20, + "timestamp": 1611847515429, + "workspace_type_name": "Genome", + "creator": "zimingy", + "data": { + "genome_id": "GCF_002287175.1", + "scientific_name": "Methanobacterium bryantii", + "publication_titles": ["Genomic analysis of methanogenic archaea reveals a shift towards energy conservation", "Direct Submission"], + "publication_authors": ["Gilmore,S.P., Henske,J.K., Sexton,J.A., Solomon,K.V., Huyett,L.M., Yoo,J., Pressman,A., Cogan,Z., Valentine,D.L., O'Malley,M.A. and Tan,Y.", "Gilmore,S.P., Henske,J.K., Sexton,J.A., Solomon,K.V., Seppala,S., Yoo,J.I., Huyett,L.M., Pressman,A., Cogan,J.Z., Kivenson,V., Peng,X., Tan,Y., Valentine,D.L. 
and O'Malley,M.A."], + "size": 3466370, + "num_contigs": 41, + "genome_type": null, + "gc_content": 0.33192, + "taxonomy": "Archaea; Euryarchaeota; Methanomada group; Methanobacteria; Methanobacteriales; Methanobacteriaceae; Methanobacterium", + "mean_contig_length": 84545.60975609756, + "external_origination_date": "31-Jul-2019", + "original_source_file_name": "GCF_002287175.1_ASM228717v1_genomic.gbff", + "cds_count": 3246, + "feature_count": 3246, + "mrna_count": 0, + "non_coding_feature_count": 99, + "assembly_ref": "33192:32:1", + "source_id": "NZ_LMVM01000001", + "feature_counts": { + "CDS": 3246, + "gene": 3293, + "misc_feature": 3, + "ncRNA": 2, + "non_coding_features": 99, + "non_coding_genes": 47, + "protein_encoding_gene": 3246, + "rRNA": 5, + "regulatory": 1, + "repeat_region": 1, + "tRNA": 40 + }, + "source": "RefSeq", + "warnings": [], + "shared_users": ["zimingy"], + "creation_date": "2021-01-24T20:25:34+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "guid": "WS:58624/31/1", + "kbase_id": "58624/31/1", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "workspace_type_name": ["Genome"] + } + }, { + "object_name": "Narrative.1611672842083", + "workspace_id": 59134, + "object_id": 1, + "workspace_version": 50, + "timestamp": 1611679881176, + "workspace_type_name": "Narrative", + "creator": "eapearson", + "data": { + "narrative_title": "Data Prep Feature Set Viewer Integration Test", + "is_narratorial": false, + "data_objects": [{ + "name": "GCF_000007925.1_ASM792v1_genomic.fna_assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "prochlorococus_prokka_output.assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "prochlorococus_prokka_output", + "obj_type": "KBaseMetagenomes.AnnotatedMetagenomeAssembly-1.0" + }, { + "name": "Vibrio_alginolyticus", + "obj_type": "KBaseGenomes.Genome-14.1" + }, { + "name": "query-blastn_prochloroccus.Seq", + "obj_type": "KBaseSequences.SequenceSet-1.1" + }, { + "name": "KBase_derived_mygenome.gff_edited.gff_metagenome.assembly", + "obj_type": "KBaseGenomeAnnotations.Assembly-6.0" + }, { + "name": "KBase_derived_mygenome.gff_edited.gff_metagenome", + "obj_type": "KBaseMetagenomes.AnnotatedMetagenomeAssembly-1.0" + }, { + "name": "query-compost_hq_bins_blastp_output.Seq", + "obj_type": "KBaseSequences.SequenceSet-1.1" + }, { + "name": "compost_hq_bins_blastp_output", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "Vibrio_alginolyticus_feature_set", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "merged_feature_sets", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }], + "owner": "eapearson", + "modified_at": 1611679898000, + "cells": [{ + "desc": "# Welcome to the Narrative\nfrom IPython.display import IFrame\nIFrame(\"https://www.kbase.us/narrative-welcome-cell/\", width=\"100%\", height=\"300px\")", + "cell_type": "code_cell" + }, { + "desc": "Create AMA from imported data from Dylan\nFrom narrative https://ci.kbase.us/narrative/51911\ndownload gff+faa from CompostHQBins ama\nupload to my staging area\nimport as Metagenome GFF\n", + "cell_type": "markdown" + }, { + "desc": "Import GFF3/FASTA file as Annotated Metagenome Assembly from Staging Area", + "cell_type": "kbase_app" + }, { + "desc": "Add genome\nWe are going to have featuresets with genome, ama, and mixed genome & ama.\nJust for fun, we'll also use 
another marine bacterium, Vibrio alginolyticus.\n", + "cell_type": "markdown" + }, { + "desc": "Create feature sets\nAMA\nGenome\nSimply use buildfeatureset from genome, selecting 10 features.\n", + "cell_type": "markdown" + }, { + "desc": "BLASTp prot-prot Search - v2.11.0", + "cell_type": "kbase_app" + }, { + "desc": "Build FeatureSet from Genome", + "cell_type": "kbase_app" + }, { + "desc": "Merge ama and genome feature sets\nWe want a mixed feature set, \n", + "cell_type": "markdown" + }, { + "desc": "Merge FeatureSets - v1.0.1", + "cell_type": "kbase_app" + }], + "total_cells": 9, + "static_narrative_saved": null, + "static_narrative_ref": null, + "shared_users": ["eapearson", "kbaseuitest"], + "creation_date": "2021-01-26T14:54:02+0000", + "is_public": false, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "4.0", + "obj_type_module": "KBaseNarrative", + "index_runner_ver": "1.9.16" + }, + "guid": "WS:59134/1/1", + "kbase_id": "59134/1/1", + "index_name": "narrative_2", + "index_version": 0, + "highlight": { + "cells.desc": ["Add genome\nWe are going to have featuresets with genome, ama, and mixed genome & ama.", "Create feature sets\nAMA\nGenome\nSimply use buildfeatureset from genome, selecting 10 features.", "Build FeatureSet from Genome", "Merge ama and genome feature sets\nWe want a mixed feature set,"], + "workspace_type_name": ["Narrative"] + } + }, { + "object_name": "KBase_derived_GCF_002287175.1.gff_genome", + "workspace_id": 52407, + "object_id": 16, + "workspace_version": 1, + "timestamp": 1611519563370, + "workspace_type_name": "Genome", + "creator": "zimingy", + "data": { + "genome_id": "KBase_derived_GCF_002287175.1.gff_genome", + "scientific_name": "unknown_taxon", + "publication_titles": [], + "publication_authors": [], + "size": 3466370, + "num_contigs": 41, + "genome_type": "draft isolate", + "gc_content": 0.33192, + "taxonomy": "Unconfirmed Organism", + "mean_contig_length": 84545.60975609756, + "external_origination_date": null, + "original_source_file_name": null, + "cds_count": 3246, + "feature_count": 3246, + "mrna_count": 0, + "non_coding_feature_count": 99, + "assembly_ref": "52407:15:1", + "source_id": "unknown", + "feature_counts": { + "CDS": 3246, + "gene": 3293, + "misc_feature": 3, + "ncRNA": 2, + "non_coding_features": 99, + "non_coding_gene": 47, + "protein_encoding_gene": 3246, + "rRNA": 5, + "regulatory": 1, + "repeat_region": 1, + "tRNA": 40 + }, + "source": "Other", + "warnings": ["Genome molecule_type SingleLetterAlphabet is not expected for domain Unknown.", "Unable to determine organism taxonomy"], + "shared_users": ["scanon", "zimingy"], + "creation_date": "2021-01-24T20:19:25+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.16" + }, + "guid": "WS:52407/16/1", + "kbase_id": "52407/16/1", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "warnings": ["Genome molecule_type SingleLetterAlphabet is not expected for domain Unknown."], + "workspace_type_name": ["Genome"] + } + }], + "access_group_narrative_info": { + "58624": ["kbase test data", 1, 1614194641000, "zimingy", "Ziming Yang"], + "16325": ["QUASTtest", 1, 1614644658000, "gaprice", ""], + "51911": ["CI - BLAST tests on AMA", 16, 1613963071000, "dylan", "Dylan Chivian"], + "59134": ["Data Prep Feature Set Viewer Integration Test", 1, 1611679898000, "eapearson", "Erik A Pearson"], + "57559": ["Many cell types", 11, 1613410653000, 
"ialarmedalien", "AJ Ireland"], + "52407": ["KBase Test Data", 1, 1614658102000, "scanon", "Shane Canon"], + "59262": ["Hello", 1, 1612405747000, "eapearson", "Erik A Pearson"] + }, + "access_groups_info": { + "58624": [58624, "zimingy:narrative_1610463857540", "zimingy", "2021-02-24T19:24:01+0000", 37, "n", "r", "unlocked", { + "cell_count": "1", + "narrative_nice_name": "kbase test data", + "searchtags": "narrative", + "is_temporary": "false", + "narrative": "1" + }], + "16325": [16325, "gaprice:1484955375553", "gaprice", "2021-03-02T00:24:18+0000", 17, "r", "n", "unlocked", { + "narrative_nice_name": "QUASTtest", + "searchtags": "narrative", + "is_temporary": "false", + "narrative": "1", + "data_palette_id": "6" + }], + "51911": [51911, "dylan:narrative_1593836875543", "dylan", "2021-02-22T03:04:31+0000", 75, "a", "n", "unlocked", { + "narrative_nice_name": "CI - BLAST tests on AMA", + "cell_count": "9", + "searchtags": "narrative", + "is_temporary": "false", + "narrative": "16" + }], + "59134": [59134, "eapearson:narrative_1611672842083", "eapearson", "2021-01-26T16:51:38+0000", 29, "a", "n", "unlocked", { + "cell_count": "1", + "narrative_nice_name": "Data Prep Feature Set Viewer Integration Test", + "searchtags": "narrative", + "is_temporary": "false", + "narrative": "1" + }], + "57559": [57559, "ialarmedalien:narrative_1607620501273", "ialarmedalien", "2021-02-15T17:37:33+0000", 18, "n", "r", "unlocked", { + "narrative_nice_name": "Many cell types", + "cell_count": "22", + "searchtags": "narrative", + "is_temporary": "false", + "narrative": "11" + }], + "52407": [52407, "KBaseTestData", "scanon", "2021-03-02T04:08:22+0000", 45, "n", "r", "unlocked", { + "cell_count": "1", + "narrative_nice_name": "KBase Test Data", + "searchtags": "narrative", + "is_temporary": "false", + "narrative": "1" + }], + "59262": [59262, "eapearson:narrative_1611962874127", "eapearson", "2021-02-04T02:29:07+0000", 7, "a", "r", "unlocked", { + "static_narrative_ver": "3", + "searchtags": "narrative", + "is_temporary": "false", + "narrative": "1", + "cell_count": "1", + "narrative_nice_name": "Hello", + "static_narrative_saved": "1611962934695", + "static_narrative": "/59262/3/" + }] + } + }] +} \ No newline at end of file diff --git a/tests/integration/data/legacy/search_objects/case-03-request.json b/tests/integration/data/legacy/search_objects/case-03-request.json new file mode 100644 index 0000000..af3f7be --- /dev/null +++ b/tests/integration/data/legacy/search_objects/case-03-request.json @@ -0,0 +1,38 @@ +{ + "params": [{ + "match_filter": { + "full_text_in_all": "Prochlorococcus", + "exclude_subobjects": 1, + "source_tags": ["refdata", "noindex"], + "source_tags_blacklist": 1 + }, + "pagination": { + "start": 0, + "count": 20 + }, + "post_processing": { + "ids_only": 0, + "skip_info": 0, + "skip_keys": 0, + "skip_data": 0, + "include_highlight": 1, + "add_narrative_info": 1 + }, + "access_filter": { + "with_private": 1, + "with_public": 1 + }, + "sorting_rules": [{ + "is_object_property": 0, + "property": "access_group_id", + "ascending": 0 + }, { + "is_object_property": 0, + "property": "type", + "ascending": 1 + }] + }], + "method": "KBaseSearchEngine.search_objects", + "version": "1.1", + "id": "12573638302160872" +} \ No newline at end of file diff --git a/tests/integration/data/legacy/search_objects/case-03-response.json b/tests/integration/data/legacy/search_objects/case-03-response.json new file mode 100644 index 0000000..35d1ddf --- /dev/null +++ 
b/tests/integration/data/legacy/search_objects/case-03-response.json @@ -0,0 +1,672 @@ +{ + "object_version": "1.1", + "result": [{ + "pagination": { + "start": 0, + "count": 20 + }, + "sorting_rules": [{ + "is_object_property": 0, + "property": "access_group_id", + "ascending": 0 + }, { + "is_object_property": 0, + "property": "type", + "ascending": 1 + }], + "total": 12, + "search_time": 1544, + "objects": [{ + "object_name": "GCF_000759885.1", + "workspace_id": 56638, + "object_id": 2, + "object_version": 1, + "timestamp": 1531222570501, + "workspace_type_name": "Genome", + "creator": "kbasedata", + "data": { + "genome_id": "GCF_000759885.1", + "scientific_name": "Prochlorococcus marinus str. GP2", + "publication_titles": ["Genomes of diverse isolates of the marine cyanobacterium Prochlorococcus", "Direct Submission"], + "publication_authors": ["Biller,S., Berube,P., Thompson,J., Kelly,L., Roggensack,S., Awad,L., Roache-Johnson,K., Ding,H., Giovannoni,S.J., Moore,L.R. and Chisholm,S.W.", "Biller,S.J., Berube,P.M., Berta-Thompson,J.W., Kelly,L., Roggensack,S.E., Awad,L., Roache-Johnson,K.H., Ding,H., Giovannoni,S.J., Rocap,G., Moore,L.R. and Chisholm,S.W."], + "size": 1624310, + "num_contigs": 11, + "genome_type": null, + "gc_content": 0.31164, + "taxonomy": "cellular organisms; Bacteria; Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus; Prochlorococcus marinus", + "mean_contig_length": 147664.54545454544, + "external_origination_date": "11-Apr-2017", + "original_source_file_name": "GCF_000759885.1_ASM75988v1_genomic.gbff", + "cds_count": 1760, + "feature_count": 1760, + "mrna_count": 0, + "non_coding_feature_count": 89, + "assembly_ref": "15792:64027:2", + "source_id": "NZ_JNAH01000001", + "feature_counts": { + "CDS": 1760, + "gene": 1804, + "ncRNA": 3, + "non-protein_encoding_gene": 44, + "protein_encoding_gene": 1760, + "rRNA": 3, + "regulatory": 1, + "tRNA": 37, + "tmRNA": 1 + }, + "source": "RefSeq", + "warnings": ["SUSPECT: CDS EU91_RS03000_CDS_1 has a length of 974 which is not consistent with the length of the translation included (323 amino acids)."], + "shared_users": ["kbaseuitest"], + "creation_date": "2020-11-19T01:07:33+0000", + "is_public": true, + "copied": "15792/64028/2", + "tags": ["narrative"], + "obj_type_version": "14.2", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "publication_titles": ["Genomes of diverse isolates of the marine cyanobacterium Prochlorococcus"], + "scientific_name": ["Prochlorococcus marinus str. 
GP2"], + "taxonomy": ["Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus", "; Prochlorococcus marinus"] + } + }, { + "object_name": "Narrative.1605747150690", + "workspace_id": 56638, + "object_id": 1, + "object_version": 13, + "timestamp": 1605765178549, + "workspace_type_name": "Narrative", + "creator": "kbaseuitest", + "data": { + "narrative_title": "Test Narrative for FeatureSet Integration Test", + "is_narratorial": false, + "data_objects": [{ + "name": "GCF_000759885.1", + "obj_type": "KBaseGenomes.Genome-14.2" + }, { + "name": "GCF_001766235.1", + "obj_type": "KBaseGenomes.Genome-17.0" + }, { + "name": "featureset2", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "featureset1", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, { + "name": "mergedfeatureset", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }], + "owner": "kbaseuitest", + "modified_at": 1605765178000, + "cells": [{ + "desc": "Test Narrative for FeatureSet Integration Test.\nTo reproduce:\n\nremove add a markdown cell (this one!), remove the welcome cell\nImport 2 genomes\nin this case, I copied, from RefSeq refdata:\n\nAcetobacter ascendens\nProchlorococcus marinus str. GP2\n\nat the time of writing, the public data search for refseq is broken, so I copied into this narrative from their landing pages\ncreate a FeatureSet for each Genome, using Build FeatureSet from Genome\n\nchoose the first 3 features from each genome for the feature set\nname the feature set for Acetobacter \"featureset1\", for Prochlorococcus \"featureset2\"\n\ncreate another FeatureSet combining these two using \"Merge FeatureSets\"\nin the Description parameter add the text \"merged feature set\", or whatever text you like (it will need to match the integration test.)\nin \"Output FeatureSet Name\" name it \"mergedfeatureset\"\ninsert the merged feature set object into the narrative as the last cell; it should be the 6th cell.\nfinally make the narrative public.\n\n", + "cell_type": "markdown" + }, { + "desc": "Build FeatureSet from Genome", + "cell_type": "kbase_app" + }, { + "desc": "Merge FeatureSets - v1.0.1", + "cell_type": "kbase_app" + }, { + "desc": "kbaseReportView", + "cell_type": "widget" + }, { + "desc": "mergedfeatureset", + "cell_type": "data" + }], + "total_cells": 6, + "static_narrative_saved": null, + "static_narrative_ref": null, + "shared_users": ["kbaseuitest"], + "creation_date": "2020-11-19T00:52:30+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "4.0", + "obj_type_module": "KBaseNarrative", + "index_runner_ver": "1.9.17" + }, + "index_name": "narrative_2", + "index_version": 0, + "highlight": { + "cells.desc": ["the welcome cell\nImport 2 genomes\nin this case, I copied, from RefSeq refdata:\n\nAcetobacter ascendens\nProchlorococcus", "features from each genome for the feature set\nname the feature set for Acetobacter \"featureset1\", for Prochlorococcus"] + } + }, { + "object_name": "Prochlorococcus_marinus", + "workspace_id": 53745, + "object_id": 2, + "object_version": 1, + "timestamp": 1485796830804, + "workspace_type_name": "Genome", + "creator": "qzhang", + "data": { + "genome_id": "GCF_001180265.1", + "scientific_name": "Prochlorococcus marinus", + "publication_titles": ["Direct Submission"], + "publication_authors": ["Thompson R,Luke."], + "size": 1388767, + "num_contigs": 120, + "genome_type": null, + "gc_content": 0.3111781889978665, + "taxonomy": "cellular organisms; Bacteria; 
Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus", + "mean_contig_length": null, + "external_origination_date": "08-JAN-2017", + "original_source_file_name": "GCF_001180265.1_CLC_assembled_contigs_genomic.gbff", + "cds_count": 1456, + "feature_count": 1601, + "mrna_count": 1456, + "non_coding_feature_count": 0, + "assembly_ref": "15792:64165:1", + "source_id": "NZ_CVSW01000001 (119 more accessions)", + "feature_counts": null, + "source": "refseq", + "warnings": null, + "shared_users": ["eaptest31", "eapearson"], + "creation_date": "2020-09-10T14:12:35+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "12.3", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "scientific_name": ["Prochlorococcus marinus"], + "taxonomy": ["Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus"] + } + }, { + "object_name": "Prochlorococcus_marinus", + "workspace_id": 53744, + "object_id": 4, + "object_version": 1, + "timestamp": 1485796830804, + "workspace_type_name": "Genome", + "creator": "qzhang", + "data": { + "genome_id": "GCF_001180265.1", + "scientific_name": "Prochlorococcus marinus", + "publication_titles": ["Direct Submission"], + "publication_authors": ["Thompson R,Luke."], + "size": 1388767, + "num_contigs": 120, + "genome_type": null, + "gc_content": 0.3111781889978665, + "taxonomy": "cellular organisms; Bacteria; Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus", + "mean_contig_length": null, + "external_origination_date": "08-JAN-2017", + "original_source_file_name": "GCF_001180265.1_CLC_assembled_contigs_genomic.gbff", + "cds_count": 1456, + "feature_count": 1601, + "mrna_count": 1456, + "non_coding_feature_count": 0, + "assembly_ref": "15792:64165:1", + "source_id": "NZ_CVSW01000001 (119 more accessions)", + "feature_counts": null, + "source": "refseq", + "warnings": null, + "shared_users": ["eaptest31", "eapearson", "kbaseuitest", "eaptest30"], + "creation_date": "2020-09-10T15:24:20+0000", + "is_public": false, + "copied": "53745/2/1", + "tags": ["narrative"], + "obj_type_version": "12.3", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::53744:4", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "scientific_name": ["Prochlorococcus marinus"], + "taxonomy": ["Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus"] + } + }, { + "object_name": "prokka_ouput_1", + "workspace_id": 47458, + "object_id": 7, + "object_version": 1, + "timestamp": 1585919913126, + "workspace_type_name": "Genome", + "creator": "kbaseuitest", + "data": { + "genome_id": "GCF_000015645.1", + "scientific_name": "Prochlorococcus marinus str. AS9601", + "publication_titles": ["Patterns and implications of gene gain and loss in the evolution of Prochlorococcus", "Direct Submission"], + "publication_authors": ["Kettler,G.C., Martiny,A.C., Huang,K., Zucker,J., Coleman,M.L., Rodrigue,S., Chen,F., Lapidus,A., Ferriera,S., Johnson,J., Steglich,C., Church,G.M., Richardson,P. 
and Chisholm,S.W.", "Chisholm,S., Huang,K., Martiny,A., Kettler,G., Coleman,M., Keller,K., Arkin,A., Coe,A., Rodrigue,S., Ferriera,S., Johnson,J., Kravitz,S., Beeson,K., Sutton,G., Rogers,Y.-H., Friedman,R., Frazier,M. and Venter,J.C."], + "size": 1669886, + "num_contigs": 1, + "genome_type": "Unknown", + "gc_content": 0.313215393146598, + "taxonomy": "cellular organisms; Bacteria; Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus; Prochlorococcus marinus", + "mean_contig_length": null, + "external_origination_date": "24-NOV-2016", + "original_source_file_name": "GCF_000015645.1_ASM1564v1_genomic.gbff", + "cds_count": 1780, + "feature_count": 1780, + "mrna_count": 1780, + "non_coding_feature_count": 46, + "assembly_ref": "15792:63937:1", + "source_id": "NC_008816", + "feature_counts": { + "CDS": 1780, + "gene": 1826, + "mRNA": 1780, + "non_coding_features": 46, + "non_coding_genes": 46, + "protein_encoding_gene": 1780, + "pseudogene": 14 + }, + "source": "RefSeq", + "warnings": ["Genome molecule_type Unknown is not expected for domain Bacteria.", "Unable to determine organism taxonomy"], + "shared_users": ["kbaseuitest"], + "creation_date": "2020-04-03T13:18:35+0000", + "is_public": false, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::47458:7", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "publication_titles": ["Patterns and implications of gene gain and loss in the evolution of Prochlorococcus"], + "scientific_name": ["Prochlorococcus marinus str. AS9601"], + "taxonomy": ["Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus", "; Prochlorococcus marinus"] + } + }, { + "object_name": "Prochlorococcus_marinus_str._AS9601", + "workspace_id": 47458, + "object_id": 6, + "object_version": 1, + "timestamp": 1485794271879, + "workspace_type_name": "Genome", + "creator": "qzhang", + "data": { + "genome_id": "GCF_000015645.1", + "scientific_name": "Prochlorococcus marinus str. AS9601", + "publication_titles": ["Patterns and implications of gene gain and loss in the evolution of Prochlorococcus", "Direct Submission"], + "publication_authors": ["Kettler,G.C., Martiny,A.C., Huang,K., Zucker,J., Coleman,M.L., Rodrigue,S., Chen,F., Lapidus,A., Ferriera,S., Johnson,J., Steglich,C., Church,G.M., Richardson,P. and Chisholm,S.W.", "Chisholm,S., Huang,K., Martiny,A., Kettler,G., Coleman,M., Keller,K., Arkin,A., Coe,A., Rodrigue,S., Ferriera,S., Johnson,J., Kravitz,S., Beeson,K., Sutton,G., Rogers,Y.-H., Friedman,R., Frazier,M. 
and Venter,J.C."], + "size": 1669886, + "num_contigs": 1, + "genome_type": null, + "gc_content": 0.313215393146598, + "taxonomy": "cellular organisms; Bacteria; Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus; Prochlorococcus marinus", + "mean_contig_length": null, + "external_origination_date": "24-NOV-2016", + "original_source_file_name": "GCF_000015645.1_ASM1564v1_genomic.gbff", + "cds_count": 1780, + "feature_count": 1840, + "mrna_count": 1780, + "non_coding_feature_count": 0, + "assembly_ref": "15792:63937:1", + "source_id": "NC_008816", + "feature_counts": null, + "source": "refseq", + "warnings": null, + "shared_users": ["kbaseuitest"], + "creation_date": "2020-04-03T13:11:52+0000", + "is_public": false, + "copied": "15792/63939/1", + "tags": ["narrative"], + "obj_type_version": "12.3", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::47458:6", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "publication_titles": ["Patterns and implications of gene gain and loss in the evolution of Prochlorococcus"], + "scientific_name": ["Prochlorococcus marinus str. AS9601"], + "taxonomy": ["Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus", "; Prochlorococcus marinus"] + } + }, { + "object_name": "Prochlorococcus_marinus", + "workspace_id": 44697, + "object_id": 2, + "object_version": 1, + "timestamp": 1485796717279, + "workspace_type_name": "Genome", + "creator": "qzhang", + "data": { + "genome_id": "GCF_001180245.1", + "scientific_name": "Prochlorococcus marinus", + "publication_titles": ["Direct Submission"], + "publication_authors": ["Thompson R,Luke."], + "size": 1418374, + "num_contigs": 136, + "genome_type": null, + "gc_content": 0.31359570888919286, + "taxonomy": "cellular organisms; Bacteria; Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus", + "mean_contig_length": null, + "external_origination_date": "08-JAN-2017", + "original_source_file_name": "GCF_001180245.1_CLC_assembled_contigs_genomic.gbff", + "cds_count": 1494, + "feature_count": 1648, + "mrna_count": 1494, + "non_coding_feature_count": 0, + "assembly_ref": "15792:64156:1", + "source_id": "NZ_CVSV01000001 (135 more accessions)", + "feature_counts": null, + "source": "refseq", + "warnings": null, + "shared_users": ["psdehal", "scanon", "eapearson", "marcin"], + "creation_date": "2019-11-05T18:28:29+0000", + "is_public": true, + "copied": "15792/64157/1", + "tags": ["narrative"], + "obj_type_version": "12.3", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::44697:2", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "scientific_name": ["Prochlorococcus marinus"], + "taxonomy": ["Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus"] + } + }, { + "object_name": "Prochlorococcus_marinus", + "workspace_id": 35753, + "object_id": 2, + "object_version": 1, + "timestamp": 1485796717279, + "workspace_type_name": "Genome", + "creator": "qzhang", + "data": { + "genome_id": "GCF_001180245.1", + "scientific_name": "Prochlorococcus marinus", + "publication_titles": ["Direct Submission"], + "publication_authors": ["Thompson R,Luke."], + "size": 1418374, + "num_contigs": 136, + "genome_type": null, + "gc_content": 0.31359570888919286, + "taxonomy": 
"cellular organisms; Bacteria; Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus", + "mean_contig_length": null, + "external_origination_date": "08-JAN-2017", + "original_source_file_name": "GCF_001180245.1_CLC_assembled_contigs_genomic.gbff", + "cds_count": 1494, + "feature_count": 1648, + "mrna_count": 1494, + "non_coding_feature_count": 0, + "assembly_ref": "15792:64156:1", + "source_id": "NZ_CVSV01000001 (135 more accessions)", + "feature_counts": null, + "source": "refseq", + "warnings": null, + "shared_users": ["eapearson"], + "creation_date": "2018-08-17T23:35:08+0000", + "is_public": true, + "copied": "15792/64157/1", + "tags": [], + "obj_type_version": "12.3", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::35753:2", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "scientific_name": ["Prochlorococcus marinus"], + "taxonomy": ["Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus"] + } + }, { + "object_name": "out_tree_1", + "workspace_id": 35753, + "object_id": 3, + "object_version": 1, + "timestamp": 1534549148045, + "workspace_type_name": "Tree", + "creator": "eapearson", + "data": { + "tree_name": null, + "workspace_type_name": "SpeciesTree", + "labels": [{ + "node_id": "GCF_000007925.1", + "label": "Prochlorococcus marinus subsp. marinus str. CCMP1375 (GCF_000007925.1)" + }, { + "node_id": "GCF_000011465.1", + "label": "Prochlorococcus marinus subsp. pastoris str. CCMP1986 (GCF_000011465.1)" + }, { + "node_id": "GCF_000011485.1", + "label": "Prochlorococcus marinus str. MIT 9313 (GCF_000011485.1)" + }, { + "node_id": "GCF_000012465.1", + "label": "Prochlorococcus marinus str. NATL2A (GCF_000012465.1)" + }, { + "node_id": "GCF_000012505.1", + "label": "Synechococcus sp. CC9902 (GCF_000012505.1)" + }, { + "node_id": "GCF_000012645.1", + "label": "Prochlorococcus marinus str. MIT 9312 (GCF_000012645.1)" + }, { + "node_id": "GCF_000014585.1", + "label": "Synechococcus sp. CC9311 (GCF_000014585.1)" + }, { + "node_id": "GCF_000015645.1", + "label": "Prochlorococcus marinus str. AS9601 (GCF_000015645.1)" + }, { + "node_id": "GCF_000015665.1", + "label": "Prochlorococcus marinus str. MIT 9515 (GCF_000015665.1)" + }, { + "node_id": "GCF_000015965.1", + "label": "Prochlorococcus marinus str. MIT 9301 (GCF_000015965.1)" + }, { + "node_id": "GCF_000063505.1", + "label": "Synechococcus sp. WH 7803 (GCF_000063505.1)" + }, { + "node_id": "GCF_000153065.1", + "label": "Synechococcus sp. RS9917 (GCF_000153065.1)" + }, { + "node_id": "GCF_000153825.1", + "label": "Synechococcus sp. RS9916 (GCF_000153825.1)" + }, { + "node_id": "GCF_000737535.1", + "label": "Synechococcus sp. KORDI-100 (GCF_000737535.1)" + }, { + "node_id": "GCF_000737575.1", + "label": "Synechococcus sp. KORDI-49 (GCF_000737575.1)" + }, { + "node_id": "GCF_000737595.1", + "label": "Synechococcus sp. KORDI-52 (GCF_000737595.1)" + }, { + "node_id": "GCF_000759855.1", + "label": "Prochlorococcus marinus str. MIT 9107 (GCF_000759855.1)" + }, { + "node_id": "GCF_000759955.1", + "label": "Prochlorococcus marinus str. MIT 9201 (GCF_000759955.1)" + }, { + "node_id": "GCF_000760175.1", + "label": "Prochlorococcus sp. MIT 0601 (GCF_000760175.1)" + }, { + "node_id": "GCF_000760215.1", + "label": "Prochlorococcus sp. 
MIT 0603 (GCF_000760215.1)" + }, { + "node_id": "user1", + "label": "Genome 35753/2/1 (Prochlorococcus_marinus)" + }], + "shared_users": ["eapearson"], + "creation_date": "2018-08-17T23:39:08+0000", + "is_public": true, + "copied": null, + "tags": [], + "obj_type_version": "1.0", + "obj_type_module": "KBaseTrees", + "index_runner_ver": "1.9.17" + }, + "id": "WS::35753:3", + "index_name": "tree_1", + "index_version": 0, + "highlight": { + "labels.label": ["Prochlorococcus marinus str. MIT 9313 (GCF_000011485.1)", "Prochlorococcus marinus str. NATL2A (GCF_000012465.1)", "Prochlorococcus marinus str. AS9601 (GCF_000015645.1)", "Prochlorococcus sp. MIT 0601 (GCF_000760175.1)", "Prochlorococcus sp. MIT 0603 (GCF_000760215.1)"] + } + }, { + "object_name": "Prochlorococcus_marinus_2", + "workspace_id": 35011, + "object_id": 4, + "object_version": 1, + "timestamp": 1531226116626, + "workspace_type_name": "Genome", + "creator": "kbasedata", + "data": { + "genome_id": "GCF_001180305.1", + "scientific_name": "Prochlorococcus marinus", + "publication_titles": ["Direct Submission"], + "publication_authors": ["Thompson R,Luke."], + "size": 1463721, + "num_contigs": 85, + "genome_type": null, + "gc_content": 0.30995, + "taxonomy": "cellular organisms; Bacteria; Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus", + "mean_contig_length": 17220.24705882353, + "external_origination_date": "08-Jan-2017 _ 11-Apr-2017", + "original_source_file_name": "GCF_001180305.1_CLC_assembled_contigs_genomic.gbff", + "cds_count": 1619, + "feature_count": 1619, + "mrna_count": 0, + "non_coding_feature_count": 85, + "assembly_ref": "15792:64183:2", + "source_id": "NZ_CVSX01000001", + "feature_counts": { + "CDS": 1619, + "gene": 1661, + "ncRNA": 2, + "non-protein_encoding_gene": 42, + "protein_encoding_gene": 1619, + "rRNA": 7, + "regulatory": 1, + "tRNA": 32, + "tmRNA": 1 + }, + "source": "RefSeq", + "warnings": ["SUSPECT: CDS AMO15_RS07665_CDS_1 has a length of 407 which is not consistent with the length of the translation included (134 amino acids)."], + "shared_users": ["eapearson"], + "creation_date": "2018-08-06T16:18:35+0000", + "is_public": true, + "copied": "15792/64184/2", + "tags": [], + "obj_type_version": "14.2", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::35011:4", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "scientific_name": ["Prochlorococcus marinus"], + "taxonomy": ["Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus"] + } + }, { + "object_name": "Prochlorococcus_marinus", + "workspace_id": 35011, + "object_id": 2, + "object_version": 1, + "timestamp": 1485796717279, + "workspace_type_name": "Genome", + "creator": "qzhang", + "data": { + "genome_id": "GCF_001180245.1", + "scientific_name": "Prochlorococcus marinus", + "publication_titles": ["Direct Submission"], + "publication_authors": ["Thompson R,Luke."], + "size": 1418374, + "num_contigs": 136, + "genome_type": null, + "gc_content": 0.31359570888919286, + "taxonomy": "cellular organisms; Bacteria; Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus", + "mean_contig_length": null, + "external_origination_date": "08-JAN-2017", + "original_source_file_name": "GCF_001180245.1_CLC_assembled_contigs_genomic.gbff", + "cds_count": 1494, + "feature_count": 1648, + "mrna_count": 1494, + 
"non_coding_feature_count": 0, + "assembly_ref": "15792:64156:1", + "source_id": "NZ_CVSV01000001 (135 more accessions)", + "feature_counts": null, + "source": "refseq", + "warnings": null, + "shared_users": ["eapearson"], + "creation_date": "2018-08-06T16:18:28+0000", + "is_public": true, + "copied": "15792/64157/1", + "tags": [], + "obj_type_version": "12.3", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::35011:2", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "scientific_name": ["Prochlorococcus marinus"], + "taxonomy": ["Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus"] + } + }, { + "object_name": "Prochlorococcus_marinus_1", + "workspace_id": 35011, + "object_id": 3, + "object_version": 1, + "timestamp": 1485797107623, + "workspace_type_name": "Genome", + "creator": "qzhang", + "data": { + "genome_id": "GCF_001180325.1", + "scientific_name": "Prochlorococcus marinus", + "publication_titles": ["Direct Submission"], + "publication_authors": ["Thompson R,Luke."], + "size": 1443989, + "num_contigs": 93, + "genome_type": null, + "gc_content": 0.3119442045611151, + "taxonomy": "cellular organisms; Bacteria; Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus", + "mean_contig_length": null, + "external_origination_date": "08-JAN-2017", + "original_source_file_name": "GCF_001180325.1_CLC_assembled_contigs_genomic.gbff", + "cds_count": 1550, + "feature_count": 1684, + "mrna_count": 1550, + "non_coding_feature_count": 0, + "assembly_ref": "15792:64192:1", + "source_id": "NZ_CVSZ01000001 (92 more accessions)", + "feature_counts": null, + "source": "refseq", + "warnings": null, + "shared_users": ["eapearson"], + "creation_date": "2018-08-06T16:18:34+0000", + "is_public": true, + "copied": "15792/64193/1", + "tags": [], + "obj_type_version": "12.3", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::35011:3", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "scientific_name": ["Prochlorococcus marinus"], + "taxonomy": ["Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus"] + } + }], + "access_group_narrative_info": { + "47458": ["Sample Jobs for Testing", 1, 1586207131000, "kbaseuitest", "KBase UI Test User"], + "35011": ["Marine Cyanobacteria", 1, 1533582035000, "eapearson", "Erik A Pearson"], + "35753": ["Species Tree for Search Test", 1, 1537028823000, "eapearson", "Erik A Pearson"], + "53744": ["Search2 Sharing Test", 1, 1599751906000, "eapearson", "Erik A Pearson"], + "53745": ["search2 public narrative", 1, 1599752072000, "eapearson", "Erik A Pearson"], + "44697": ["Ontology landing page links and stuff", 1, 1601927359000, "eapearson", "Erik A Pearson"], + "56638": ["Test Narrative for FeatureSet Integration Test", 1, 1605765178000, "kbaseuitest", "KBase UI Test User"] + } + }], + "id": "12573638302160872" +} \ No newline at end of file diff --git a/tests/integration/data/legacy/search_objects/case-04-request.json b/tests/integration/data/legacy/search_objects/case-04-request.json new file mode 100644 index 0000000..dd906cc --- /dev/null +++ b/tests/integration/data/legacy/search_objects/case-04-request.json @@ -0,0 +1,28 @@ +{ + "id": "xyz", + "method": "KBaseSearchEngine.search_objects", + "version": "1.1", + "params": [{ + "access_filter": { + "with_private": 0, + 
"with_public": 1 + }, + "match_filter": { + "exclude_subobjects": 1, + "full_text_in_all": "coli", + "source_tags": ["refdata"], + "source_tags_blacklist": 0 + }, + "pagination": { + "count": 0, + "start": 0 + }, + "post_processing": { + "ids_only": 1, + "include_highlight": 1, + "skip_data": 1, + "skip_info": 1, + "skip_keys": 1 + } + }] +} \ No newline at end of file diff --git a/tests/integration/data/legacy/search_objects/case-04-response.json b/tests/integration/data/legacy/search_objects/case-04-response.json new file mode 100644 index 0000000..3631eaf --- /dev/null +++ b/tests/integration/data/legacy/search_objects/case-04-response.json @@ -0,0 +1,18 @@ +{ + "id": "xyz", + "version": "1.1", + "result": [{ + "pagination": { + "start": 0, + "count": 0 + }, + "sorting_rules": [{ + "property": "timestamp", + "is_object_property": 0, + "ascending": 1 + }], + "objects": [], + "total": 15039, + "search_time": 1848 + }] +} \ No newline at end of file diff --git a/tests/integration/data/legacy/search_objects/case-04.md b/tests/integration/data/legacy/search_objects/case-04.md new file mode 100644 index 0000000..fb4426c --- /dev/null +++ b/tests/integration/data/legacy/search_objects/case-04.md @@ -0,0 +1 @@ +# A search against public refdata data diff --git a/tests/integration/data/legacy/search_objects/case-06-request.json b/tests/integration/data/legacy/search_objects/case-06-request.json new file mode 100644 index 0000000..c7df723 --- /dev/null +++ b/tests/integration/data/legacy/search_objects/case-06-request.json @@ -0,0 +1,38 @@ +{ + "id": "xyz", + "method": "KBaseSearchEngine.search_objects", + "params": [{ + "access_filter": { + "with_private": 1, + "with_public": 1 + }, + "match_filter": { + "exclude_subobjects": 1, + "full_text_in_all": "coli", + "source_tags": ["refdata", "noindex"], + "source_tags_blacklist": 1 + }, + "pagination": { + "count": 20, + "start": 0 + }, + "post_processing": { + "add_narrative_info": 1, + "ids_only": 0, + "include_highlight": 1, + "skip_data": 0, + "skip_info": 0, + "skip_keys": 0 + }, + "sorting_rules": [{ + "ascending": 0, + "is_object_property": 0, + "property": "access_group_id" + }, { + "ascending": 1, + "is_object_property": 0, + "property": "type" + }] + }], + "version": "1.1" +} \ No newline at end of file diff --git a/tests/integration/data/legacy/search_objects/case-06-response.json b/tests/integration/data/legacy/search_objects/case-06-response.json new file mode 100644 index 0000000..078396c --- /dev/null +++ b/tests/integration/data/legacy/search_objects/case-06-response.json @@ -0,0 +1,20 @@ +{ + "id": "xyz", + "version": "1.1", + "result": [{ + "pagination": {"start": 0, "count": 20}, + "sorting_rules": [{ + "property": "access_group_id", + "is_object_property": 0, + "ascending": 0 + }, { + "property": "type", + "is_object_property": 0, + "ascending": 1 + }], + "objects": [], + "total": 6509, + "search_time": 1918, + "access_group_narrative_info": {} + }] +} \ No newline at end of file diff --git a/tests/integration/data/legacy/search_objects/case-06.md b/tests/integration/data/legacy/search_objects/case-06.md new file mode 100644 index 0000000..cd686dc --- /dev/null +++ b/tests/integration/data/legacy/search_objects/case-06.md @@ -0,0 +1 @@ +Search for "coli" in narrative workspaces which are either public or private. 
\ No newline at end of file diff --git a/tests/integration/data/legacy/search_objects/case-09-request.json b/tests/integration/data/legacy/search_objects/case-09-request.json new file mode 100644 index 0000000..518b835 --- /dev/null +++ b/tests/integration/data/legacy/search_objects/case-09-request.json @@ -0,0 +1,38 @@ +{ + "params": [{ + "match_filter": { + "full_text_in_all": "coli", + "exclude_subobjects": 1, + "source_tags": ["refdata", "noindex"], + "source_tags_blacklist": 1 + }, + "pagination": { + "start": 0, + "count": 20 + }, + "post_processing": { + "ids_only": 0, + "skip_info": 0, + "skip_keys": 0, + "skip_data": 0, + "include_highlight": 1, + "add_narrative_info": 1 + }, + "access_filter": { + "with_private": 1, + "with_public": 1 + }, + "sorting_rules": [{ + "is_object_property": 0, + "property": "access_group_id", + "ascending": 0 + }, { + "is_object_property": 0, + "property": "type", + "ascending": 1 + }] + }], + "method": "KBaseSearchEngine.search_objects", + "version": "1.1", + "id": "4564119057768642" +} \ No newline at end of file diff --git a/tests/integration/data/legacy/search_objects/case-09-response.json b/tests/integration/data/legacy/search_objects/case-09-response.json new file mode 100644 index 0000000..59dcd41 --- /dev/null +++ b/tests/integration/data/legacy/search_objects/case-09-response.json @@ -0,0 +1,1180 @@ +{ + "object_version": "1.1", + "result": [{ + "pagination": { + "start": 0, + "count": 20 + }, + "sorting_rules": [{ + "is_object_property": 0, + "property": "access_group_id", + "ascending": 0 + }, { + "is_object_property": 0, + "property": "workspace_type", + "ascending": 1 + }], + "total": 115, + "search_time": 784, + "objects": [{ + "id": "WS::52489:1", + "object_name": "Narrative.1594758723895", + "workspace_id": 52489, + "object_id": 1, + "object_version": 23, + "timestamp": 1609804811154, + "workspace_type_name": "Narrative", + "creator": "eapearson", + "data": { + "narrative_title": "Taxonomy Landing Page Testing", + "is_narratorial": false, + "data_objects": [], + "owner": "eapearson", + "modified_at": 1609804811000, + "cells": [{ + "desc": "Taxonomy Landing Page Testing\nA Narrative full of fun stuff about testing the taxonomy landing page\n", + "cell_type": "markdown" + }, { + "desc": "URL Format\nThe following endpoints are supported\ntaxonomy/taxon/NAMESPACE/ID/[TIMESTAMP]\nTaxon landing page for viewing taxon ID in namespace NAMESPACE with optional TIMESTAMP.\nSee examples below.\ntaxonomy/about\nplaceholder\ntaxonomy/help\nplaceholder\n", + "cell_type": "markdown" + }, { + "desc": "NCBI\nnamespace: ncbi_taxonomy\nExample for Escherichia coli:\nhttps://ci.kbase.us#taxonomy/taxon/ncbi_taxonomy/562\n", + "cell_type": "markdown" + }, { + "desc": "GTDB\nnamespace: gtdb\nExample for Escherichia coli:\nhttps://ci.kbase.us#taxonomy/taxon/gtdb/GBGCA900481195.1\n", + "cell_type": "markdown" + }, { + "desc": "RDP\nnamespace: rdb_taxonomy\nExample for Escherichia coli; PK3; X80731:\nhttps://ci.kbase.us#taxonomy/taxon/rdp_taxonomy/S000000986\n", + "cell_type": "markdown" + }, { + "desc": "SILVA\nnamespace: silva_taxonomy\nExample for Escherichia coli; PK3; X80731:\nhttps://ci.kbase.us#taxonomy/taxon/silva_taxonomy/HG710171.1.771\n", + "cell_type": "markdown" + }], + "total_cells": 6, + "static_narrative_saved": null, + "static_narrative_ref": null, + "shared_users": ["scanon", "eapearson", "psdehal"], + "creation_date": "2020-07-14T20:32:04+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "4.0", + 
"obj_type_module": "KBaseNarrative", + "index_runner_ver": "1.9.17" + }, + "index_name": "narrative_2", + "index_version": 0, + "highlight": { + "cells.desc": ["NCBI\nnamespace: ncbi_taxonomy\nExample for Escherichia coli:\nhttps://ci.kbase.us#taxonomy/taxon/ncbi_taxonomy", "GTDB\nnamespace: gtdb\nExample for Escherichia coli:\nhttps://ci.kbase.us#taxonomy/taxon/gtdb/GBGCA900481195.1", "RDP\nnamespace: rdb_taxonomy\nExample for Escherichia coli; PK3; X80731:\nhttps://ci.kbase.us#taxonomy/taxon", "SILVA\nnamespace: silva_taxonomy\nExample for Escherichia coli; PK3; X80731:\nhttps://ci.kbase.us#taxonomy"] + } + }, { + "id": "WS::50631:3", + "object_name": "GCF_000005845.2_ASM584v2_genomic.gbff_genome", + "workspace_id": 50631, + "object_id": 3, + "object_version": 1, + "timestamp": 1573161872922, + "workspace_type_name": "Genome", + "creator": "bsadkhin", + "data": { + "genome_id": "GCF_000005845.2_ASM584v2_genomic.gbff_genome", + "scientific_name": "Escherichia coli str. K-12 substr. MG1655", + "publication_titles": ["Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "Direct Submission", "A more accurate sequence comparison between genomes of Escherichia coli K12 W3110 and MG1655 strains", "Workshop on Annotation of Escherichia coli K-12", "The complete genome sequence of Escherichia coli K-12", "ASAP: Escherichia coli K-12 strain MG1655 version m56"], + "publication_authors": ["Hayashi,K., Morooka,N., Mori,H. and Horiuchi,T.", "Hayashi,K., Morooka,N., Yamamoto,Y., Fujita,K., Isono,K., Choi,S., Ohtsubo,E., Baba,T., Wanner,B.L., Mori,H. and Horiuchi,T.", "Perna,N.T.", "Plunkett,G. III.", "Blattner,F.R., Plunkett,G. III, Bloch,C.A., Perna,N.T., Burland,V., Riley,M., Collado-Vides,J., Glasner,J.D., Rode,C.K., Mayhew,G.F., Gregor,J., Davis,N.W., Kirkpatrick,H.A., Goeden,M.A., Rose,D.J., Mau,B. and Shao,Y.", "Glasner,J.D., Perna,N.T., Plunkett,G. III, Anderson,B.D., Bockhorst,J., Hu,J.C., Riley,M., Rudd,K.E. and Serres,M.H.", "Blattner,F.R. and Plunkett,G. III.", "Rudd,K.E.", "Riley,M., Abe,T., Arnaud,M.B., Berlyn,M.K., Blattner,F.R., Chaudhuri,R.R., Glasner,J.D., Horiuchi,T., Keseler,I.M., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Rudd,K.E., Serres,M.H., Thomas,G.H., Thomson,N.R., Wishart,D. and Wanner,B.L.", "Arnaud,M., Berlyn,M.K.B., Blattner,F.R., Galperin,M.Y., Glasner,J.D., Horiuchi,T., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Riley,M., Rudd,K.E., Serres,M.H., Thomas,G.H. 
and Wanner,B.L."], + "size": 4641652, + "num_contigs": 1, + "genome_type": "draft isolate", + "gc_content": 0.50791, + "taxonomy": "cellular organisms; Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli; Escherichia coli K-12", + "mean_contig_length": 4641652.0, + "external_origination_date": "11-Oct-2018", + "original_source_file_name": "GCF_000005845.2_ASM584v2_genomic.gbff", + "cds_count": 4357, + "feature_count": 4355, + "mrna_count": 0, + "non_coding_feature_count": 1187, + "assembly_ref": "44701:2:1", + "source_id": "NC_000913", + "feature_counts": { + "CDS": 4357, + "gene": 4566, + "misc_feature": 48, + "misc_recomb": 1, + "mobile_element": 49, + "ncRNA": 72, + "non_coding_features": 211, + "protein_encoding_gene": 4355, + "rRNA": 22, + "rep_origin": 1, + "repeat_region": 697, + "tRNA": 86 + }, + "source": "RefSeq", + "warnings": ["For prokaryotes, CDS array should generally be the same length as the Features array."], + "shared_users": ["bsadkhin"], + "creation_date": "2020-05-15T23:25:40+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "publication_titles": ["Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "Workshop on Annotation of Escherichia coli K-12", "The complete genome sequence of Escherichia coli K-12", "ASAP: Escherichia coli K-12 strain MG1655 version m56"], + "scientific_name": ["Escherichia coli str. K-12 substr. MG1655"], + "taxonomy": ["Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli", "; Escherichia coli K-12"] + } + }, { + "id": "WS::48753:1", + "object_name": "Narrative.1585596681448", + "workspace_id": 48753, + "object_id": 1, + "object_version": 7, + "timestamp": 1586539905964, + "workspace_type_name": "Narrative", + "creator": "eapearson", + "data": { + "narrative_title": "RE landing page sample links", + "is_narratorial": false, + "data_objects": [{ + "name": "Abiotrophia_defectiva_ATCC_49176", + "obj_type": "KBaseGenomes.Genome-17.0" + }], + "owner": "eapearson", + "modified_at": 1586539906000, + "cells": [{ + "desc": "Relation Engine Landing Pages\n", + "cell_type": "markdown" + }, { + "desc": "Ontology Landing Pages\nStart at the root, and explore!\nbiological process - https://ci.kbase.us/#review/go_ontology/GO:0008150\nbiological process - https://ci.kbase.us/#review/go_ontology/GO:0008150\n", + "cell_type": "markdown" + }, { + "desc": "Taxonomy Landing Pages\nThe ubiquitous E. coli...\nNCBI:\nhttps://ci.kbase.us/#review/ncbi_taxonomy/562\nGTDB:\nhttps://ci.kbase.us#review/gtdb/GBGCA900481195.1\n", + "cell_type": "markdown" + }, { + "desc": "Relation engine landing page urls\nA url to the relation engine landing page looks like:\n\nhttps://ci.kbase.us#review/namespace/id[/ts]\n\nwhere\n\nreview is the name of the relation engine landing page plugin endpoint; this is short for \"relation engine view\"\nnamespace is the collection to be accessed; e.g. ncbi_taxonomy, gtdb, go_ontology.\nid is the identifier for the specific item to be viewed; the form of the id depends on the collection,e.g. 562 is E. 
coli in the ncbi taxonomy, GO:0008150 is the id for \"biological process\" in the go ontology.\nts is an optional timestamp associated with the specific instance of the item; items within a collection are stamped with an time range which defines when the item is valid; this is used to manage collection updates over time. If omitted, the most recent version of the item is selected\n\n", + "cell_type": "markdown" + }, { + "desc": "Taxonomy API\nThe kb sdk module taxonomy_re_api serves as the taxonomy api.\nIn order to test calls against it, you'll first need to get the url:\njson\n{\n \"version\": \"1.1\",\n \"method\": \"ServiceWizard.get_service_status\",\n \"id\": \"9360468998301\",\n \"params\": [{\n \"module_name\": \"taxonomy_re_api\",\n \"version\": \"dev\"\n }]\n}\n\nwill give you something like:\njson\n{\n \"version\": \"1.1\",\n \"result\": [\n {\n \"git_commit_hash\": \"5a120b167a19b7b5fb87a11c7c35f704a225c156\",\n \"status\": \"active\",\n \"version\": \"3.4.0\",\n \"hash\": \"5a120b167a19b7b5fb87a11c7c35f704a225c156\",\n \"release_tags\": [\n \"dev\"\n ],\n \"url\": \"https://ci.kbase.us:443/dynserv/5a120b167a19b7b5fb87a11c7c35f704a225c156.taxonomy-re-api\",\n \"module_name\": \"taxonomy_re_api\",\n \"health\": \"healthy\",\n \"up\": 1\n }\n ],\n \"id\": \"9360468998301\"\n}\n\nArmed with the service url, it is handy to be able to find critters to explore:\nThe taxonomy_re_api.search_taxa method can be used with a free-form text search to find taxonomic entities:\njson\n{\n \"params\": [{\n \"ns\": \"gtdb\",\n \"search_text\": \"coli\"\n }],\n \"method\": \"taxonomy_re_api.search_taxa\",\n \"version\": \"1.1\",\n \"id\": \"3658889363010196\"\n}\n\nreturns \njson\n{\n \"result\": [\n {\n \"stats\": {\n \"executionTime\": 0.08076357841491699,\n \"filtered\": 0,\n \"httpRequests\": 0,\n \"peakMemoryUsage\": 1563242,\n \"scannedFull\": 0,\n \"scannedIndex\": 3147,\n \"writesExecuted\": 0,\n \"writesIgnored\": 0\n },\n \"total_count\": 3147,\n \"results\": [\n {\n \"_id\": \"gtdb_taxon/GB_GCA_900481195.1_r89\",\n \"_key\": \"GB_GCA_900481195.1_r89\",\n \"_rev\": \"_ZclM1Oa--e\",\n \"created\": 0,\n \"expired\": 9007199254740991,\n \"first_version\": \"r89\",\n \"id\": \"GB_GCA_900481195.1\",\n \"last_version\": \"r89\",\n \"name\": \"Escherichia coli\",\n \"rank\": \"genome\",\n \"release_created\": 0,\n \"release_expired\": 9007199254740991\n },\n {\n \"_id\": \"gtdb_taxon/RS_GCF_002548945.1_r89\",\n \"_key\": \"RS_GCF_002548945.1_r89\",\n \"_rev\": \"_ZclM1Oa--M\",\n \"created\": 0,\n \"expired\": 9007199254740991,\n \"first_version\": \"r89\",\n \"id\": \"RS_GCF_002548945.1\",\n \"last_version\": \"r89\",\n \"name\": \"Escherichia coli\",\n \"rank\": \"genome\",\n \"release_created\": 0,\n \"release_expired\": 9007199254740991\n },\n\nFrom here you can grab the id and make another call. 
The taxonomy api repo documents the api calls, which include:\n\nget_taxon\ngettaxonfromwsobj\ngetassociatedws_objects\nget_lineage\nget_children\nget_siblings\nsearch_species\nsearch_taxa\n\njson\n{\n \"params\": [{\n \"ns\": \"gtdb\",\n \"id\": \"GB_GCA_900481195.1\"\n }],\n \"method\": \"taxonomy_re_api.get_lineage\",\n \"version\": \"1.1\",\n \"id\": \"3658889363010196\"\n}\n\n", + "cell_type": "markdown" + }], + "total_cells": 5, + "static_narrative_saved": null, + "static_narrative_ref": null, + "shared_users": ["eapearson", "scanon"], + "creation_date": "2020-03-30T19:31:21+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "4.0", + "obj_type_module": "KBaseNarrative", + "index_runner_ver": "1.9.17" + }, + "index_name": "narrative_2", + "index_version": 0, + "highlight": { + "cells.desc": ["Taxonomy Landing Pages\nThe ubiquitous E. coli...", "identifier for the specific item to be viewed; the form of the id depends on the collection,e.g. 562 is E. coli", "search to find taxonomic entities:\njson\n{\n \"params\": [{\n \"ns\": \"gtdb\",\n \"search_text\": \"coli", "GB_GCA_900481195.1\",\n \"last_version\": \"r89\",\n \"name\": \"Escherichia coli", "RS_GCF_002548945.1\",\n \"last_version\": \"r89\",\n \"name\": \"Escherichia coli"] + } + }, { + "id": "WS::45320:4", + "object_name": "ecoli_2contigs_orig", + "workspace_id": 45320, + "object_id": 4, + "object_version": 4, + "timestamp": 1574124911386, + "workspace_type_name": "Genome", + "creator": "jayrbolton", + "data": { + "genome_id": "ecoli_2contigs_orig", + "scientific_name": "Escherichia coli str. K-12 substr. MG1655", + "publication_titles": ["A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "The complete genome sequence of Escherichia coli K-12", "Direct Submission", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "A more accurate sequence comparison between genomes of Escherichia coli K12 W3110 and MG1655 strains", "Workshop on Annotation of Escherichia coli K-12", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "ASAP: Escherichia coli K-12 strain MG1655 version m56", "Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction"], + "publication_authors": ["Blattner,F.R. and Plunkett,G. III.", "Arnaud,M., Berlyn,M.K.B., Blattner,F.R., Galperin,M.Y., Glasner,J.D., Horiuchi,T., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Riley,M., Rudd,K.E., Serres,M.H., Thomas,G.H. and Wanner,B.L.", "Blattner,F.R., Plunkett,G. III, Bloch,C.A., Perna,N.T., Burland,V., Riley,M., Collado-Vides,J., Glasner,J.D., Rode,C.K., Mayhew,G.F., Gregor,J., Davis,N.W., Kirkpatrick,H.A., Goeden,M.A., Rose,D.J., Mau,B. and Shao,Y.", "Plunkett,G. III.", "Perna,N.T.", "Hayashi,K., Morooka,N., Yamamoto,Y., Fujita,K., Isono,K., Choi,S., Ohtsubo,E., Baba,T., Wanner,B.L., Mori,H. and Horiuchi,T.", "Hayashi,K., Morooka,N., Mori,H. and Horiuchi,T.", "Riley,M., Abe,T., Arnaud,M.B., Berlyn,M.K., Blattner,F.R., Chaudhuri,R.R., Glasner,J.D., Horiuchi,T., Keseler,I.M., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Rudd,K.E., Serres,M.H., Thomas,G.H., Thomson,N.R., Wishart,D. and Wanner,B.L.", "Glasner,J.D., Perna,N.T., Plunkett,G. III, Anderson,B.D., Bockhorst,J., Hu,J.C., Riley,M., Rudd,K.E. 
and Serres,M.H.", "Rudd,K.E."], + "size": 4641772, + "num_contigs": 2, + "genome_type": null, + "gc_content": 0.5079, + "taxonomy": "Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli; Escherichia coli K-12", + "mean_contig_length": 2320886.0, + "external_origination_date": "08-Aug-2016", + "original_source_file_name": "GCF_000005845.2_ASM584v2_altered_genomic.gbff", + "cds_count": 4319, + "feature_count": 4319, + "mrna_count": 0, + "non_coding_feature_count": 773, + "assembly_ref": "45273:1:1", + "source_id": "NC_000913", + "feature_counts": { + "CDS": 4319, + "gene": 4498, + "misc_feature": 11, + "mobile_element": 49, + "ncRNA": 65, + "non_coding_features": 773, + "non_coding_genes": 179, + "protein_encoding_gene": 4319, + "rRNA": 22, + "rep_origin": 1, + "repeat_region": 355, + "tRNA": 89, + "tmRNA": 2 + }, + "source": "Genbank", + "warnings": [], + "shared_users": ["jayrbolton"], + "creation_date": "2019-11-30T17:37:45+0000", + "is_public": true, + "copied": "45273/2/1", + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "kbase_id": "45320/4/1", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "publication_titles": ["A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "The complete genome sequence of Escherichia coli K-12", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "Workshop on Annotation of Escherichia coli K-12", "ASAP: Escherichia coli K-12 strain MG1655 version m56"], + "scientific_name": ["Escherichia coli str. K-12 substr. MG1655"], + "taxonomy": ["Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli", "; Escherichia coli K-12"] + } + }, { + "object_name": "small_genbank__test_batch_sub_dir_1", + "workspace_id": 45320, + "object_id": 5, + "object_version": 1, + "timestamp": 1549683737483, + "workspace_type_name": "Genome", + "creator": "tgu2", + "data": { + "genome_id": "small_genbank__test_batch_sub_dir_1", + "scientific_name": "Escherichia coli str. K-12 substr. MG1655", + "publication_titles": ["A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "The complete genome sequence of Escherichia coli K-12", "Direct Submission", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "A more accurate sequence comparison between genomes of Escherichia coli K12 W3110 and MG1655 strains", "Workshop on Annotation of Escherichia coli K-12", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "ASAP: Escherichia coli K-12 strain MG1655 version m56", "Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction"], + "publication_authors": ["Blattner,F.R. and Plunkett,G. III.", "Arnaud,M., Berlyn,M.K.B., Blattner,F.R., Galperin,M.Y., Glasner,J.D., Horiuchi,T., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Riley,M., Rudd,K.E., Serres,M.H., Thomas,G.H. and Wanner,B.L.", "Blattner,F.R., Plunkett,G. III, Bloch,C.A., Perna,N.T., Burland,V., Riley,M., Collado-Vides,J., Glasner,J.D., Rode,C.K., Mayhew,G.F., Gregor,J., Davis,N.W., Kirkpatrick,H.A., Goeden,M.A., Rose,D.J., Mau,B. and Shao,Y.", "Plunkett,G. III.", "Perna,N.T.", "Hayashi,K., Morooka,N., Mori,H. and Horiuchi,T.", "Hayashi,K., Morooka,N., Yamamoto,Y., Fujita,K., Isono,K., Choi,S., Ohtsubo,E., Baba,T., Wanner,B.L., Mori,H. 
and Horiuchi,T.", "Riley,M., Abe,T., Arnaud,M.B., Berlyn,M.K., Blattner,F.R., Chaudhuri,R.R., Glasner,J.D., Horiuchi,T., Keseler,I.M., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Rudd,K.E., Serres,M.H., Thomas,G.H., Thomson,N.R., Wishart,D. and Wanner,B.L.", "Glasner,J.D., Perna,N.T., Plunkett,G. III, Anderson,B.D., Bockhorst,J., Hu,J.C., Riley,M., Rudd,K.E. and Serres,M.H.", "Rudd,K.E."], + "size": 4641652, + "num_contigs": 1, + "genome_type": "draft isolate", + "gc_content": 0.50791, + "taxonomy": "cellular organisms; Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli; Escherichia coli K-12", + "mean_contig_length": 4641652.0, + "external_origination_date": "08-Aug-2016", + "original_source_file_name": "small_genbank.gbff", + "cds_count": 4319, + "feature_count": 4319, + "mrna_count": 0, + "non_coding_feature_count": 773, + "assembly_ref": "28327:272:1", + "source_id": "NC_000913", + "feature_counts": { + "CDS": 4319, + "gene": 4498, + "misc_feature": 11, + "mobile_element": 49, + "ncRNA": 65, + "non_coding_features": 179, + "protein_encoding_gene": 4319, + "rRNA": 22, + "rep_origin": 1, + "repeat_region": 355, + "tRNA": 89, + "tmRNA": 2 + }, + "source": "Other", + "warnings": [], + "shared_users": ["jayrbolton"], + "creation_date": "2019-11-30T17:38:35+0000", + "is_public": true, + "copied": "28327/273/1", + "tags": ["narrative"], + "obj_type_version": "15.1", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::45320:5", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "publication_titles": ["A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "The complete genome sequence of Escherichia coli K-12", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "Workshop on Annotation of Escherichia coli K-12", "ASAP: Escherichia coli K-12 strain MG1655 version m56"], + "scientific_name": ["Escherichia coli str. K-12 substr. MG1655"], + "taxonomy": ["Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli", "; Escherichia coli K-12"] + } + }, { + "object_name": "GCF_000005845.2_ASM584v2_genomic.gbff_genome", + "workspace_id": 44783, + "object_id": 3, + "object_version": 1, + "timestamp": 1573159884255, + "workspace_type_name": "Genome", + "creator": "bsadkhin", + "data": { + "genome_id": "GCF_000005845.2_ASM584v2_genomic.gbff_genome", + "scientific_name": "Escherichia coli str. K-12 substr. MG1655", + "publication_titles": ["The complete genome sequence of Escherichia coli K-12", "Direct Submission", "A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "Workshop on Annotation of Escherichia coli K-12", "Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction", "ASAP: Escherichia coli K-12 strain MG1655 version m56", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "A more accurate sequence comparison between genomes of Escherichia coli K12 W3110 and MG1655 strains", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005"], + "publication_authors": ["Hayashi,K., Morooka,N., Yamamoto,Y., Fujita,K., Isono,K., Choi,S., Ohtsubo,E., Baba,T., Wanner,B.L., Mori,H. and Horiuchi,T.", "Riley,M., Abe,T., Arnaud,M.B., Berlyn,M.K., Blattner,F.R., Chaudhuri,R.R., Glasner,J.D., Horiuchi,T., Keseler,I.M., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. 
III, Rudd,K.E., Serres,M.H., Thomas,G.H., Thomson,N.R., Wishart,D. and Wanner,B.L.", "Blattner,F.R., Plunkett,G. III, Bloch,C.A., Perna,N.T., Burland,V., Riley,M., Collado-Vides,J., Glasner,J.D., Rode,C.K., Mayhew,G.F., Gregor,J., Davis,N.W., Kirkpatrick,H.A., Goeden,M.A., Rose,D.J., Mau,B. and Shao,Y.", "Glasner,J.D., Perna,N.T., Plunkett,G. III, Anderson,B.D., Bockhorst,J., Hu,J.C., Riley,M., Rudd,K.E. and Serres,M.H.", "Perna,N.T.", "Blattner,F.R. and Plunkett,G. III.", "Rudd,K.E.", "Hayashi,K., Morooka,N., Mori,H. and Horiuchi,T.", "Plunkett,G. III.", "Arnaud,M., Berlyn,M.K.B., Blattner,F.R., Galperin,M.Y., Glasner,J.D., Horiuchi,T., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Riley,M., Rudd,K.E., Serres,M.H., Thomas,G.H. and Wanner,B.L."], + "size": 4641652, + "num_contigs": 1, + "genome_type": "draft isolate", + "gc_content": 0.50791, + "taxonomy": "cellular organisms; Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli; Escherichia coli K-12", + "mean_contig_length": 4641652.0, + "external_origination_date": "11-Oct-2018", + "original_source_file_name": "GCF_000005845.2_ASM584v2_genomic.gbff", + "cds_count": 4357, + "feature_count": 4355, + "mrna_count": 0, + "non_coding_feature_count": 1187, + "assembly_ref": "44783:2:1", + "source_id": "NC_000913", + "feature_counts": { + "CDS": 4357, + "gene": 4566, + "misc_feature": 48, + "misc_recomb": 1, + "mobile_element": 49, + "ncRNA": 72, + "non_coding_features": 211, + "protein_encoding_gene": 4355, + "rRNA": 22, + "rep_origin": 1, + "repeat_region": 697, + "tRNA": 86 + }, + "source": "RefSeq", + "warnings": ["For prokaryotes, CDS array should generally be the same length as the Features array."], + "shared_users": ["bsadkhin", "briehltest", "testbriehl"], + "creation_date": "2019-11-07T20:51:27+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::44783:3", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "publication_titles": ["The complete genome sequence of Escherichia coli K-12", "Workshop on Annotation of Escherichia coli K-12", "ASAP: Escherichia coli K-12 strain MG1655 version m56", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005"], + "scientific_name": ["Escherichia coli str. K-12 substr. 
MG1655"], + "taxonomy": ["Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli", "; Escherichia coli K-12"] + } + }, { + "object_name": "Escherichia_coli", + "workspace_id": 44697, + "object_id": 3, + "object_version": 1, + "timestamp": 1527549837859, + "workspace_type_name": "Genome", + "creator": "qzhang", + "data": { + "genome_id": "GCF_002163935.1", + "scientific_name": "Escherichia coli", + "publication_titles": ["Direct Submission"], + "publication_authors": ["Chalmers,G."], + "size": 4855538, + "num_contigs": 2, + "genome_type": null, + "gc_content": 0.50863, + "taxonomy": "cellular organisms; Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia", + "mean_contig_length": 2427769.0, + "external_origination_date": "11-Jan-2018", + "original_source_file_name": "GCF_002163935.1_ASM216393v1_genomic.gbff", + "cds_count": 4917, + "feature_count": 4917, + "mrna_count": 0, + "non_coding_feature_count": 237, + "assembly_ref": "15792:120861:2", + "source_id": "NZ_CP019560", + "feature_counts": { + "CDS": 4917, + "gene": 5031, + "ncRNA": 6, + "non-protein_encoding_gene": 114, + "protein_encoding_gene": 4917, + "rRNA": 22, + "regulatory": 7, + "repeat_region": 2, + "tRNA": 85, + "tmRNA": 1 + }, + "source": "RefSeq", + "warnings": [], + "shared_users": ["psdehal", "scanon", "eapearson", "marcin"], + "creation_date": "2019-11-05T18:42:51+0000", + "is_public": true, + "copied": "15792/120862/2", + "tags": ["narrative"], + "obj_type_version": "14.1", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::44697:3", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "scientific_name": ["Escherichia coli"] + } + }, { + "object_name": "Ecoli_go_term", + "workspace_id": 44640, + "object_id": 9, + "object_version": 1, + "timestamp": 1572897735244, + "workspace_type_name": "Genome", + "creator": "luj", + "data": { + "genome_id": "Ecoli_go_term", + "scientific_name": "Escherichia coli str. K-12 substr. MG1655", + "publication_titles": ["A more accurate sequence comparison between genomes of Escherichia coli K12 W3110 and MG1655 strains", "ASAP: Escherichia coli K-12 strain MG1655 version m56", "Workshop on Annotation of Escherichia coli K-12", "Direct Submission", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "The complete genome sequence of Escherichia coli K-12", "A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction"], + "publication_authors": ["Riley,M., Abe,T., Arnaud,M.B., Berlyn,M.K., Blattner,F.R., Chaudhuri,R.R., Glasner,J.D., Horiuchi,T., Keseler,I.M., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Rudd,K.E., Serres,M.H., Thomas,G.H., Thomson,N.R., Wishart,D. and Wanner,B.L.", "Hayashi,K., Morooka,N., Yamamoto,Y., Fujita,K., Isono,K., Choi,S., Ohtsubo,E., Baba,T., Wanner,B.L., Mori,H. and Horiuchi,T.", "Perna,N.T.", "Plunkett,G. III.", "Rudd,K.E.", "Blattner,F.R., Plunkett,G. III, Bloch,C.A., Perna,N.T., Burland,V., Riley,M., Collado-Vides,J., Glasner,J.D., Rode,C.K., Mayhew,G.F., Gregor,J., Davis,N.W., Kirkpatrick,H.A., Goeden,M.A., Rose,D.J., Mau,B. and Shao,Y.", "Hayashi,K., Morooka,N., Mori,H. and Horiuchi,T.", "Blattner,F.R. and Plunkett,G. 
III.", "Arnaud,M., Berlyn,M.K.B., Blattner,F.R., Galperin,M.Y., Glasner,J.D., Horiuchi,T., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Riley,M., Rudd,K.E., Serres,M.H., Thomas,G.H. and Wanner,B.L.", "Glasner,J.D., Perna,N.T., Plunkett,G. III, Anderson,B.D., Bockhorst,J., Hu,J.C., Riley,M., Rudd,K.E. and Serres,M.H."], + "size": 4641652, + "num_contigs": 1, + "genome_type": "finished isolate", + "gc_content": 0.50791, + "taxonomy": "cellular organisms; Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli; Escherichia coli K-12", + "mean_contig_length": 4641652.0, + "external_origination_date": "11-Oct-2018", + "original_source_file_name": "GCF_000005845.2_ASM584v2_genomic_updated.gbff", + "cds_count": 4357, + "feature_count": 4355, + "mrna_count": 0, + "non_coding_feature_count": 1187, + "assembly_ref": "44640:8:1", + "source_id": "NC_000913", + "feature_counts": { + "CDS": 4357, + "gene": 4566, + "misc_feature": 48, + "misc_recomb": 1, + "mobile_element": 49, + "ncRNA": 72, + "non_coding_features": 211, + "protein_encoding_gene": 4355, + "rRNA": 22, + "rep_origin": 1, + "repeat_region": 697, + "tRNA": 86 + }, + "source": "RefSeq", + "warnings": ["For prokaryotes, CDS array should generally be the same length as the Features array."], + "shared_users": ["bsadkhin", "luj"], + "creation_date": "2019-11-04T20:02:18+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::44640:9", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "publication_titles": ["ASAP: Escherichia coli K-12 strain MG1655 version m56", "Workshop on Annotation of Escherichia coli K-12", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "The complete genome sequence of Escherichia coli K-12"], + "scientific_name": ["Escherichia coli str. K-12 substr. MG1655"], + "taxonomy": ["Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli", "; Escherichia coli K-12"] + } + }, { + "object_name": "Ecoli_go_term2", + "workspace_id": 44640, + "object_id": 12, + "object_version": 1, + "timestamp": 1573105778886, + "workspace_type_name": "Genome", + "creator": "bsadkhin", + "data": { + "genome_id": "Ecoli_go_term2", + "scientific_name": "Escherichia coli str. K-12 substr. MG1655", + "publication_titles": ["A more accurate sequence comparison between genomes of Escherichia coli K12 W3110 and MG1655 strains", "ASAP: Escherichia coli K-12 strain MG1655 version m56", "Workshop on Annotation of Escherichia coli K-12", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "Direct Submission", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "The complete genome sequence of Escherichia coli K-12", "A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction"], + "publication_authors": ["Riley,M., Abe,T., Arnaud,M.B., Berlyn,M.K., Blattner,F.R., Chaudhuri,R.R., Glasner,J.D., Horiuchi,T., Keseler,I.M., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Rudd,K.E., Serres,M.H., Thomas,G.H., Thomson,N.R., Wishart,D. 
and Wanner,B.L.", "Hayashi,K., Morooka,N., Yamamoto,Y., Fujita,K., Isono,K., Choi,S., Ohtsubo,E., Baba,T., Wanner,B.L., Mori,H. and Horiuchi,T.", "Perna,N.T.", "Plunkett,G. III.", "Rudd,K.E.", "Blattner,F.R., Plunkett,G. III, Bloch,C.A., Perna,N.T., Burland,V., Riley,M., Collado-Vides,J., Glasner,J.D., Rode,C.K., Mayhew,G.F., Gregor,J., Davis,N.W., Kirkpatrick,H.A., Goeden,M.A., Rose,D.J., Mau,B. and Shao,Y.", "Hayashi,K., Morooka,N., Mori,H. and Horiuchi,T.", "Blattner,F.R. and Plunkett,G. III.", "Arnaud,M., Berlyn,M.K.B., Blattner,F.R., Galperin,M.Y., Glasner,J.D., Horiuchi,T., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Riley,M., Rudd,K.E., Serres,M.H., Thomas,G.H. and Wanner,B.L.", "Glasner,J.D., Perna,N.T., Plunkett,G. III, Anderson,B.D., Bockhorst,J., Hu,J.C., Riley,M., Rudd,K.E. and Serres,M.H."], + "size": 4641652, + "num_contigs": 1, + "genome_type": "finished isolate", + "gc_content": 0.50791, + "taxonomy": "cellular organisms; Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli; Escherichia coli K-12", + "mean_contig_length": 4641652.0, + "external_origination_date": "11-Oct-2018", + "original_source_file_name": "GCF_000005845.2_ASM584v2_genomic.gbff", + "cds_count": 4357, + "feature_count": 4355, + "mrna_count": 0, + "non_coding_feature_count": 1187, + "assembly_ref": "44640:11:1", + "source_id": "NC_000913", + "feature_counts": { + "CDS": 4357, + "gene": 4566, + "misc_feature": 48, + "misc_recomb": 1, + "mobile_element": 49, + "ncRNA": 72, + "non_coding_features": 211, + "protein_encoding_gene": 4355, + "rRNA": 22, + "rep_origin": 1, + "repeat_region": 697, + "tRNA": 86 + }, + "source": "RefSeq", + "warnings": ["For prokaryotes, CDS array should generally be the same length as the Features array."], + "shared_users": ["bsadkhin", "luj"], + "creation_date": "2019-11-07T05:49:42+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::44640:12", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "publication_titles": ["ASAP: Escherichia coli K-12 strain MG1655 version m56", "Workshop on Annotation of Escherichia coli K-12", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "The complete genome sequence of Escherichia coli K-12"], + "scientific_name": ["Escherichia coli str. K-12 substr. MG1655"], + "taxonomy": ["Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli", "; Escherichia coli K-12"] + } + }, { + "object_name": "Ecoli_go", + "workspace_id": 44640, + "object_id": 3, + "object_version": 1, + "timestamp": 1572672656546, + "workspace_type_name": "Genome", + "creator": "bsadkhin", + "data": { + "genome_id": "Ecoli_go", + "scientific_name": "Escherichia coli str. K-12 substr. 
MG1655", + "publication_titles": ["A more accurate sequence comparison between genomes of Escherichia coli K12 W3110 and MG1655 strains", "ASAP: Escherichia coli K-12 strain MG1655 version m56", "Workshop on Annotation of Escherichia coli K-12", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "Direct Submission", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "The complete genome sequence of Escherichia coli K-12", "A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction"], + "publication_authors": ["Riley,M., Abe,T., Arnaud,M.B., Berlyn,M.K., Blattner,F.R., Chaudhuri,R.R., Glasner,J.D., Horiuchi,T., Keseler,I.M., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Rudd,K.E., Serres,M.H., Thomas,G.H., Thomson,N.R., Wishart,D. and Wanner,B.L.", "Perna,N.T.", "Hayashi,K., Morooka,N., Yamamoto,Y., Fujita,K., Isono,K., Choi,S., Ohtsubo,E., Baba,T., Wanner,B.L., Mori,H. and Horiuchi,T.", "Blattner,F.R., Plunkett,G. III, Bloch,C.A., Perna,N.T., Burland,V., Riley,M., Collado-Vides,J., Glasner,J.D., Rode,C.K., Mayhew,G.F., Gregor,J., Davis,N.W., Kirkpatrick,H.A., Goeden,M.A., Rose,D.J., Mau,B. and Shao,Y.", "Plunkett,G. III.", "Rudd,K.E.", "Hayashi,K., Morooka,N., Mori,H. and Horiuchi,T.", "Blattner,F.R. and Plunkett,G. III.", "Arnaud,M., Berlyn,M.K.B., Blattner,F.R., Galperin,M.Y., Glasner,J.D., Horiuchi,T., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Riley,M., Rudd,K.E., Serres,M.H., Thomas,G.H. and Wanner,B.L.", "Glasner,J.D., Perna,N.T., Plunkett,G. III, Anderson,B.D., Bockhorst,J., Hu,J.C., Riley,M., Rudd,K.E. and Serres,M.H."], + "size": 4641652, + "num_contigs": 1, + "genome_type": "finished isolate", + "gc_content": 0.50791, + "taxonomy": "cellular organisms; Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli; Escherichia coli K-12", + "mean_contig_length": 4641652.0, + "external_origination_date": "11-Oct-2018", + "original_source_file_name": "GCF_000005845.2_ASM584v2_genomic.gbff", + "cds_count": 4357, + "feature_count": 4355, + "mrna_count": 0, + "non_coding_feature_count": 1187, + "assembly_ref": "44640:2:1", + "source_id": "NC_000913", + "feature_counts": { + "CDS": 4357, + "gene": 4566, + "misc_feature": 48, + "misc_recomb": 1, + "mobile_element": 49, + "ncRNA": 72, + "non_coding_features": 211, + "protein_encoding_gene": 4355, + "rRNA": 22, + "rep_origin": 1, + "repeat_region": 697, + "tRNA": 86 + }, + "source": "RefSeq", + "warnings": ["For prokaryotes, CDS array should generally be the same length as the Features array."], + "shared_users": ["bsadkhin", "luj"], + "creation_date": "2019-11-02T05:31:00+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::44640:3", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "publication_titles": ["ASAP: Escherichia coli K-12 strain MG1655 version m56", "Workshop on Annotation of Escherichia coli K-12", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "The complete genome sequence of Escherichia coli K-12"], + "scientific_name": ["Escherichia coli str. K-12 substr. 
MG1655"], + "taxonomy": ["Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli", "; Escherichia coli K-12"] + } + }, { + "object_name": "Narrative.1569012281066", + "workspace_id": 44150, + "object_id": 1, + "object_version": 15, + "timestamp": 1582215852059, + "workspace_type_name": "Narrative", + "creator": "eapearson", + "data": { + "narrative_title": "Relation Engine (RE) Landing Page Demo", + "is_narratorial": false, + "data_objects": [{ + "name": "Escherichia_coli_2", + "obj_type": "KBaseGenomes.Genome-12.3" + }, { + "name": "Pseudomonas_aeruginosa", + "obj_type": "KBaseGenomes.Genome-12.3" + }], + "owner": "eapearson", + "modified_at": 1582215852000, + "cells": [{ + "desc": "This narrative provide links to ontology and taxonomy landing pages.\n", + "cell_type": "markdown" + }, { + "desc": "Ontology Landing Page Examples\nThere is no object landing page support for re ontology yet, so we present some links to interesting ontology term landing pages:\nLinked Features\nThis one should have linked features:\nhatching behavior\n\nNote: There were linked features appearing a few days ago, but not now.\n\nLarge Graph\nneed an example\nMany children\nneed an example\n", + "cell_type": "markdown" + }, { + "desc": "Taxonomy Examples\nObjects on the left don't have relations in place to their taxons, so the landing page shows an error message.\nThe examples from below use refdata objects because their relationships have been manually built.\nRelationships are being built in real time, although given the mis-assignments at present it is a bit difficult to trace. The E. coli in the data panel to is actually linked from the \"E. coli\" taxon landing page you can get to from the genome landing page cited below. You can get back to this narrative from that link as well!\n", + "cell_type": "markdown" + }, { + "desc": "Genome Landing Page\n\nE. 
coli refdata\nThis link leads to a Genome landing page which shows the lineage widget rewritten to use the Relation Engine to:\n- get the taxon ref given the object ref\n- get the taxon for the taxon ref\n- get the lineage for the taxon ref\nThe lineage widget shows the existing lineage widget, which derives its data from the genome object itself (scientific name and lineage).\nClicking on the scientific name or any class in the lineage will open the associated taxonomy landing page.\nThe linked objects tab has 3 items.\n", + "cell_type": "markdown" + }, { + "desc": "Genome Landing Page\n\nPseudomonas aeruginosa refdata\nThis link leads to a Genome landing page which shows the lineage widget rewritten to use the Relation Engine to:\n- get the taxon ref given the object ref\n- get the taxon for the taxon ref\n- get the lineage for the taxon ref\nThe lineage widget shows the existing lineage widget, which derives its data from the genome object itself (scientific name and lineage).\nClicking on the scientific name or any class in the lineage will open the associated taxonomy landing page.\nThe linked objects tab has 442 linked objects shows the scale better.\n", + "cell_type": "markdown" + }], + "total_cells": 5, + "static_narrative_saved": "1573231380073", + "static_narrative_ref": "/44150/14", + "shared_users": ["scanon", "eapearson"], + "creation_date": "2019-09-20T20:44:41+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "4.0", + "obj_type_module": "KBaseNarrative", + "index_runner_ver": "1.9.17" + }, + "id": "WS::44150:1", + "index_name": "narrative_2", + "index_version": 0, + "highlight": { + "cells.desc": ["The E. coli in the data panel to is actually linked from the \"E. coli\" taxon landing page you can get", "Genome Landing Page\n\nE. coli refdata\nThis link leads to a Genome landing page which shows the lineage"] + } + }, { + "object_name": "GCF_000005845.2", + "workspace_id": 42557, + "object_id": 2, + "object_version": 1, + "timestamp": 1484680259109, + "workspace_type_name": "Genome", + "creator": "qzhang", + "data": { + "genome_id": "GCF_000005845.2", + "scientific_name": "Escherichia coli str. K-12 substr. MG1655", + "publication_titles": ["A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction", "A more accurate sequence comparison between genomes of Escherichia coli K12 W3110 and MG1655 strains", "Workshop on Annotation of Escherichia coli K-12", "ASAP: Escherichia coli K-12 strain MG1655 version m56", "Direct Submission", "The complete genome sequence of Escherichia coli K-12"], + "publication_authors": ["", "Hayashi,K., Morooka,N., Yamamoto,Y., Fujita,K., Isono,K., Choi,S., Ohtsubo,E., Baba,T., Wanner,B.L., Mori,H. and Horiuchi,T.", "Riley,M., Abe,T., Arnaud,M.B., Berlyn,M.K., Blattner,F.R., Chaudhuri,R.R., Glasner,J.D., Horiuchi,T., Keseler,I.M., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Rudd,K.E., Serres,M.H., Thomas,G.H., Thomson,N.R., Wishart,D. and Wanner,B.L.", "Glasner,J.D., Perna,N.T., Plunkett,G. III, Anderson,B.D., Bockhorst,J., Hu,J.C., Riley,M., Rudd,K.E. and Serres,M.H.", "Blattner,F.R. and Plunkett,G. III.", "Plunkett,G. III.", "Hayashi,K., Morooka,N., Mori,H. and Horiuchi,T.", "Blattner,F.R., Plunkett,G. 
III, Bloch,C.A., Perna,N.T., Burland,V., Riley,M., Collado-Vides,J., Glasner,J.D., Rode,C.K., Mayhew,G.F., Gregor,J., Davis,N.W., Kirkpatrick,H.A., Goeden,M.A., Rose,D.J., Mau,B. and Shao,Y.", "Rudd,K.E.", "Arnaud,M., Berlyn,M.K.B., Blattner,F.R., Galperin,M.Y., Glasner,J.D., Horiuchi,T., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Riley,M., Rudd,K.E., Serres,M.H., Thomas,G.H. and Wanner,B.L.", "Perna,N.T."], + "size": 4641652, + "num_contigs": 1, + "genome_type": null, + "gc_content": 0.5079070985933456, + "taxonomy": "cellular organisms; Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli; Escherichia coli K-12", + "mean_contig_length": null, + "external_origination_date": "08-AUG-2016", + "original_source_file_name": "GCF_000005845.2_ASM584v2_genomic.gbff", + "cds_count": 4140, + "feature_count": 4498, + "mrna_count": 4140, + "non_coding_feature_count": 0, + "assembly_ref": "15792:27697:1", + "source_id": "NC_000913", + "feature_counts": null, + "source": "refseq", + "warnings": null, + "shared_users": ["filipeliu"], + "creation_date": "2019-04-29T19:42:33+0000", + "is_public": true, + "copied": "15792/27699/1", + "tags": ["narrative"], + "obj_type_version": "12.3", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::42557:2", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "publication_titles": ["Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "Workshop on Annotation of Escherichia coli K-12", "ASAP: Escherichia coli K-12 strain MG1655 version m56", "The complete genome sequence of Escherichia coli K-12"], + "scientific_name": ["Escherichia coli str. K-12 substr. MG1655"], + "taxonomy": ["Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli", "; Escherichia coli K-12"] + } + }, { + "object_name": "GCF_000005845.2.RAST", + "workspace_id": 42557, + "object_id": 3, + "object_version": 1, + "timestamp": 1556576383612, + "workspace_type_name": "Genome", + "creator": "filipeliu", + "data": { + "genome_id": "GCF_000005845.2.RAST", + "scientific_name": "Escherichia coli str. K-12 substr. MG1655", + "publication_titles": ["A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction", "A more accurate sequence comparison between genomes of Escherichia coli K12 W3110 and MG1655 strains", "Workshop on Annotation of Escherichia coli K-12", "ASAP: Escherichia coli K-12 strain MG1655 version m56", "Direct Submission", "The complete genome sequence of Escherichia coli K-12"], + "publication_authors": ["", "Hayashi,K., Morooka,N., Yamamoto,Y., Fujita,K., Isono,K., Choi,S., Ohtsubo,E., Baba,T., Wanner,B.L., Mori,H. and Horiuchi,T.", "Riley,M., Abe,T., Arnaud,M.B., Berlyn,M.K., Blattner,F.R., Chaudhuri,R.R., Glasner,J.D., Horiuchi,T., Keseler,I.M., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Rudd,K.E., Serres,M.H., Thomas,G.H., Thomson,N.R., Wishart,D. and Wanner,B.L.", "Glasner,J.D., Perna,N.T., Plunkett,G. III, Anderson,B.D., Bockhorst,J., Hu,J.C., Riley,M., Rudd,K.E. and Serres,M.H.", "Blattner,F.R. and Plunkett,G. 
III.", "Plunkett,G. III.", "Hayashi,K., Morooka,N., Mori,H. and Horiuchi,T.", "Blattner,F.R., Plunkett,G. III, Bloch,C.A., Perna,N.T., Burland,V., Riley,M., Collado-Vides,J., Glasner,J.D., Rode,C.K., Mayhew,G.F., Gregor,J., Davis,N.W., Kirkpatrick,H.A., Goeden,M.A., Rose,D.J., Mau,B. and Shao,Y.", "Rudd,K.E.", "Arnaud,M., Berlyn,M.K.B., Blattner,F.R., Galperin,M.Y., Glasner,J.D., Horiuchi,T., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Riley,M., Rudd,K.E., Serres,M.H., Thomas,G.H. and Wanner,B.L.", "Perna,N.T."], + "size": 4641652, + "num_contigs": 1, + "genome_type": null, + "gc_content": 0.508, + "taxonomy": "cellular organisms; Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli; Escherichia coli K-12", + "mean_contig_length": null, + "external_origination_date": "08-AUG-2016", + "original_source_file_name": "GCF_000005845.2_ASM584v2_genomic.gbff", + "cds_count": 4140, + "feature_count": 4140, + "mrna_count": 4140, + "non_coding_feature_count": 358, + "assembly_ref": "15792:27697:1", + "source_id": "NC_000913", + "feature_counts": { + "CDS": 4140, + "mRNA": 4140, + "non_coding_features": 358, + "protein_encoding_gene": 4140 + }, + "source": "RefSeq", + "warnings": ["Genome molecule_type Unknown is not expected for domain Bacteria."], + "shared_users": ["filipeliu"], + "creation_date": "2019-04-29T22:19:46+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "15.1", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::42557:3", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "publication_titles": ["Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "Workshop on Annotation of Escherichia coli K-12", "ASAP: Escherichia coli K-12 strain MG1655 version m56", "The complete genome sequence of Escherichia coli K-12"], + "scientific_name": ["Escherichia coli str. K-12 substr. MG1655"], + "taxonomy": ["Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli", "; Escherichia coli K-12"] + } + }, { + "object_name": "GCF_000005845.2_ASM584v2_genomic.gbff_genome", + "workspace_id": 39819, + "object_id": 2, + "object_version": 6, + "timestamp": 1548276078101, + "workspace_type_name": "Genome", + "creator": "jjeffryes", + "data": { + "genome_id": "GCF_000005845.2_ASM584v2_genomic.gbff_genome", + "scientific_name": "Escherichia coli str. K-12 substr. MG1655", + "publication_titles": ["Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "ASAP: Escherichia coli K-12 strain MG1655 version m56", "Direct Submission", "Workshop on Annotation of Escherichia coli K-12", "A more accurate sequence comparison between genomes of Escherichia coli K12 W3110 and MG1655 strains", "A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "The complete genome sequence of Escherichia coli K-12"], + "publication_authors": ["Blattner,F.R., Plunkett,G. III, Bloch,C.A., Perna,N.T., Burland,V., Riley,M., Collado-Vides,J., Glasner,J.D., Rode,C.K., Mayhew,G.F., Gregor,J., Davis,N.W., Kirkpatrick,H.A., Goeden,M.A., Rose,D.J., Mau,B. 
and Shao,Y.", "Perna,N.T.", "Hayashi,K., Morooka,N., Yamamoto,Y., Fujita,K., Isono,K., Choi,S., Ohtsubo,E., Baba,T., Wanner,B.L., Mori,H. and Horiuchi,T.", "Arnaud,M., Berlyn,M.K.B., Blattner,F.R., Galperin,M.Y., Glasner,J.D., Horiuchi,T., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Riley,M., Rudd,K.E., Serres,M.H., Thomas,G.H. and Wanner,B.L.", "Riley,M., Abe,T., Arnaud,M.B., Berlyn,M.K., Blattner,F.R., Chaudhuri,R.R., Glasner,J.D., Horiuchi,T., Keseler,I.M., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Rudd,K.E., Serres,M.H., Thomas,G.H., Thomson,N.R., Wishart,D. and Wanner,B.L.", "Rudd,K.E.", "Glasner,J.D., Perna,N.T., Plunkett,G. III, Anderson,B.D., Bockhorst,J., Hu,J.C., Riley,M., Rudd,K.E. and Serres,M.H.", "Hayashi,K., Morooka,N., Mori,H. and Horiuchi,T.", "Blattner,F.R. and Plunkett,G. III.", "Plunkett,G. III."], + "size": 4641652, + "num_contigs": 1, + "genome_type": "draft isolate", + "gc_content": 0.50791, + "taxonomy": "cellular organisms; Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli; Escherichia coli K-12", + "mean_contig_length": 4641652.0, + "external_origination_date": "08-Aug-2016", + "original_source_file_name": "GCF_000005845.2_ASM584v2_genomic.gbff", + "cds_count": 4319, + "feature_count": 4319, + "mrna_count": 0, + "non_coding_feature_count": 773, + "assembly_ref": "39819:5:3", + "source_id": "NC_000913", + "feature_counts": { + "CDS": 4319, + "gene": 4498, + "misc_feature": 11, + "mobile_element": 49, + "ncRNA": 65, + "non_coding_features": 179, + "protein_encoding_gene": 4319, + "rRNA": 22, + "rep_origin": 1, + "repeat_region": 355, + "tRNA": 89, + "tmRNA": 2 + }, + "source": "RefSeq", + "warnings": [], + "shared_users": ["jjeffryes"], + "creation_date": "2019-01-23T01:24:11+0000", + "is_public": true, + "copied": null, + "tags": [], + "obj_type_version": "15.1", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::39819:2", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "publication_titles": ["Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "ASAP: Escherichia coli K-12 strain MG1655 version m56", "Workshop on Annotation of Escherichia coli K-12", "The complete genome sequence of Escherichia coli K-12"], + "scientific_name": ["Escherichia coli str. K-12 substr. MG1655"], + "taxonomy": ["Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli", "; Escherichia coli K-12"] + } + }, { + "object_name": "GCF_000005845.2_ASM584v2_genomic.gbff_genome", + "workspace_id": 39218, + "object_id": 3, + "object_version": 1, + "timestamp": 1547241468826, + "workspace_type_name": "Genome", + "creator": "jjeffryes", + "data": { + "genome_id": "GCF_000005845.2_ASM584v2_genomic.gbff_genome", + "scientific_name": "Escherichia coli str. K-12 substr. 
MG1655", + "publication_titles": ["A more accurate sequence comparison between genomes of Escherichia coli K12 W3110 and MG1655 strains", "ASAP: Escherichia coli K-12 strain MG1655 version m56", "Workshop on Annotation of Escherichia coli K-12", "Direct Submission", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "The complete genome sequence of Escherichia coli K-12", "A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction"], + "publication_authors": ["Riley,M., Abe,T., Arnaud,M.B., Berlyn,M.K., Blattner,F.R., Chaudhuri,R.R., Glasner,J.D., Horiuchi,T., Keseler,I.M., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Rudd,K.E., Serres,M.H., Thomas,G.H., Thomson,N.R., Wishart,D. and Wanner,B.L.", "Hayashi,K., Morooka,N., Yamamoto,Y., Fujita,K., Isono,K., Choi,S., Ohtsubo,E., Baba,T., Wanner,B.L., Mori,H. and Horiuchi,T.", "Perna,N.T.", "Blattner,F.R., Plunkett,G. III, Bloch,C.A., Perna,N.T., Burland,V., Riley,M., Collado-Vides,J., Glasner,J.D., Rode,C.K., Mayhew,G.F., Gregor,J., Davis,N.W., Kirkpatrick,H.A., Goeden,M.A., Rose,D.J., Mau,B. and Shao,Y.", "Rudd,K.E.", "Plunkett,G. III.", "Hayashi,K., Morooka,N., Mori,H. and Horiuchi,T.", "Blattner,F.R. and Plunkett,G. III.", "Arnaud,M., Berlyn,M.K.B., Blattner,F.R., Galperin,M.Y., Glasner,J.D., Horiuchi,T., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Riley,M., Rudd,K.E., Serres,M.H., Thomas,G.H. and Wanner,B.L.", "Glasner,J.D., Perna,N.T., Plunkett,G. III, Anderson,B.D., Bockhorst,J., Hu,J.C., Riley,M., Rudd,K.E. and Serres,M.H."], + "size": 4641652, + "num_contigs": 1, + "genome_type": null, + "gc_content": 0.50791, + "taxonomy": "cellular organisms; Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli; Escherichia coli K-12", + "mean_contig_length": 4641652.0, + "external_origination_date": "08-Aug-2016", + "original_source_file_name": "GCF_000005845.2_ASM584v2_genomic.gbff", + "cds_count": 4319, + "feature_count": 4319, + "mrna_count": 0, + "non_coding_feature_count": 773, + "assembly_ref": "39218:2:1", + "source_id": "NC_000913", + "feature_counts": { + "CDS": 4319, + "gene": 4498, + "misc_feature": 11, + "mobile_element": 49, + "ncRNA": 65, + "non_coding_features": 179, + "protein_encoding_gene": 4319, + "rRNA": 22, + "rep_origin": 1, + "repeat_region": 355, + "tRNA": 89, + "tmRNA": 2 + }, + "source": "RefSeq", + "warnings": [], + "shared_users": ["jjeffryes", "miriamtester"], + "creation_date": "2019-01-11T21:17:48+0000", + "is_public": true, + "copied": null, + "tags": [], + "obj_type_version": "15.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::39218:3", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "publication_titles": ["ASAP: Escherichia coli K-12 strain MG1655 version m56", "Workshop on Annotation of Escherichia coli K-12", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "The complete genome sequence of Escherichia coli K-12"], + "scientific_name": ["Escherichia coli str. K-12 substr. 
MG1655"], + "taxonomy": ["Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli", "; Escherichia coli K-12"] + } + }, { + "object_name": "Prokka_genome", + "workspace_id": 37649, + "object_id": 3, + "object_version": 1, + "timestamp": 1543271602030, + "workspace_type_name": "Genome", + "creator": "jjeffryes", + "data": { + "genome_id": "GCF_000005845.2_ASM584v2_genomic", + "scientific_name": "Escherichia coli str. K-12 substr. MG1655", + "publication_titles": ["ASAP: Escherichia coli K-12 strain MG1655 version m56", "A more accurate sequence comparison between genomes of Escherichia coli K12 W3110 and MG1655 strains", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "The complete genome sequence of Escherichia coli K-12", "Direct Submission", "Workshop on Annotation of Escherichia coli K-12", "A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005"], + "publication_authors": ["Hayashi,K., Morooka,N., Yamamoto,Y., Fujita,K., Isono,K., Choi,S., Ohtsubo,E., Baba,T., Wanner,B.L., Mori,H. and Horiuchi,T.", "Hayashi,K., Morooka,N., Mori,H. and Horiuchi,T.", "Arnaud,M., Berlyn,M.K.B., Blattner,F.R., Galperin,M.Y., Glasner,J.D., Horiuchi,T., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Riley,M., Rudd,K.E., Serres,M.H., Thomas,G.H. and Wanner,B.L.", "Plunkett,G. III.", "Rudd,K.E.", "Glasner,J.D., Perna,N.T., Plunkett,G. III, Anderson,B.D., Bockhorst,J., Hu,J.C., Riley,M., Rudd,K.E. and Serres,M.H.", "Perna,N.T.", "Blattner,F.R. and Plunkett,G. III.", "Blattner,F.R., Plunkett,G. III, Bloch,C.A., Perna,N.T., Burland,V., Riley,M., Collado-Vides,J., Glasner,J.D., Rode,C.K., Mayhew,G.F., Gregor,J., Davis,N.W., Kirkpatrick,H.A., Goeden,M.A., Rose,D.J., Mau,B. and Shao,Y.", "Riley,M., Abe,T., Arnaud,M.B., Berlyn,M.K., Blattner,F.R., Chaudhuri,R.R., Glasner,J.D., Horiuchi,T., Keseler,I.M., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Rudd,K.E., Serres,M.H., Thomas,G.H., Thomson,N.R., Wishart,D. 
and Wanner,B.L."], + "size": 4641652, + "num_contigs": 1, + "genome_type": null, + "gc_content": 0.50791, + "taxonomy": "cellular organisms; Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli; Escherichia coli K-12", + "mean_contig_length": 4641652.0, + "external_origination_date": "08-Aug-2016", + "original_source_file_name": "GCF_000005845.2_ASM584v2_genomic.gbff", + "cds_count": 4319, + "feature_count": 4319, + "mrna_count": 0, + "non_coding_feature_count": 773, + "assembly_ref": "30957:2:1", + "source_id": "NC_000913", + "feature_counts": { + "CDS": 4319, + "gene": 4498, + "misc_feature": 11, + "mobile_element": 49, + "ncRNA": 65, + "non-protein_encoding_gene": 179, + "protein_encoding_gene": 4319, + "rRNA": 22, + "rep_origin": 1, + "repeat_region": 355, + "tRNA": 89, + "tmRNA": 2 + }, + "source": "RefSeq", + "warnings": [], + "shared_users": ["jjeffryes"], + "creation_date": "2018-11-26T22:33:22+0000", + "is_public": true, + "copied": null, + "tags": [], + "obj_type_version": "15.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "id": "WS::37649:3", + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "publication_titles": ["ASAP: Escherichia coli K-12 strain MG1655 version m56", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "The complete genome sequence of Escherichia coli K-12", "Workshop on Annotation of Escherichia coli K-12", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005"], + "scientific_name": ["Escherichia coli str. K-12 substr. MG1655"], + "taxonomy": ["Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli", "; Escherichia coli K-12"] + } + }, { + "id": "WS::37649:2", + "object_name": "GCF_000005845.2_ASM584v2_genomic", + "workspace_id": 37649, + "object_id": 2, + "object_version": 1, + "timestamp": 1522766145529, + "workspace_type_name": "Genome", + "creator": "jjeffryes", + "data": { + "genome_id": "GCF_000005845.2_ASM584v2_genomic", + "scientific_name": "Escherichia coli str. K-12 substr. MG1655", + "publication_titles": ["ASAP: Escherichia coli K-12 strain MG1655 version m56", "A more accurate sequence comparison between genomes of Escherichia coli K12 W3110 and MG1655 strains", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "The complete genome sequence of Escherichia coli K-12", "Direct Submission", "Workshop on Annotation of Escherichia coli K-12", "A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005"], + "publication_authors": ["Hayashi,K., Morooka,N., Yamamoto,Y., Fujita,K., Isono,K., Choi,S., Ohtsubo,E., Baba,T., Wanner,B.L., Mori,H. and Horiuchi,T.", "Hayashi,K., Morooka,N., Mori,H. and Horiuchi,T.", "Arnaud,M., Berlyn,M.K.B., Blattner,F.R., Galperin,M.Y., Glasner,J.D., Horiuchi,T., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Riley,M., Rudd,K.E., Serres,M.H., Thomas,G.H. and Wanner,B.L.", "Plunkett,G. III.", "Rudd,K.E.", "Glasner,J.D., Perna,N.T., Plunkett,G. III, Anderson,B.D., Bockhorst,J., Hu,J.C., Riley,M., Rudd,K.E. and Serres,M.H.", "Perna,N.T.", "Blattner,F.R. and Plunkett,G. III.", "Blattner,F.R., Plunkett,G. 
III, Bloch,C.A., Perna,N.T., Burland,V., Riley,M., Collado-Vides,J., Glasner,J.D., Rode,C.K., Mayhew,G.F., Gregor,J., Davis,N.W., Kirkpatrick,H.A., Goeden,M.A., Rose,D.J., Mau,B. and Shao,Y.", "Riley,M., Abe,T., Arnaud,M.B., Berlyn,M.K., Blattner,F.R., Chaudhuri,R.R., Glasner,J.D., Horiuchi,T., Keseler,I.M., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Rudd,K.E., Serres,M.H., Thomas,G.H., Thomson,N.R., Wishart,D. and Wanner,B.L."], + "size": 4641652, + "num_contigs": 1, + "genome_type": null, + "gc_content": 0.50791, + "taxonomy": "cellular organisms; Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli; Escherichia coli K-12", + "mean_contig_length": 4641652.0, + "external_origination_date": "08-Aug-2016", + "original_source_file_name": "GCF_000005845.2_ASM584v2_genomic.gbff", + "cds_count": 4319, + "feature_count": 4319, + "mrna_count": 0, + "non_coding_feature_count": 773, + "assembly_ref": "30957:2:1", + "source_id": "NC_000913", + "feature_counts": { + "CDS": 4319, + "gene": 4498, + "misc_feature": 11, + "mobile_element": 49, + "ncRNA": 65, + "non-protein_encoding_gene": 179, + "protein_encoding_gene": 4319, + "rRNA": 22, + "rep_origin": 1, + "repeat_region": 355, + "tRNA": 89, + "tmRNA": 2 + }, + "source": "RefSeq", + "warnings": [], + "shared_users": ["jjeffryes"], + "creation_date": "2018-11-26T22:22:24+0000", + "is_public": true, + "copied": "30957/3/1", + "tags": [], + "obj_type_version": "14.1", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "publication_titles": ["ASAP: Escherichia coli K-12 strain MG1655 version m56", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "The complete genome sequence of Escherichia coli K-12", "Workshop on Annotation of Escherichia coli K-12", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005"], + "scientific_name": ["Escherichia coli str. K-12 substr. MG1655"], + "taxonomy": ["Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli", "; Escherichia coli K-12"] + } + }, { + "id": "WS::37649:5", + "object_name": "Rast_genome", + "workspace_id": 37649, + "object_id": 5, + "object_version": 1, + "timestamp": 1543294049109, + "workspace_type_name": "Genome", + "creator": "jjeffryes", + "data": { + "genome_id": "Rast_genome", + "scientific_name": "Escherichia coli str. K-12 substr. MG1655", + "publication_titles": ["ASAP: Escherichia coli K-12 strain MG1655 version m56", "A more accurate sequence comparison between genomes of Escherichia coli K12 W3110 and MG1655 strains", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "The complete genome sequence of Escherichia coli K-12", "Direct Submission", "Workshop on Annotation of Escherichia coli K-12", "A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005"], + "publication_authors": ["Hayashi,K., Morooka,N., Yamamoto,Y., Fujita,K., Isono,K., Choi,S., Ohtsubo,E., Baba,T., Wanner,B.L., Mori,H. and Horiuchi,T.", "Hayashi,K., Morooka,N., Mori,H. and Horiuchi,T.", "Arnaud,M., Berlyn,M.K.B., Blattner,F.R., Galperin,M.Y., Glasner,J.D., Horiuchi,T., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. 
III, Riley,M., Rudd,K.E., Serres,M.H., Thomas,G.H. and Wanner,B.L.", "Plunkett,G. III.", "Rudd,K.E.", "Glasner,J.D., Perna,N.T., Plunkett,G. III, Anderson,B.D., Bockhorst,J., Hu,J.C., Riley,M., Rudd,K.E. and Serres,M.H.", "Perna,N.T.", "Blattner,F.R. and Plunkett,G. III.", "Blattner,F.R., Plunkett,G. III, Bloch,C.A., Perna,N.T., Burland,V., Riley,M., Collado-Vides,J., Glasner,J.D., Rode,C.K., Mayhew,G.F., Gregor,J., Davis,N.W., Kirkpatrick,H.A., Goeden,M.A., Rose,D.J., Mau,B. and Shao,Y.", "Riley,M., Abe,T., Arnaud,M.B., Berlyn,M.K., Blattner,F.R., Chaudhuri,R.R., Glasner,J.D., Horiuchi,T., Keseler,I.M., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Rudd,K.E., Serres,M.H., Thomas,G.H., Thomson,N.R., Wishart,D. and Wanner,B.L."], + "size": 4641652, + "num_contigs": 1, + "genome_type": null, + "gc_content": 0.508, + "taxonomy": "cellular organisms; Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli; Escherichia coli K-12", + "mean_contig_length": 4641652.0, + "external_origination_date": "08-Aug-2016", + "original_source_file_name": "GCF_000005845.2_ASM584v2_genomic.gbff", + "cds_count": 4319, + "feature_count": 4319, + "mrna_count": 0, + "non_coding_feature_count": 773, + "assembly_ref": "30957:2:1", + "source_id": "NC_000913", + "feature_counts": { + "CDS": 4319, + "gene": 4319, + "mRNA": 0, + "non-protein_encoding_gene": 773, + "protein_encoding_gene": 4319 + }, + "source": "RefSeq", + "warnings": [], + "shared_users": ["jjeffryes"], + "creation_date": "2018-11-27T04:47:29+0000", + "is_public": true, + "copied": null, + "tags": [], + "obj_type_version": "15.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "publication_titles": ["ASAP: Escherichia coli K-12 strain MG1655 version m56", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "The complete genome sequence of Escherichia coli K-12", "Workshop on Annotation of Escherichia coli K-12", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005"], + "scientific_name": ["Escherichia coli str. K-12 substr. MG1655"], + "taxonomy": ["Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli", "; Escherichia coli K-12"] + } + }, { + "id": "WS::33192:21", + "object_name": "Ecoli_go_term", + "workspace_id": 33192, + "object_id": 21, + "object_version": 1, + "timestamp": 1572897735244, + "workspace_type_name": "Genome", + "creator": "luj", + "data": { + "genome_id": "Ecoli_go_term", + "scientific_name": "Escherichia coli str. K-12 substr. MG1655", + "publication_titles": ["A more accurate sequence comparison between genomes of Escherichia coli K12 W3110 and MG1655 strains", "ASAP: Escherichia coli K-12 strain MG1655 version m56", "Workshop on Annotation of Escherichia coli K-12", "Direct Submission", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "The complete genome sequence of Escherichia coli K-12", "A manual approach to accurate translation start site annotation: an E. coli K-12 case study", "Escherichia coli K-12 MG1655 yqiK-rfaE intergenic region, genomic sequence correction"], + "publication_authors": ["Riley,M., Abe,T., Arnaud,M.B., Berlyn,M.K., Blattner,F.R., Chaudhuri,R.R., Glasner,J.D., Horiuchi,T., Keseler,I.M., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. 
III, Rudd,K.E., Serres,M.H., Thomas,G.H., Thomson,N.R., Wishart,D. and Wanner,B.L.", "Hayashi,K., Morooka,N., Yamamoto,Y., Fujita,K., Isono,K., Choi,S., Ohtsubo,E., Baba,T., Wanner,B.L., Mori,H. and Horiuchi,T.", "Perna,N.T.", "Plunkett,G. III.", "Rudd,K.E.", "Blattner,F.R., Plunkett,G. III, Bloch,C.A., Perna,N.T., Burland,V., Riley,M., Collado-Vides,J., Glasner,J.D., Rode,C.K., Mayhew,G.F., Gregor,J., Davis,N.W., Kirkpatrick,H.A., Goeden,M.A., Rose,D.J., Mau,B. and Shao,Y.", "Hayashi,K., Morooka,N., Mori,H. and Horiuchi,T.", "Blattner,F.R. and Plunkett,G. III.", "Arnaud,M., Berlyn,M.K.B., Blattner,F.R., Galperin,M.Y., Glasner,J.D., Horiuchi,T., Kosuge,T., Mori,H., Perna,N.T., Plunkett,G. III, Riley,M., Rudd,K.E., Serres,M.H., Thomas,G.H. and Wanner,B.L.", "Glasner,J.D., Perna,N.T., Plunkett,G. III, Anderson,B.D., Bockhorst,J., Hu,J.C., Riley,M., Rudd,K.E. and Serres,M.H."], + "size": 4641652, + "num_contigs": 1, + "genome_type": "finished isolate", + "gc_content": 0.50791, + "taxonomy": "cellular organisms; Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli; Escherichia coli K-12", + "mean_contig_length": 4641652.0, + "external_origination_date": "11-Oct-2018", + "original_source_file_name": "GCF_000005845.2_ASM584v2_genomic_updated.gbff", + "cds_count": 4357, + "feature_count": 4355, + "mrna_count": 0, + "non_coding_feature_count": 1187, + "assembly_ref": "44640:8:1", + "source_id": "NC_000913", + "feature_counts": { + "CDS": 4357, + "gene": 4566, + "misc_feature": 48, + "misc_recomb": 1, + "mobile_element": 49, + "ncRNA": 72, + "non_coding_features": 211, + "protein_encoding_gene": 4355, + "rRNA": 22, + "rep_origin": 1, + "repeat_region": 697, + "tRNA": 86 + }, + "source": "RefSeq", + "warnings": ["For prokaryotes, CDS array should generally be the same length as the Features array."], + "shared_users": ["jayrbolton"], + "creation_date": "2020-01-08T20:31:06+0000", + "is_public": true, + "copied": "44640/9/1", + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "publication_titles": ["ASAP: Escherichia coli K-12 strain MG1655 version m56", "Workshop on Annotation of Escherichia coli K-12", "Escherichia coli K-12: a cooperatively developed annotation snapshot--2005", "Highly accurate genome sequences of Escherichia coli K-12 strains MG1655 and W3110", "The complete genome sequence of Escherichia coli K-12"], + "scientific_name": ["Escherichia coli str. K-12 substr. MG1655"], + "taxonomy": ["Proteobacteria; Gammaproteobacteria; Enterobacterales; Enterobacteriaceae; Escherichia; Escherichia coli", "; Escherichia coli K-12"] + } + }, { + "id": "WS::33192:34", + "object_name": "GCF_004368345.1", + "workspace_id": 33192, + "object_id": 34, + "object_version": 2, + "timestamp": 1578704620951, + "workspace_type_name": "Genome", + "creator": "jayrbolton", + "data": { + "genome_id": "GCF_004368345.1", + "scientific_name": "Campylobacter coli", + "publication_titles": ["Direct Submission", "Genetic characteristics of Campylobacter in China"], + "publication_authors": ["Zhang,M.", "Zhang,M. 
and Liang,H."], + "size": 1666978, + "num_contigs": 19, + "genome_type": null, + "gc_content": 0.31331, + "taxonomy": "Bacteria; Proteobacteria; delta/epsilon subdivisions; Epsilonproteobacteria; Campylobacterales; Campylobacteraceae; Campylobacter", + "mean_contig_length": 87735.68421052632, + "external_origination_date": "27-Mar-2019", + "original_source_file_name": "GCF_004368345.1_ASM436834v1_genomic.gbff", + "cds_count": 1687, + "feature_count": 1687, + "mrna_count": 0, + "non_coding_feature_count": 92, + "assembly_ref": "15792:232458:1", + "source_id": "NZ_JXUB01000001", + "feature_counts": { + "CDS": 1687, + "gene": 1732, + "ncRNA": 2, + "non_coding_features": 92, + "non_coding_genes": 45, + "protein_encoding_gene": 1687, + "rRNA": 3, + "regulatory": 1, + "repeat_region": 1, + "tRNA": 39, + "tmRNA": 1 + }, + "source": "RefSeq", + "warnings": ["SUSPECT: CDS TN92_RS08435_CDS_1 has a length of 164 which is not consistent with the length of the translation included (53 amino acids).", "SUSPECT: CDS TN92_RS07230_CDS_1 has a length of 587 which is not consistent with the length of the translation included (194 amino acids).", "SUSPECT: CDS TN92_RS00010_CDS_1 has a length of 98 which is not consistent with the length of the translation included (31 amino acids)."], + "shared_users": ["jayrbolton"], + "creation_date": "2020-01-11T00:52:48+0000", + "is_public": true, + "copied": null, + "tags": ["narrative"], + "obj_type_version": "17.0", + "obj_type_module": "KBaseGenomes", + "index_runner_ver": "1.9.17" + }, + "index_name": "genome_2", + "index_version": 0, + "highlight": { + "scientific_name": ["Campylobacter coli"] + } + }], + "access_group_narrative_info": { + "44640": ["Ecoli_go", 1, 1573106371000, "luj", "Zhenyuan Lu"], + "50631": ["Public Log", 1, 1589833955000, "bsadkhin", "Boris Sadkhin"], + "45320": ["Refdata import", 1, 1586801969000, "jayrbolton", "Jay Bolton"], + "52489": ["Taxonomy Landing Page Testing", 1, 1609804811000, "eapearson", "Erik A Pearson"], + "33192": ["Test fiesta", 1, 1599598844000, "jayrbolton", "Jay Bolton"], + "39819": ["Subdata", 1, 1551734679000, "jjeffryes", "James Jeffryes"], + "44783": ["Waiting IN queue but Error?", 1, 1573160060000, "bsadkhin", "Boris Sadkhin"], + "48753": ["RE landing page sample links", 1, 1586539906000, "eapearson", "Erik A Pearson"], + "39218": ["GBFF importer", 1, 1547241525000, "miriamtester", "Miriam Tester Land"], + "37649": ["RE_test", 1, 1543330685000, "jjeffryes", "James Jeffryes"], + "44150": ["Relation Engine (RE) Landing Page Demo", 1, 1582215852000, "eapearson", "Erik A Pearson"], + "44697": ["Ontology landing page links and stuff", 1, 1601927359000, "eapearson", "Erik A Pearson"], + "42557": ["KBase Models", 1, 1580938108000, "filipeliu", "Filipe Alexandre Wang Liu"] + } + }], + "id": "4564119057768642" +} \ No newline at end of file diff --git a/tests/integration/data/legacy/search_objects/case-09.md b/tests/integration/data/legacy/search_objects/case-09.md new file mode 100644 index 0000000..272b455 --- /dev/null +++ b/tests/integration/data/legacy/search_objects/case-09.md @@ -0,0 +1 @@ +Basic ecoli search example with all metadata \ No newline at end of file diff --git a/tests/integration/data/legacy/search_objects/case-10.json b/tests/integration/data/legacy/search_objects/case-10.json new file mode 100644 index 0000000..181cf48 --- /dev/null +++ b/tests/integration/data/legacy/search_objects/case-10.json @@ -0,0 +1,40 @@ +{ + "rpc": { + "id": "xyz", + "method": "KBaseSearchEngine.search_objects", + "version": 
"1.1", + "params": [{ + "access_filter": { + "with_private": 0, + "with_public": 1 + }, + "match_filter": { + "exclude_subobjects": 1, + "full_text_in_all": "", + "source_tags": ["refdata"], + "source_tags_blacklist": 0 + }, + "pagination": { + "count": 0, + "start": 0 + }, + "post_processing": { + "ids_only": 1, + "include_highlight": 1, + "skip_data": 1, + "skip_info": 1, + "skip_keys": 1 + } + }] + }, + "cases": [{ + "full_text_in_all": "Prochlorococcus marinus", + "total": 45 + }, { + "full_text_in_all": "Prochlorococcus", + "total": 74 + }, { + "full_text_in_all": "marinus", + "total": 92 + }] +} \ No newline at end of file diff --git a/tests/integration/data/legacy/search_objects/case-10.md b/tests/integration/data/legacy/search_objects/case-10.md new file mode 100644 index 0000000..cb5f78f --- /dev/null +++ b/tests/integration/data/legacy/search_objects/case-10.md @@ -0,0 +1,3 @@ +# Adding additional terms results in narrowing the search. + +The test data provides an rpc call for the legacy search_objects method and three cases of searches which should demonstrate that searches with additional terms should narrow search results if they can. diff --git a/tests/integration/data/legacy/search_types/case-05-request.json b/tests/integration/data/legacy/search_types/case-05-request.json new file mode 100644 index 0000000..3810b2b --- /dev/null +++ b/tests/integration/data/legacy/search_types/case-05-request.json @@ -0,0 +1,17 @@ +{ + "version": "1.1", + "method": "KBaseSearchEngine.search_types", + "id": "6959719268936883", + "params": [{ + "access_filter": { + "with_private": 1, + "with_public": 1 + }, + "match_filter": { + "exclude_subobjects": 1, + "full_text_in_all": "coli", + "source_tags": ["refdata", "noindex"], + "source_tags_blacklist": 1 + } + }] +} \ No newline at end of file diff --git a/tests/integration/data/legacy/search_types/case-05-response.json b/tests/integration/data/legacy/search_types/case-05-response.json new file mode 100644 index 0000000..df69a68 --- /dev/null +++ b/tests/integration/data/legacy/search_types/case-05-response.json @@ -0,0 +1,14 @@ +{ + "id": "6959719268936883", + "version": "1.1", + "result": [{ + "type_to_count": { + "Narrative": 13, + "Taxon": 3318, + "Tree": 3, + "Genome": 3174, + "Workspace": 1 + }, + "search_time": 1506 + }] +} \ No newline at end of file diff --git a/tests/integration/data/legacy/search_types/case-05.md b/tests/integration/data/legacy/search_types/case-05.md new file mode 100644 index 0000000..6c37e73 --- /dev/null +++ b/tests/integration/data/legacy/search_types/case-05.md @@ -0,0 +1 @@ +# Method call to get type counts \ No newline at end of file diff --git a/tests/integration/docker/docker-compose.yaml b/tests/integration/docker/docker-compose.yaml new file mode 100644 index 0000000..05e4be6 --- /dev/null +++ b/tests/integration/docker/docker-compose.yaml @@ -0,0 +1,64 @@ +version: "3.4" + +# This docker-compose is for developer workflows, not for running in production. + +# This + +networks: + kbase-dev: + external: + name: kbase-dev + +services: + searchapi2: + image: searchapi2:${TAG:-dev} + depends_on: + - estunnel + container_name: searchapi2 + # network_mode: host + build: + context: ../../.. + volumes: + - ${PWD}/../../..:/app + networks: + - kbase-dev + ports: + - "127.0.0.1:5000:5000" + dns: + - "8.8.8.8" + environment: + - DEVELOPMENT=1 + - PYTHONUNBUFFERED=true + - ELASTICSEARCH_URL=http://estunnel:9500 + - INDEX_PREFIX=search2 + - INDEX_PREFIX_DELIMITER=. 
+      - INDEX_SUFFIX_DELIMITER=_
+      - USER_PROFILE_URL=https://ci.kbase.us/services/user_profile/rpc
+      - WORKERS=2
+      - LOGLEVEL=WARNING
+  estunnel:
+    # To connect to kbase-ui
+    networks:
+      - kbase-dev
+    # So it can be poked at from the host
+    ports:
+      - "127.0.0.1:9500:9500"
+    build:
+      context: ../estunnel
+    environment:
+      # IP of the Elasticsearch instance within the KBase network in which the SSHHOST resides
+      - IP
+
+      # The KBase SSHHOST to which the tunnel should connect.
+      - SSHHOST
+
+      # invented to have a form similar to SSHPASS
+      # this should be the username for the KBase dev account used below
+
+      - SSHUSER
+      # official env var for SSHPASS
+      # this should be the password for the KBase dev account used above
+
+      - SSHPASS
+      # image: kbase/estunnel
+      # Must have an ssh tunnel to CI Elasticsearch via localhost:9500
diff --git a/tests/integration/estunnel/Dockerfile b/tests/integration/estunnel/Dockerfile
new file mode 100644
index 0000000..23d3191
--- /dev/null
+++ b/tests/integration/estunnel/Dockerfile
@@ -0,0 +1,19 @@
+FROM alpine:3.13
+
+# This image is dedicated to running an SSH tunnel from
+# the docker network to an ES server node.
+
+# We use sshpass to enable ssh to take a password from an environment variable
+RUN apk upgrade --update-cache --available && \
+    apk add --update --no-cache openssh sshpass && \
+    mkdir -p /kb
+
+COPY ./tunnel.sh /kb
+
+EXPOSE 9500
+
+WORKDIR /kb
+
+ENTRYPOINT [ "/bin/sh" ]
+
+CMD [ "/kb/tunnel.sh" ]
\ No newline at end of file
diff --git a/tests/integration/estunnel/README.md b/tests/integration/estunnel/README.md
new file mode 100644
index 0000000..29df70a
--- /dev/null
+++ b/tests/integration/estunnel/README.md
@@ -0,0 +1,26 @@
+# ssh tunnel to elasticsearch host in a container
+
+When running locally in a container, searchapi2 needs an ssh tunnel into the KBase network pointing to an ES cluster host.
+
+The default way to do this is with an ssh tunnel running on the host, which is accessed by the searchapi2 service running in a container via Docker.
+
+However, this does not work on macOS, nor does it work when integrating with a local kbase-ui instance while developing searchapi2 in tandem with a kbase-ui-hosted tool.
+
+One option is to use `host.docker.internal` instead of `localhost` in the searchapi2 container. However, this does not work when using a custom network, which is required for working with kbase-ui.
+
+The Docker image defined in this directory supports running an ssh tunnel for Elasticsearch within the container itself.
+
+Besides solving the issues described above, this tool also takes care of dependencies for us.
+
+## Usage
+
+```bash
+IP="" SSHHOST="" SSHUSER="" SSHPASS="" docker run --rm -p 9500:9500 -e IP -e SSHHOST -e SSHUSER -e SSHPASS --name estunnel --network kbase-dev dev_estunnel
+```
+
+where the following environment variables must be set:
+
+- `IP` is the IP address of one of the hosts in a CI Elasticsearch cluster.
+- `SSHHOST` is the host name to connect to for ssh tunneling.
+- `SSHUSER` is the username for your KBase developer account. Only internal KBase developers may use this tool.
+- `SSHPASS` is the plain-text password for the account provided above.
diff --git a/tests/integration/estunnel/tunnel.sh b/tests/integration/estunnel/tunnel.sh
new file mode 100644
index 0000000..d4467c7
--- /dev/null
+++ b/tests/integration/estunnel/tunnel.sh
@@ -0,0 +1,19 @@
+# Shell script to open an ssh tunnel to an Elasticsearch
+# cluster node hosted at Berkeley KBase.
+ +# Parameterized by: +# IP - the ip address of the ES node machine +# SSHUSER - the dev's username for ssh +# SSHPASS - the dev's password + +sshpass -e ssh \ + -4 \ + -N \ + -o PubkeyAuthentication=no \ + -o StrictHostKeyChecking=no \ + -o ServerAliveInterval=30 \ + -o ServerAliveCountMax=3 \ + -o UserKnownHostsFile=/dev/null \ + -L "0.0.0.0:9500:${IP}:9500" \ + "${SSHUSER}@${SSHHOST}" + \ No newline at end of file diff --git a/tests/integration/test_integration_legacy.py b/tests/integration/test_integration_legacy.py deleted file mode 100644 index f16ad14..0000000 --- a/tests/integration/test_integration_legacy.py +++ /dev/null @@ -1,125 +0,0 @@ -import json -import os -import requests - -from tests.helpers.integration_setup import setup -from src.utils.wait_for_service import wait_for_service -from tests.integration.data import ( - search_request1, - search_response1, - search_request2, - search_response2, - search_request3, - search_response3, - search_request4, - search_response4, - search_request5, - search_response5, -) - - -APP_URL = os.environ.get("APP_URL", 'http://localhost:5000') - -setup() -# This implicitly tests the "/" path -wait_for_service(APP_URL, "search2") - - -def test_search_example1(): - url = APP_URL + '/legacy' - resp = requests.post( - url=url, - data=json.dumps(search_request1), - ) - data = resp.json() - assert data['jsonrpc'] == search_response1['jsonrpc'] - assert data['id'] == search_response1['id'] - assert len(data['result']) == 1 - res = data['result'][0] - expected_res = search_response1['result'][0] - assert res['search_time'] > 0 - assert res['total'] > 0 - assert res['sorting_rules'] == expected_res['sorting_rules'] - assert res['objects'] == expected_res['objects'] - - -def test_search_example2(): - url = APP_URL + '/legacy' - resp = requests.post( - url=url, - data=json.dumps(search_request2), - ) - data = resp.json() - assert data['jsonrpc'] == search_response2['jsonrpc'] - assert data['id'] == search_response2['id'] - assert len(data['result']) == 1 - res = data['result'][0] - # expected_res = search_response2['result'][0] - assert res['search_time'] > 0 - assert res['type_to_count'] # TODO match more closely when things are more indexed - - -def test_search_example3(): - url = APP_URL + '/legacy' - resp = requests.post( - url=url, - data=json.dumps(search_request3), - ) - data = resp.json() - print('data', data) - assert data['jsonrpc'] == search_response3['jsonrpc'] - assert data['id'] == search_response3['id'] - assert len(data['result']) == 1 - res = data['result'][0] - expected_res = search_response3['result'][0] - assert set(res.keys()) == set(expected_res.keys()) - assert res['pagination'] == expected_res['pagination'] - assert res['sorting_rules'] == expected_res['sorting_rules'] - assert res['total'] > 0 - assert res['search_time'] > 0 - assert len(res['objects']) > 0 - # TODO assert on access_group_narrative_info - # assert ['access_group_narrative_info'] - - -def test_search_example4(): - """Genome features count with no data""" - url = APP_URL + '/legacy' - resp = requests.post( - url=url, - data=json.dumps(search_request4), - ) - data = resp.json() - assert data['jsonrpc'] == search_response4['jsonrpc'] - assert data['id'] == search_response4['id'] - assert len(data['result']) == 1 - expected_res = search_response4['result'][0] - res = data['result'][0] - assert set(res.keys()) == set(expected_res.keys()) - assert res['pagination'] == expected_res['pagination'] - assert res['sorting_rules'] == expected_res['sorting_rules'] - assert 
res['objects'] == expected_res['objects'] - assert res['total'] > 0 - assert res['search_time'] > 0 - - -def test_search_example5(): - """Genome features search with results""" - url = APP_URL + '/legacy' - resp = requests.post( - url=url, - data=json.dumps(search_request5), - ) - data = resp.json() - assert data['jsonrpc'] == search_response5['jsonrpc'] - assert data['id'] == search_response5['id'] - assert len(data['result']) == 1 - expected_res = search_response5['result'][0] - res = data['result'][0] - assert set(res.keys()) == set(expected_res.keys()) - assert res['pagination'] == expected_res['pagination'] - assert res['sorting_rules'] == expected_res['sorting_rules'] - assert len(res['objects']) > 0 - assert len(res['objects_info']) > 0 - assert res['total'] > 0 - assert res['search_time'] > 0 diff --git a/tests/integration/test_integration_search_workspaces.py b/tests/integration/test_integration_search_workspaces.py index 22f59b0..118f707 100644 --- a/tests/integration/test_integration_search_workspaces.py +++ b/tests/integration/test_integration_search_workspaces.py @@ -1,19 +1,8 @@ import json -import os import requests -from tests.helpers.integration_setup import setup -from src.utils.wait_for_service import wait_for_service - -APP_URL = os.environ.get("APP_URL", 'http://localhost:5000') -setup() - -# This implicitly tests the "/" path -wait_for_service(APP_URL, "search2") - - -def test_narrative_example(): +def test_narrative_example(service): params = { "access": { "only_public": True, @@ -31,7 +20,7 @@ def test_narrative_example(): "offset": 0 } } - url = APP_URL + '/rpc' + url = service['app_url'] + '/rpc' resp = requests.post( url=url, data=json.dumps({ @@ -42,32 +31,33 @@ def test_narrative_example(): }) ) data = resp.json() + assert 'result' in data assert data['result']['count'] > 0 -def test_dashboard_example(): +def test_dashboard_example(service): params = { - "id": 1597353298754, - "jsonrpc": "2.0", - "method": "search_workspace", - "params": { - "filters": { - "fields": [ - {"field": "is_temporary", "term": False}, - {"field": "creator", "term": "jayrbolton"} - ], - "operator": "AND" - }, - "paging": {"length": 20, "offset": 0}, - "sorts": [["timestamp", "desc"], ["_score", "desc"]], - "track_total_hits": False, - "types": ["KBaseNarrative.Narrative"] - } + "id": 1597353298754, + "jsonrpc": "2.0", + "method": "search_workspace", + "params": { + "filters": { + "fields": [ + {"field": "creator", "term": "kbaseuitest"} + ], + "operator": "AND" + }, + "paging": {"length": 20, "offset": 0}, + "sorts": [["timestamp", "desc"], ["_score", "desc"]], + "track_total_hits": False, + "types": ["KBaseNarrative.Narrative"] + } } - url = APP_URL + '/rpc' + url = service['app_url'] + '/rpc' resp = requests.post( url=url, data=json.dumps(params), ) data = resp.json() + assert 'result' in data assert data['result']['count'] > 0 diff --git a/tests/integration/test_legacy_search_objects.py b/tests/integration/test_legacy_search_objects.py new file mode 100644 index 0000000..cd29a5b --- /dev/null +++ b/tests/integration/test_legacy_search_objects.py @@ -0,0 +1,182 @@ +import json +import os +import requests +import pytest +from tests.helpers.common import ( + assert_jsonrpc11_result, + assert_jsonrpc11_error +) +from tests.helpers.integration_setup import ( + do_rpc, + load_data_file, assert_equal_results +) + + +def test_search_objects_public(service): + """A search against public refdata data should succeed""" + request_test_data = load_data_file('search_objects', 
'case-04-request.json') + response_test_data = load_data_file('search_objects', 'case-04-response.json') + url = service['app_url'] + '/legacy' + res = do_rpc(url, request_test_data, response_test_data) + # Assert characteristics of some properties + assert res['search_time'] > 0 + assert res['total'] > 0 + + +def test_search_example3(service): + request_data = load_data_file('search_objects', 'case-06-request.json') + response_data = load_data_file('search_objects', 'case-06-response.json') + url = service['app_url'] + '/legacy' + res = do_rpc(url, request_data, response_data) + assert res['total'] > 0 + assert res['search_time'] > 0 + assert len(res['objects']) > 0 + + +def test_search_objects_private(service): + """Search over private data""" + if 'WS_TOKEN' not in os.environ: + pytest.skip('Token required for this test') + + request_data = load_data_file('search_objects', 'case-09-request.json') + response_data = load_data_file('search_objects', 'case-09-response.json') + url = service['app_url'] + '/legacy' + + res = do_rpc(url, request_data, response_data) + + # TODO: should check more fields. + assert len(res['objects']) > 0 + for obj in res['objects']: + assert len(obj['highlight']) > 0 + + +def test_search_objects_multiple_terms_narrow(service): + """Multiple terms should narrow search results""" + if 'WS_TOKEN' not in os.environ: + pytest.skip('Token required for this test') + + test_data = load_data_file('search_objects', 'case-10.json') + url = service['app_url'] + '/legacy' + rpc = test_data['rpc'] + + for case in test_data['cases']: + rpc['params'][0]['match_filter']['full_text_in_all'] = case['full_text_in_all'] + res = do_rpc(url, rpc, {'id': rpc['id']}) + assert res['total'] == case['total'] + + +def test_search_objects_(service): + """Search for public non-reference data""" + if 'WS_TOKEN' not in os.environ: + pytest.skip('Token required for this test') + + url = service['app_url'] + '/legacy' + request_data = load_data_file('search_objects', 'case-01-request.json') + response_data = load_data_file('search_objects', 'case-01-response.json') + res = do_rpc(url, request_data, response_data) + assert_equal_results(res, response_data['result'][0]) + + +def test_search_case_01_no_auth(service): + """Search for public non-reference data, without token""" + url = service['app_url'] + '/legacy' + request_data = load_data_file('search_objects', 'case-01-request.json') + response_data = load_data_file('search_objects', 'case-01-response.json') + res = do_rpc(url, request_data, response_data) + assert_equal_results(res, response_data['result'][0]) + +# Simulates search from data-search with a search term, only private data +# TODO: + + +# Simulates search from data-search with a search term, only private and public data +# TODO: + + +def test_search_objects_many_results(service): + url = service['app_url'] + '/legacy' + + if 'WS_TOKEN' not in os.environ: + pytest.skip('Token required for this test') + + request_data = load_data_file('search_objects', 'case-02-request.json') + response_data = load_data_file('search_objects', 'case-02-response.json') + + resp = requests.post( + url=url, + headers={'Authorization': os.environ['WS_TOKEN']}, + data=json.dumps(request_data), + ) + result = assert_jsonrpc11_result(resp.json(), response_data) + assert result['total'] > 10000 + + +def assert_counts(service, with_private, with_public, expected_count): + resp = make_call(service, with_private, with_public) + response_data = load_data_file('search_objects', 'case-03-response.json') + result = 
assert_jsonrpc11_result(resp.json(), response_data) + assert result['total'] == expected_count + + +def make_call(service, with_private, with_public): + url = service['app_url'] + '/legacy' + + if 'WS_TOKEN' not in os.environ: + pytest.skip('Token required for this test') + + request_data = load_data_file('search_objects', 'case-03-request.json') + request_data['params'][0]['access_filter']['with_private'] = with_private + request_data['params'][0]['access_filter']['with_public'] = with_public + + return requests.post( + url=url, + headers={'Authorization': os.environ['WS_TOKEN']}, + data=json.dumps(request_data), + ) + + +def get_error(service, with_private, with_public): + resp = make_call(service, with_private, with_public) + data = resp.json() + response_data = load_data_file('search_objects', 'case-03-response.json') + return assert_jsonrpc11_error(data, response_data) + + +def get_count(service, with_private, with_public): + resp = make_call(service, with_private, with_public) + response_data = load_data_file('search_objects', 'case-03-response.json') + result = assert_jsonrpc11_result(resp.json(), response_data) + return result['total'] + +# +# This series of tests relies upon a specific state of data in +# search. + + +def test_search_objects_private_and_public_counts(service): + assert_counts(service, 1, 1, 12) + + +def test_search_objects_private_counts(service): + assert_counts(service, 1, 0, 5) + + +def test_search_objects_public_counts(service): + assert_counts(service, 0, 1, 9) + + +def test_search_objects_neither_private_nor_public(service): + error = get_error(service, 0, 0) + assert error['code'] == -32602 + assert error['message'] == 'Invalid params' + + +# A safer but less precise method. + + +def test_search_objects_public_vs_private(service): + all_count = get_count(service, 1, 1) + private_count = get_count(service, 1, 0) + public_count = get_count(service, 0, 1) + assert private_count < all_count + assert public_count < all_count diff --git a/tests/integration/test_legacy_search_types.py b/tests/integration/test_legacy_search_types.py new file mode 100644 index 0000000..8e65dbc --- /dev/null +++ b/tests/integration/test_legacy_search_types.py @@ -0,0 +1,10 @@ +from tests.helpers.integration_setup import do_rpc, load_data_file + + +def test_search_types(service): + request_data = load_data_file('search_types', 'case-05-request.json') + response_data = load_data_file('search_types', 'case-05-response.json') + url = service['app_url'] + '/legacy' + res = do_rpc(url, request_data, response_data) + assert res['search_time'] > 0 + assert res['type_to_count'] # TODO match more closely when things are more indexed diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py new file mode 100644 index 0000000..59fe32d --- /dev/null +++ b/tests/unit/conftest.py @@ -0,0 +1,18 @@ +# content of a/conftest.py +import pytest +from tests.helpers.unit_setup import ( + start_service, + stop_service +) +from tests.helpers import init_elasticsearch + +# ES_URL = 'http://localhost:9200' +APP_URL = 'http://localhost:5000' + + +@pytest.fixture(scope="session") +def services(): + start_service(APP_URL, 'searchapi2') + init_elasticsearch() + yield {'app_url': APP_URL} + stop_service() diff --git a/tests/unit/es_client/test_es_client.py b/tests/unit/es_client/test_es_client.py index 41cf4e1..f66db61 100644 --- a/tests/unit/es_client/test_es_client.py +++ 
b/tests/unit/es_client/test_es_client.py @@ -1,6 +1,5 @@ import json import pytest -import subprocess import responses # For mocking workspace calls @@ -9,121 +8,143 @@ from src.utils.config import config from src.exceptions import UnknownIndex from src.es_client import search -from src.utils.wait_for_service import wait_for_service from src.exceptions import ElasticsearchError -from tests.helpers import init_elasticsearch - - -ES_URL = 'http://localhost:9200' -subprocess.run("docker-compose up -d", shell=True) -wait_for_service(ES_URL, 'Elasticsearch') -init_elasticsearch() - - -def test_search_public_valid(): - params = { - 'only_public': True, - 'track_total_hits': True, - } - result = search(params, {'auth': None}) - assert result['count'] == 4 - assert result['search_time'] >= 0 - assert result['aggregations'] == {} - expected: set = { - ('index1', 'public-doc1'), - ('index2', 'public-doc1'), - ('index1', 'public-doc2'), - ('index2', 'public-doc2'), - } - docs = {(doc['index'], doc['id']) for doc in result['hits']} - assert docs == expected - - -def test_search_query_valid(): - params = { - 'only_public': True, - 'query': { - "term": {"name": "doc2"}, - }, - } - result = search(params, {'auth': None}) - assert result['count'] == 2 - assert result['search_time'] >= 0 - assert result['aggregations'] == {} - expected: set = { - ('index1', 'public-doc2'), - ('index2', 'public-doc2'), - } - docs = {(doc['index'], doc['id']) for doc in result['hits']} - assert docs == expected - - -def test_search_aggs_valid(): - params = { - 'aggs': {'count_by_index': {'terms': {'field': '_index'}}} - } - result = search(params, {'auth': None}) - assert result['count'] == 4 - assert result['aggregations']['count_by_index']['counts'] == [ - {'key': 'test_index1', 'count': 2}, - {'key': 'test_index2', 'count': 2}, - ] - - -def test_search_sort_valid(): - params = {'sort': [{'timestamp': {'order': 'desc'}}, '_score']} - result = search(params, {'auth': None}) - docs = [r['doc'] for r in result['hits']] - timestamps = [r['timestamp'] for r in docs] - assert timestamps == [12, 12, 10, 10] - # And ascending - params = {'sort': [{'timestamp': {'order': 'asc'}}, '_score']} - result = search(params, {'auth': None}) - docs = [r['doc'] for r in result['hits']] - timestamps = [r['timestamp'] for r in docs] - assert timestamps == [10, 10, 12, 12] - - -def test_search_highlight_valid(): - params = { - 'query': {'term': {'name': 'doc1'}}, - 'highlight': {'name': {}} - } - result = search(params, {'auth': None}) - highlights: set = {hit['highlight']['name'][0] for hit in result['hits']} - assert highlights == {'public-doc1'} - - -def test_search_source_filtering_valid(): - params = { - 'source': ['name'] - } - result = search(params, {'auth': None}) - docs = {json.dumps(r['doc']) for r in result['hits']} - assert docs == {'{"name": "public-doc1"}', '{"name": "public-doc2"}'} - - -def test_search_by_index_valid(): - params = {'indexes': ['index1']} - result = search(params, {'auth': None}) - indexes = {r['index'] for r in result['hits']} - assert indexes == {'index1'} - - -def test_search_unknown_index(): - idx_name = 'xyz' - full_name = config['index_prefix'] + '_' + idx_name - params = {'indexes': [idx_name]} - with pytest.raises(UnknownIndex) as ctx: - search(params, {'auth': None}) - assert str(ctx.value) == f"no such index [{full_name}]" - - -def test_search_private_valid(): + + +def test_search_public_valid(services): + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [0, 1] # Public 
workspaces + params = { + 'only_public': True, + 'track_total_hits': True, + } + result = search(params, {'auth': None}) + assert result['count'] == 4 + assert result['search_time'] >= 0 + assert result['aggregations'] == {} + expected: set = { + ('index1', 'public-doc1'), + ('index2', 'public-doc1'), + ('index1', 'public-doc2'), + ('index2', 'public-doc2'), + } + docs = {(doc['index'], doc['id']) for doc in result['hits']} + assert docs == expected + + +def test_search_query_valid(services): + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [0, 1] # Public workspaces + params = { + 'only_public': True, + 'query': { + "term": {"name": "doc2"}, + }, + } + result = search(params, {'auth': None}) + assert result['count'] == 2 + assert result['search_time'] >= 0 + assert result['aggregations'] == {} + expected: set = { + ('index1', 'public-doc2'), + ('index2', 'public-doc2'), + } + docs = {(doc['index'], doc['id']) for doc in result['hits']} + assert docs == expected + + +def test_search_aggs_valid(services): + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [0, 1] # Public workspaces + params = { + 'aggs': {'count_by_index': {'terms': {'field': '_index'}}} + } + result = search(params, {'auth': None}) + assert result['count'] == 4 + assert result['aggregations']['count_by_index']['counts'] == [ + {'key': 'test.index1', 'count': 2}, + {'key': 'test.index2', 'count': 2}, + ] + + +def test_search_sort_valid(services): + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [0, 1] # Public workspaces + params = {'sort': [{'timestamp': {'order': 'desc'}}, '_score']} + result = search(params, {'auth': None}) + docs = [r['doc'] for r in result['hits']] + timestamps = [r['timestamp'] for r in docs] + assert timestamps == [12, 12, 10, 10] + # And ascending + params = {'sort': [{'timestamp': {'order': 'asc'}}, '_score']} + result = search(params, {'auth': None}) + docs = [r['doc'] for r in result['hits']] + timestamps = [r['timestamp'] for r in docs] + assert timestamps == [10, 10, 12, 12] + + +def test_search_highlight_valid(services): + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [0, 1] # Public workspaces + params = { + 'query': {'term': {'name': 'doc1'}}, + 'highlight': {'fields': {'name': {}}} + } + result = search(params, {'auth': None}) + highlights: set = {hit['highlight']['name'][0] for hit in result['hits']} + assert highlights == {'public-doc1'} + + +def test_search_source_filtering_valid(services): + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [0, 1] # Public workspaces + params = { + 'source': ['name'] + } + result = search(params, {'auth': None}) + docs = {json.dumps(r['doc']) for r in result['hits']} + assert docs == {'{"name": "public-doc1"}', '{"name": "public-doc2"}'} + + +def test_search_by_index_valid(services): + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [0, 1] # Public workspaces + params = {'indexes': ['index1']} + result = search(params, {'auth': None}) + indexes = {r['index'] for r in result['hits']} + assert indexes == {'index1'} + + +def test_search_unknown_index(services): + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [0, 1] # Public workspaces + idx_name = 'xyz' + full_name = config['index_prefix'] + config['prefix_delimiter'] + idx_name + params = {'indexes': [idx_name]} + with pytest.raises(UnknownIndex) as ctx: + search(params, {'auth': None}) + assert 
str(ctx.value) == f"no such index [{full_name}]" + + +def test_search_bad_query(services): with patch('src.es_client.query.ws_auth') as mocked: - mocked.return_value = [1, 2, 3] # Authorized workspaces + mocked.return_value = [0, 1] # Public workspaces + # force a bad query by passing a nonsensical sort value. + params = { + 'indexes': ['index1'], + 'sort': 'x' + } + with pytest.raises(ElasticsearchError) as ctx: + search(params, {'auth': None}) + assert str(ctx.value) == "all shards failed" + + +def test_search_private_valid(services): + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [100] # Private workspaces this fictional user has access to params = {'only_private': True} result = search(params, {'auth': 'x'}) + # Is two docs since this is across two indexes. assert result['count'] == 2 names = {hit['doc']['name'] for hit in result['hits']} assert names == {'private-doc1'} @@ -131,33 +152,61 @@ def test_search_private_valid(): assert is_public == {False} -def test_search_private_no_access(): +def test_search_private_no_access(services): with patch('src.es_client.query.ws_auth') as mocked: - mocked.return_value = [55] # Authorized workspaces + mocked.return_value = [55] # A workspace which is not indexed params = {'only_private': True} result = search(params, {'auth': 'x'}) assert result['count'] == 0 @responses.activate -def test_es_response_error(): +def test_es_response_error(services): """Test the case where ES gives a non-2xx response.""" - prefix = config['index_prefix'] - delim = config['prefix_delimiter'] - index_name_str = prefix + delim + "default_search" - url = config['elasticsearch_url'] + '/' + index_name_str + '/_search' - responses.add(responses.POST, url, json={}, status=500) - with pytest.raises(ElasticsearchError): - search({}, {'auth': None}) + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [100] # Private workspaces this fictional user has access to + prefix = config['index_prefix'] + delim = config['prefix_delimiter'] + index_name_str = prefix + delim + "default_search" + url = config['elasticsearch_url'] + '/' + index_name_str + '/_search' + responses.add(responses.POST, url, json={}, status=500) + with pytest.raises(ElasticsearchError): + search({}, {'auth': None}) @responses.activate -def test_es_response_error_no_json(): +def test_es_response_error_no_json(services): """Test the case where ES gives a non-2xx response with a non-json body.""" - prefix = config['index_prefix'] - delim = config['prefix_delimiter'] - index_name_str = prefix + delim + "default_search" - url = config['elasticsearch_url'] + '/' + index_name_str + '/_search' - responses.add(responses.POST, url, body="!", status=500) - with pytest.raises(ElasticsearchError): - search({}, {'auth': None}) + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [100] # Private workspaces this fictional user has access to + prefix = config['index_prefix'] + delim = config['prefix_delimiter'] + index_name_str = prefix + delim + "default_search" + url = config['elasticsearch_url'] + '/' + index_name_str + '/_search' + responses.add(responses.POST, url, body="!", status=500) + with pytest.raises(ElasticsearchError): + search({}, {'auth': None}) + + +@responses.activate +def test_es_response_error_default(services): + """Test the case where ES gives a non-2xx response with an unhandled err_type.""" + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [100] # Private workspaces this fictional user has 
access to
+        prefix = config['index_prefix']
+        delim = config['prefix_delimiter']
+        index_name_str = prefix + delim + "default_search"
+        url = config['elasticsearch_url'] + '/' + index_name_str + '/_search'
+        error_response = {
+            'error': {
+                'reason': 'My error reason',
+                'root_cause': [
+                    {
+                        'type': 'unhandled'
+                    }
+                ]
+            }
+        }
+        responses.add(responses.POST, url, body=json.dumps(error_response), status=500)
+        with pytest.raises(ElasticsearchError):
+            search({}, {'auth': None})
diff --git a/tests/unit/mocks/data/README.md b/tests/unit/mocks/data/README.md
new file mode 100644
index 0000000..8a11132
--- /dev/null
+++ b/tests/unit/mocks/data/README.md
@@ -0,0 +1,15 @@
+Data for mocks
+
+Generally, all mocks described herein use a query for "Prochlorococcus marinus str. GP2". For search_objects and search_types, a query generated by the data-search ui was used as the basis. For get_objects, a hand-crafted query was created with the same post_processing.
+
+Each case includes the initial request, the params, the elasticsearch result, the prepared result, and the api response.
+
+All test data is in JSON, stored in *.json files.
+
+Each test data file is located in a directory named after the service and, for the api, a subdirectory named after the method.
+
+Data files use the naming convention `<case>-<usage>.json`, where `<case>` is 'case-01', 'case-02', etc., and `<usage>` is one of 'request', 'params', 'result', or 'response'.
+
+case-01: A default public + private search, with all post-processing additions enabled and no result parts disabled, via the data-search interface with a search query for "Prochlorococcus marinus str. GP2".
+
+case-02: Designed for get_objects, using the ids returned by case-01.
\ No newline at end of file
diff --git a/tests/unit/mocks/data/SearchAPI/legacy/get_objects/case-02/params.json b/tests/unit/mocks/data/SearchAPI/legacy/get_objects/case-02/params.json
new file mode 100644
index 0000000..23dfda1
--- /dev/null
+++ b/tests/unit/mocks/data/SearchAPI/legacy/get_objects/case-02/params.json
@@ -0,0 +1,14 @@
+{
+    "ids": [
+        "WS::56638:2",
+        "WS::56638:1"
+    ],
+    "post_processing": {
+        "ids_only": 0,
+        "skip_info": 0,
+        "skip_keys": 0,
+        "skip_data": 0,
+        "include_highlight": 1,
+        "add_narrative_info": 1
+    }
+}
\ No newline at end of file
diff --git a/tests/unit/mocks/data/SearchAPI/legacy/get_objects/case-02/request.json b/tests/unit/mocks/data/SearchAPI/legacy/get_objects/case-02/request.json
new file mode 100644
index 0000000..f78657a
--- /dev/null
+++ b/tests/unit/mocks/data/SearchAPI/legacy/get_objects/case-02/request.json
@@ -0,0 +1,21 @@
+{
+    "params": [
+        {
+            "ids": [
+                "WS::56638:2",
+                "WS::56638:1"
+            ],
+            "post_processing": {
+                "ids_only": 0,
+                "skip_info": 0,
+                "skip_keys": 0,
+                "skip_data": 0,
+                "include_highlight": 1,
+                "add_narrative_info": 1
+            }
+        }
+    ],
+    "method": "KBaseSearchEngine.get_objects",
+    "version": "1.1",
+    "id": "12345"
+}
\ No newline at end of file
diff --git a/tests/unit/mocks/data/SearchAPI/legacy/get_objects/case-02/result.json b/tests/unit/mocks/data/SearchAPI/legacy/get_objects/case-02/result.json
new file mode 100644
index 0000000..4c02447
--- /dev/null
+++ b/tests/unit/mocks/data/SearchAPI/legacy/get_objects/case-02/result.json
@@ -0,0 +1,142 @@
+{
+    "search_time": 91945,
+    "access_group_narrative_info": {
+        "56638": [
+            "Test Narrative for FeatureSet Integration Test",
+            1,
+            1605765178000,
+            "kbaseuitest",
+            "KBase UI Test User"
+        ]
+    },
+    "objects": [
+        {
+            "id": "WS::56638:2",
+            "object_name": "GCF_000759885.1",
+            "workspace_id": 56638,
+            "object_id": 2,
+
"object_version": 1, + "workspace_type_module": "KBaseGenomes", + "workspace_type_name": "Genome", + "workspace_type_version": "14.2", + "modified_at": 1531222570501, + "creator": "kbasedata", + "copied": "15792/64028/2", + "created_at": 1605748053000, + "index_name": "genome", + "index_version": 2, + "data": { + "genome_id": "GCF_000759885.1", + "scientific_name": "Prochlorococcus marinus str. GP2", + "publication_titles": [ + "Genomes of diverse isolates of the marine cyanobacterium Prochlorococcus", + "Direct Submission" + ], + "publication_authors": [ + "Biller,S., Berube,P., Thompson,J., Kelly,L., Roggensack,S., Awad,L., Roache-Johnson,K., Ding,H., Giovannoni,S.J., Moore,L.R. and Chisholm,S.W.", + "Biller,S.J., Berube,P.M., Berta-Thompson,J.W., Kelly,L., Roggensack,S.E., Awad,L., Roache-Johnson,K.H., Ding,H., Giovannoni,S.J., Rocap,G., Moore,L.R. and Chisholm,S.W." + ], + "size": 1624310, + "num_contigs": 11, + "genome_type": null, + "gc_content": 0.31164, + "taxonomy": "cellular organisms; Bacteria; Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus; Prochlorococcus marinus", + "mean_contig_length": 147664.54545454544, + "external_origination_date": "11-Apr-2017", + "original_source_file_name": "GCF_000759885.1_ASM75988v1_genomic.gbff", + "cds_count": 1760, + "feature_count": 1760, + "mrna_count": 0, + "non_coding_feature_count": 89, + "assembly_ref": "15792:64027:2", + "source_id": "NZ_JNAH01000001", + "feature_counts": { + "CDS": 1760, + "gene": 1804, + "ncRNA": 3, + "non-protein_encoding_gene": 44, + "protein_encoding_gene": 1760, + "rRNA": 3, + "regulatory": 1, + "tRNA": 37, + "tmRNA": 1 + }, + "source": "RefSeq", + "warnings": [ + "SUSPECT: CDS EU91_RS03000_CDS_1 has a length of 974 which is not consistent with the length of the translation included (323 amino acids)." + ] + }, + "highlight": {} + }, + { + "id": "WS::56638:1", + "object_name": "Narrative.1605747150690", + "workspace_id": 56638, + "object_id": 1, + "object_version": 13, + "workspace_type_module": "KBaseNarrative", + "workspace_type_name": "Narrative", + "workspace_type_version": "4.0", + "modified_at": 1605765178549, + "creator": "kbaseuitest", + "copied": null, + "created_at": 1605747150000, + "index_name": "narrative", + "index_version": 2, + "data": { + "narrative_title": "Test Narrative for FeatureSet Integration Test", + "is_narratorial": false, + "data_objects": [ + { + "name": "GCF_000759885.1", + "obj_type": "KBaseGenomes.Genome-14.2" + }, + { + "name": "GCF_001766235.1", + "obj_type": "KBaseGenomes.Genome-17.0" + }, + { + "name": "featureset2", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, + { + "name": "featureset1", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, + { + "name": "mergedfeatureset", + "obj_type": "KBaseCollections.FeatureSet-4.0" + } + ], + "owner": "kbaseuitest", + "modified_at": 1605765178000, + "cells": [ + { + "desc": "Test Narrative for FeatureSet Integration Test.\nTo reproduce:\n\nremove add a markdown cell (this one!), remove the welcome cell\nImport 2 genomes\nin this case, I copied, from RefSeq refdata:\n\nAcetobacter ascendens\nProchlorococcus marinus str. 
GP2\n\nat the time of writing, the public data search for refseq is broken, so I copied into this narrative from their landing pages\ncreate a FeatureSet for each Genome, using Build FeatureSet from Genome\n\nchoose the first 3 features from each genome for the feature set\nname the feature set for Acetobacter \"featureset1\", for Prochlorococcus \"featureset2\"\n\ncreate another FeatureSet combining these two using \"Merge FeatureSets\"\nin the Description parameter add the text \"merged feature set\", or whatever text you like (it will need to match the integration test.)\nin \"Output FeatureSet Name\" name it \"mergedfeatureset\"\ninsert the merged feature set object into the narrative as the last cell; it should be the 6th cell.\nfinally make the narrative public.\n\n", + "cell_type": "markdown" + }, + { + "desc": "Build FeatureSet from Genome", + "cell_type": "kbase_app" + }, + { + "desc": "Merge FeatureSets - v1.0.1", + "cell_type": "kbase_app" + }, + { + "desc": "kbaseReportView", + "cell_type": "widget" + }, + { + "desc": "mergedfeatureset", + "cell_type": "data" + } + ], + "total_cells": 6, + "static_narrative_saved": null, + "static_narrative_ref": null + }, + "highlight": {} + } + ] +} \ No newline at end of file diff --git a/tests/unit/mocks/data/SearchAPI/legacy/search_objects/case-01/params.json b/tests/unit/mocks/data/SearchAPI/legacy/search_objects/case-01/params.json new file mode 100644 index 0000000..d7e1e3b --- /dev/null +++ b/tests/unit/mocks/data/SearchAPI/legacy/search_objects/case-01/params.json @@ -0,0 +1,40 @@ +{ + "match_filter": { + "full_text_in_all": "Prochlorococcus marinus str. GP2", + "exclude_subobjects": 1, + "source_tags": [ + "refdata", + "noindex" + ], + "source_tags_blacklist": 1 + }, + "pagination": { + "start": 0, + "count": 20 + }, + "post_processing": { + "ids_only": 0, + "skip_info": 0, + "skip_keys": 0, + "skip_data": 0, + "include_highlight": 1, + "add_narrative_info": 1, + "add_access_group_info": 1 + }, + "access_filter": { + "with_private": 1, + "with_public": 1 + }, + "sorting_rules": [ + { + "is_object_property": 0, + "property": "access_group_id", + "ascending": 0 + }, + { + "is_object_property": 0, + "property": "type", + "ascending": 1 + } + ] +} \ No newline at end of file diff --git a/tests/unit/mocks/data/SearchAPI/legacy/search_objects/case-01/request.json b/tests/unit/mocks/data/SearchAPI/legacy/search_objects/case-01/request.json new file mode 100644 index 0000000..002df5f --- /dev/null +++ b/tests/unit/mocks/data/SearchAPI/legacy/search_objects/case-01/request.json @@ -0,0 +1,47 @@ +{ + "params": [ + { + "match_filter": { + "full_text_in_all": "Prochlorococcus marinus str. 
GP2", + "exclude_subobjects": 1, + "source_tags": [ + "refdata", + "noindex" + ], + "source_tags_blacklist": 1 + }, + "pagination": { + "start": 0, + "count": 20 + }, + "post_processing": { + "ids_only": 0, + "skip_info": 0, + "skip_keys": 0, + "skip_data": 0, + "include_highlight": 1, + "add_narrative_info": 1, + "add_access_group_info": 1 + }, + "access_filter": { + "with_private": 1, + "with_public": 1 + }, + "sorting_rules": [ + { + "is_object_property": 0, + "property": "access_group_id", + "ascending": 0 + }, + { + "is_object_property": 0, + "property": "type", + "ascending": 1 + } + ] + } + ], + "method": "KBaseSearchEngine.search_objects", + "version": "1.1", + "id": "12345" +} \ No newline at end of file diff --git a/tests/unit/mocks/data/SearchAPI/legacy/search_objects/case-01/result.json b/tests/unit/mocks/data/SearchAPI/legacy/search_objects/case-01/result.json new file mode 100644 index 0000000..bac190d --- /dev/null +++ b/tests/unit/mocks/data/SearchAPI/legacy/search_objects/case-01/result.json @@ -0,0 +1,196 @@ +{ + "pagination": { + "start": 0, + "count": 20 + }, + "sorting_rules": [ + { + "is_object_property": 0, + "property": "access_group_id", + "ascending": 0 + }, + { + "is_object_property": 0, + "property": "type", + "ascending": 1 + } + ], + "total": 2, + "search_time": 103122, + "objects": [ + { + "id": "WS::56638:2", + "object_name": "GCF_000759885.1", + "workspace_id": 56638, + "object_id": 2, + "object_version": 1, + "workspace_type_module": "KBaseGenomes", + "workspace_type_name": "Genome", + "workspace_type_version": "14.2", + "modified_at": 1531222570501, + "creator": "kbasedata", + "copied": "15792/64028/2", + "created_at": 1605748053000, + "index_name": "genome", + "index_version": 2, + "data": { + "genome_id": "GCF_000759885.1", + "scientific_name": "Prochlorococcus marinus str. GP2", + "publication_titles": [ + "Genomes of diverse isolates of the marine cyanobacterium Prochlorococcus", + "Direct Submission" + ], + "publication_authors": [ + "Biller,S., Berube,P., Thompson,J., Kelly,L., Roggensack,S., Awad,L., Roache-Johnson,K., Ding,H., Giovannoni,S.J., Moore,L.R. and Chisholm,S.W.", + "Biller,S.J., Berube,P.M., Berta-Thompson,J.W., Kelly,L., Roggensack,S.E., Awad,L., Roache-Johnson,K.H., Ding,H., Giovannoni,S.J., Rocap,G., Moore,L.R. and Chisholm,S.W." + ], + "size": 1624310, + "num_contigs": 11, + "genome_type": null, + "gc_content": 0.31164, + "taxonomy": "cellular organisms; Bacteria; Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus; Prochlorococcus marinus", + "mean_contig_length": 147664.54545454544, + "external_origination_date": "11-Apr-2017", + "original_source_file_name": "GCF_000759885.1_ASM75988v1_genomic.gbff", + "cds_count": 1760, + "feature_count": 1760, + "mrna_count": 0, + "non_coding_feature_count": 89, + "assembly_ref": "15792:64027:2", + "source_id": "NZ_JNAH01000001", + "feature_counts": { + "CDS": 1760, + "gene": 1804, + "ncRNA": 3, + "non-protein_encoding_gene": 44, + "protein_encoding_gene": 1760, + "rRNA": 3, + "regulatory": 1, + "tRNA": 37, + "tmRNA": 1 + }, + "source": "RefSeq", + "warnings": [ + "SUSPECT: CDS EU91_RS03000_CDS_1 has a length of 974 which is not consistent with the length of the translation included (323 amino acids)." + ] + }, + "highlight": { + "publication_titles": [ + "Genomes of diverse isolates of the marine cyanobacterium Prochlorococcus" + ], + "scientific_name": [ + "Prochlorococcus marinus str. 
GP2" + ], + "taxonomy": [ + "Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus", + "; Prochlorococcus marinus" + ] + } + }, + { + "id": "WS::56638:1", + "object_name": "Narrative.1605747150690", + "workspace_id": 56638, + "object_id": 1, + "object_version": 13, + "workspace_type_module": "KBaseNarrative", + "workspace_type_name": "Narrative", + "workspace_type_version": "4.0", + "modified_at": 1605765178549, + "creator": "kbaseuitest", + "copied": null, + "created_at": 1605747150000, + "index_name": "narrative", + "index_version": 2, + "data": { + "narrative_title": "Test Narrative for FeatureSet Integration Test", + "is_narratorial": false, + "data_objects": [ + { + "name": "GCF_000759885.1", + "obj_type": "KBaseGenomes.Genome-14.2" + }, + { + "name": "GCF_001766235.1", + "obj_type": "KBaseGenomes.Genome-17.0" + }, + { + "name": "featureset2", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, + { + "name": "featureset1", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, + { + "name": "mergedfeatureset", + "obj_type": "KBaseCollections.FeatureSet-4.0" + } + ], + "owner": "kbaseuitest", + "modified_at": 1605765178000, + "cells": [ + { + "desc": "Test Narrative for FeatureSet Integration Test.\nTo reproduce:\n\nremove add a markdown cell (this one!), remove the welcome cell\nImport 2 genomes\nin this case, I copied, from RefSeq refdata:\n\nAcetobacter ascendens\nProchlorococcus marinus str. GP2\n\nat the time of writing, the public data search for refseq is broken, so I copied into this narrative from their landing pages\ncreate a FeatureSet for each Genome, using Build FeatureSet from Genome\n\nchoose the first 3 features from each genome for the feature set\nname the feature set for Acetobacter \"featureset1\", for Prochlorococcus \"featureset2\"\n\ncreate another FeatureSet combining these two using \"Merge FeatureSets\"\nin the Description parameter add the text \"merged feature set\", or whatever text you like (it will need to match the integration test.)\nin \"Output FeatureSet Name\" name it \"mergedfeatureset\"\ninsert the merged feature set object into the narrative as the last cell; it should be the 6th cell.\nfinally make the narrative public.\n\n", + "cell_type": "markdown" + }, + { + "desc": "Build FeatureSet from Genome", + "cell_type": "kbase_app" + }, + { + "desc": "Merge FeatureSets - v1.0.1", + "cell_type": "kbase_app" + }, + { + "desc": "kbaseReportView", + "cell_type": "widget" + }, + { + "desc": "mergedfeatureset", + "cell_type": "data" + } + ], + "total_cells": 6, + "static_narrative_saved": null, + "static_narrative_ref": null + }, + "highlight": { + "cells.desc": [ + "the welcome cell\nImport 2 genomes\nin this case, I copied, from RefSeq refdata:\n\nAcetobacter ascendens\nProchlorococcus", + "marinus str.", + "GP2\n\nat the time of writing, the public data search for refseq is broken, so I copied into this narrative", + "features from each genome for the feature set\nname the feature set for Acetobacter \"featureset1\", for Prochlorococcus" + ] + } + } + ], + "access_group_narrative_info": { + "56638": [ + "Test Narrative for FeatureSet Integration Test", + 1, + 1605765178000, + "kbaseuitest", + "KBase UI Test User" + ] + }, + "access_groups_info": { + "56638": [ + 56638, + "kbaseuitest:narrative_1605747150690", + "kbaseuitest", + "2020-11-19T05:52:58+0000", + 9, + "a", + "r", + "unlocked", + { + "cell_count": "1", + "narrative_nice_name": "Test Narrative for FeatureSet Integration Test", + 
"searchtags": "narrative", + "is_temporary": "false", + "narrative": "1" + } + ] + } +} \ No newline at end of file diff --git a/tests/unit/mocks/data/SearchAPI/legacy/search_types/case-01/params.json b/tests/unit/mocks/data/SearchAPI/legacy/search_types/case-01/params.json new file mode 100644 index 0000000..6701ac8 --- /dev/null +++ b/tests/unit/mocks/data/SearchAPI/legacy/search_types/case-01/params.json @@ -0,0 +1,22 @@ +{ + "match_filter": { + "full_text_in_all": "Prochlorococcus marinus str. GP2", + "exclude_subobjects": 1, + "source_tags": [ + "refdata", + "noindex" + ], + "source_tags_blacklist": 1 + }, + "access_filter": { + "with_private": 1, + "with_public": 1 + }, + "sorting_rules": [ + { + "property": "timestamp", + "is_object_property": 0, + "ascending": 1 + } + ] +} \ No newline at end of file diff --git a/tests/unit/mocks/data/SearchAPI/legacy/search_types/case-01/result.json b/tests/unit/mocks/data/SearchAPI/legacy/search_types/case-01/result.json new file mode 100644 index 0000000..675fd6d --- /dev/null +++ b/tests/unit/mocks/data/SearchAPI/legacy/search_types/case-01/result.json @@ -0,0 +1,7 @@ +{ + "type_to_count": { + "Genome": 1, + "Narrative": 1 + }, + "search_time": 2815 +} \ No newline at end of file diff --git a/tests/unit/mocks/data/UserProfile/get_profile/kbaseuitest.json b/tests/unit/mocks/data/UserProfile/get_profile/kbaseuitest.json new file mode 100644 index 0000000..ef61713 --- /dev/null +++ b/tests/unit/mocks/data/UserProfile/get_profile/kbaseuitest.json @@ -0,0 +1,109 @@ +{ + "user": { + "username": "kbaseuitest", + "realname": "KBase UI Test User" + }, + "profile": { + "metadata": { + "createdBy": "userprofile_ui_service", + "created": "2020-01-06T21:48:12.352Z" + }, + "preferences": {}, + "userdata": { + "organization": "", + "department": "", + "affiliations": [ + { + "title": "tester", + "organization": "kbase / lbnl", + "started": 2020, + "ended": 2020 + } + ], + "city": "", + "state": "California", + "postalCode": "", + "country": "", + "researchStatement": "Test user account for ui integration tests.\n\nPlease don't modify the profile.\n\nThis **can** be markdown, but who would know?", + "gravatarDefault": "monsterid", + "avatarOption": "gravatar", + "researchInterests": [ + "Comparative Genomics", + "Genome Annotation", + "Metabolic Modeling", + "Read Processing", + "Sequence Analysis" + ], + "researchInterestsOther": null, + "jobTitleOther": "My job", + "jobTitle": "Other", + "fundingSource": "" + }, + "synced": { + "gravatarHash": "b4d95f8595104614355e6ee9c4c03e3f" + }, + "plugins": { + "data-search": { + "settings": { + "history": { + "search": { + "history": [ + "Prochlorococcus marinus str. 
GP2", + "Prochlorococcus", + "coli", + "Prochlorococcus marinus", + "marinus", + "sphaeroides", + "abcde12345", + "cooli", + "Prochlorococcus Unconfirmed", + "Prochlorococcus Unconfirmed" + ], + "time": { + "$numberLong": "1618589834094" + } + } + } + } + }, + "jgi-search": { + "settings": { + "history": { + "search": { + "history": [ + "coli" + ], + "time": { + "$numberLong": "1582608583358" + } + } + }, + "jgiDataTerms": { + "agreed": true, + "time": { + "$numberLong": "1580251462454" + } + } + } + }, + "public-search": { + "settings": { + "history": { + "history": [ + "prochlorococcus marinus", + "prochlorococcus marnius", + "AnnotatedMetagenomeAssembly", + "AnnotatedGenomeAssembly", + "coli", + "prochlorococcus unconfirmed", + "prochlorococcus" + ], + "time": { + "$numberLong": "1615932685001" + } + } + } + } + } + } +} diff --git a/tests/unit/mocks/data/Workspace/get_object_info3/56638_2_1.json b/tests/unit/mocks/data/Workspace/get_object_info3/56638_2_1.json new file mode 100644 index 0000000..2c91986 --- /dev/null +++ b/tests/unit/mocks/data/Workspace/get_object_info3/56638_2_1.json @@ -0,0 +1,22 @@ +{ + "infos": [ + [ + 2, + "GCF_000759885.1", + "KBaseGenomes.Genome-14.2", + "2020-11-19T01:07:33+0000", + 1, + "kbaseuitest", + 56638, + "kbaseuitest:narrative_1605747150690", + "eb43f0b7ced19611f02b9dd1599fce05", + 5845963, + null + ] + ], + "paths": [ + [ + "56638/2/1" + ] + ] +} \ No newline at end of file diff --git a/tests/unit/mocks/data/Workspace/get_workspace_info/10056638.json b/tests/unit/mocks/data/Workspace/get_workspace_info/10056638.json new file mode 100644 index 0000000..6bceb46 --- /dev/null +++ b/tests/unit/mocks/data/Workspace/get_workspace_info/10056638.json @@ -0,0 +1,17 @@ +[ + 56638, + "kbaseuitest:narrative_1605747150690", + "kbaseuitestx", + "2020-11-19T05:52:58+0000", + 9, + "a", + "r", + "unlocked", + { + "cell_count": "1", + "narrative_nice_name": "Test Narrative for FeatureSet Integration Test", + "searchtags": "narrative", + "is_temporary": "false", + "narrative": "1" + } +] \ No newline at end of file diff --git a/tests/unit/mocks/data/Workspace/get_workspace_info/56638.json b/tests/unit/mocks/data/Workspace/get_workspace_info/56638.json new file mode 100644 index 0000000..d510402 --- /dev/null +++ b/tests/unit/mocks/data/Workspace/get_workspace_info/56638.json @@ -0,0 +1,17 @@ +[ + 56638, + "kbaseuitest:narrative_1605747150690", + "kbaseuitest", + "2020-11-19T05:52:58+0000", + 9, + "a", + "r", + "unlocked", + { + "cell_count": "1", + "narrative_nice_name": "Test Narrative for FeatureSet Integration Test", + "searchtags": "narrative", + "is_temporary": "false", + "narrative": "1" + } +] \ No newline at end of file diff --git a/tests/unit/mocks/data/elasticsearch/legacy/get_objects/case-02/result.json b/tests/unit/mocks/data/elasticsearch/legacy/get_objects/case-02/result.json new file mode 100644 index 0000000..9740414 --- /dev/null +++ b/tests/unit/mocks/data/elasticsearch/legacy/get_objects/case-02/result.json @@ -0,0 +1,147 @@ +{ + "count": 2, + "hits": [ + { + "index": "genome_2", + "id": "WS::56638:2", + "doc": { + "genome_id": "GCF_000759885.1", + "scientific_name": "Prochlorococcus marinus str. GP2", + "publication_titles": [ + "Genomes of diverse isolates of the marine cyanobacterium Prochlorococcus", + "Direct Submission" + ], + "publication_authors": [ + "Biller,S., Berube,P., Thompson,J., Kelly,L., Roggensack,S., Awad,L., Roache-Johnson,K., Ding,H., Giovannoni,S.J., Moore,L.R. 
and Chisholm,S.W.", + "Biller,S.J., Berube,P.M., Berta-Thompson,J.W., Kelly,L., Roggensack,S.E., Awad,L., Roache-Johnson,K.H., Ding,H., Giovannoni,S.J., Rocap,G., Moore,L.R. and Chisholm,S.W." + ], + "size": 1624310, + "num_contigs": 11, + "genome_type": null, + "gc_content": 0.31164, + "taxonomy": "cellular organisms; Bacteria; Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus; Prochlorococcus marinus", + "mean_contig_length": 147664.54545454544, + "external_origination_date": "11-Apr-2017", + "original_source_file_name": "GCF_000759885.1_ASM75988v1_genomic.gbff", + "cds_count": 1760, + "feature_count": 1760, + "mrna_count": 0, + "non_coding_feature_count": 89, + "assembly_ref": "15792:64027:2", + "source_id": "NZ_JNAH01000001", + "feature_counts": { + "CDS": 1760, + "gene": 1804, + "ncRNA": 3, + "non-protein_encoding_gene": 44, + "protein_encoding_gene": 1760, + "rRNA": 3, + "regulatory": 1, + "tRNA": 37, + "tmRNA": 1 + }, + "source": "RefSeq", + "warnings": [ + "SUSPECT: CDS EU91_RS03000_CDS_1 has a length of 974 which is not consistent with the length of the translation included (323 amino acids)." + ], + "creator": "kbasedata", + "access_group": 56638, + "obj_name": "GCF_000759885.1", + "shared_users": [ + "kbaseuitest" + ], + "timestamp": 1531222570501, + "creation_date": "2020-11-19T01:07:33+0000", + "is_public": true, + "version": 1, + "obj_id": 2, + "copied": "15792/64028/2", + "tags": [ + "narrative" + ], + "obj_type_version": "14.2", + "obj_type_module": "KBaseGenomes", + "obj_type_name": "Genome", + "index_runner_ver": "1.9.17" + } + }, + { + "index": "narrative_2", + "id": "WS::56638:1", + "doc": { + "narrative_title": "Test Narrative for FeatureSet Integration Test", + "is_narratorial": false, + "data_objects": [ + { + "name": "GCF_000759885.1", + "obj_type": "KBaseGenomes.Genome-14.2" + }, + { + "name": "GCF_001766235.1", + "obj_type": "KBaseGenomes.Genome-17.0" + }, + { + "name": "featureset2", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, + { + "name": "featureset1", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, + { + "name": "mergedfeatureset", + "obj_type": "KBaseCollections.FeatureSet-4.0" + } + ], + "owner": "kbaseuitest", + "modified_at": 1605765178000, + "cells": [ + { + "desc": "Test Narrative for FeatureSet Integration Test.\nTo reproduce:\n\nremove add a markdown cell (this one!), remove the welcome cell\nImport 2 genomes\nin this case, I copied, from RefSeq refdata:\n\nAcetobacter ascendens\nProchlorococcus marinus str. 
GP2\n\nat the time of writing, the public data search for refseq is broken, so I copied into this narrative from their landing pages\ncreate a FeatureSet for each Genome, using Build FeatureSet from Genome\n\nchoose the first 3 features from each genome for the feature set\nname the feature set for Acetobacter \"featureset1\", for Prochlorococcus \"featureset2\"\n\ncreate another FeatureSet combining these two using \"Merge FeatureSets\"\nin the Description parameter add the text \"merged feature set\", or whatever text you like (it will need to match the integration test.)\nin \"Output FeatureSet Name\" name it \"mergedfeatureset\"\ninsert the merged feature set object into the narrative as the last cell; it should be the 6th cell.\nfinally make the narrative public.\n\n", + "cell_type": "markdown" + }, + { + "desc": "Build FeatureSet from Genome", + "cell_type": "kbase_app" + }, + { + "desc": "Merge FeatureSets - v1.0.1", + "cell_type": "kbase_app" + }, + { + "desc": "kbaseReportView", + "cell_type": "widget" + }, + { + "desc": "mergedfeatureset", + "cell_type": "data" + } + ], + "creator": "kbaseuitest", + "total_cells": 6, + "static_narrative_saved": null, + "static_narrative_ref": null, + "access_group": 56638, + "obj_name": "Narrative.1605747150690", + "shared_users": [ + "kbaseuitest" + ], + "timestamp": 1605765178549, + "creation_date": "2020-11-19T00:52:30+0000", + "is_public": true, + "version": 13, + "obj_id": 1, + "copied": null, + "tags": [ + "narrative" + ], + "obj_type_version": "4.0", + "obj_type_module": "KBaseNarrative", + "obj_type_name": "Narrative", + "index_runner_ver": "1.9.17" + } + } + ], + "search_time": 91945, + "aggregations": {} +} \ No newline at end of file diff --git a/tests/unit/mocks/data/elasticsearch/legacy/search_objects/case-01/result.json b/tests/unit/mocks/data/elasticsearch/legacy/search_objects/case-01/result.json new file mode 100644 index 0000000..7ae80dd --- /dev/null +++ b/tests/unit/mocks/data/elasticsearch/legacy/search_objects/case-01/result.json @@ -0,0 +1,167 @@ +{ + "count": 2, + "hits": [ + { + "index": "genome_2", + "id": "WS::56638:2", + "doc": { + "genome_id": "GCF_000759885.1", + "scientific_name": "Prochlorococcus marinus str. GP2", + "publication_titles": [ + "Genomes of diverse isolates of the marine cyanobacterium Prochlorococcus", + "Direct Submission" + ], + "publication_authors": [ + "Biller,S., Berube,P., Thompson,J., Kelly,L., Roggensack,S., Awad,L., Roache-Johnson,K., Ding,H., Giovannoni,S.J., Moore,L.R. and Chisholm,S.W.", + "Biller,S.J., Berube,P.M., Berta-Thompson,J.W., Kelly,L., Roggensack,S.E., Awad,L., Roache-Johnson,K.H., Ding,H., Giovannoni,S.J., Rocap,G., Moore,L.R. and Chisholm,S.W." 
+ ], + "size": 1624310, + "num_contigs": 11, + "genome_type": null, + "gc_content": 0.31164, + "taxonomy": "cellular organisms; Bacteria; Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus; Prochlorococcus marinus", + "mean_contig_length": 147664.54545454544, + "external_origination_date": "11-Apr-2017", + "original_source_file_name": "GCF_000759885.1_ASM75988v1_genomic.gbff", + "cds_count": 1760, + "feature_count": 1760, + "mrna_count": 0, + "non_coding_feature_count": 89, + "assembly_ref": "15792:64027:2", + "source_id": "NZ_JNAH01000001", + "feature_counts": { + "CDS": 1760, + "gene": 1804, + "ncRNA": 3, + "non-protein_encoding_gene": 44, + "protein_encoding_gene": 1760, + "rRNA": 3, + "regulatory": 1, + "tRNA": 37, + "tmRNA": 1 + }, + "source": "RefSeq", + "warnings": [ + "SUSPECT: CDS EU91_RS03000_CDS_1 has a length of 974 which is not consistent with the length of the translation included (323 amino acids)." + ], + "creator": "kbasedata", + "access_group": 56638, + "obj_name": "GCF_000759885.1", + "shared_users": [ + "kbaseuitest" + ], + "timestamp": 1531222570501, + "creation_date": "2020-11-19T01:07:33+0000", + "is_public": true, + "version": 1, + "obj_id": 2, + "copied": "15792/64028/2", + "tags": [ + "narrative" + ], + "obj_type_version": "14.2", + "obj_type_module": "KBaseGenomes", + "obj_type_name": "Genome", + "index_runner_ver": "1.9.17" + }, + "highlight": { + "publication_titles": [ + "Genomes of diverse isolates of the marine cyanobacterium Prochlorococcus" + ], + "scientific_name": [ + "Prochlorococcus marinus str. GP2" + ], + "taxonomy": [ + "Terrabacteria group; Cyanobacteria/Melainabacteria group; Cyanobacteria; Synechococcales; Prochloraceae; Prochlorococcus", + "; Prochlorococcus marinus" + ] + } + }, + { + "index": "narrative_2", + "id": "WS::56638:1", + "doc": { + "narrative_title": "Test Narrative for FeatureSet Integration Test", + "is_narratorial": false, + "data_objects": [ + { + "name": "GCF_000759885.1", + "obj_type": "KBaseGenomes.Genome-14.2" + }, + { + "name": "GCF_001766235.1", + "obj_type": "KBaseGenomes.Genome-17.0" + }, + { + "name": "featureset2", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, + { + "name": "featureset1", + "obj_type": "KBaseCollections.FeatureSet-4.0" + }, + { + "name": "mergedfeatureset", + "obj_type": "KBaseCollections.FeatureSet-4.0" + } + ], + "owner": "kbaseuitest", + "modified_at": 1605765178000, + "cells": [ + { + "desc": "Test Narrative for FeatureSet Integration Test.\nTo reproduce:\n\nremove add a markdown cell (this one!), remove the welcome cell\nImport 2 genomes\nin this case, I copied, from RefSeq refdata:\n\nAcetobacter ascendens\nProchlorococcus marinus str. 
GP2\n\nat the time of writing, the public data search for refseq is broken, so I copied into this narrative from their landing pages\ncreate a FeatureSet for each Genome, using Build FeatureSet from Genome\n\nchoose the first 3 features from each genome for the feature set\nname the feature set for Acetobacter \"featureset1\", for Prochlorococcus \"featureset2\"\n\ncreate another FeatureSet combining these two using \"Merge FeatureSets\"\nin the Description parameter add the text \"merged feature set\", or whatever text you like (it will need to match the integration test.)\nin \"Output FeatureSet Name\" name it \"mergedfeatureset\"\ninsert the merged feature set object into the narrative as the last cell; it should be the 6th cell.\nfinally make the narrative public.\n\n", + "cell_type": "markdown" + }, + { + "desc": "Build FeatureSet from Genome", + "cell_type": "kbase_app" + }, + { + "desc": "Merge FeatureSets - v1.0.1", + "cell_type": "kbase_app" + }, + { + "desc": "kbaseReportView", + "cell_type": "widget" + }, + { + "desc": "mergedfeatureset", + "cell_type": "data" + } + ], + "creator": "kbaseuitest", + "total_cells": 6, + "static_narrative_saved": null, + "static_narrative_ref": null, + "access_group": 56638, + "obj_name": "Narrative.1605747150690", + "shared_users": [ + "kbaseuitest" + ], + "timestamp": 1605765178549, + "creation_date": "2020-11-19T00:52:30+0000", + "is_public": true, + "version": 13, + "obj_id": 1, + "copied": null, + "tags": [ + "narrative" + ], + "obj_type_version": "4.0", + "obj_type_module": "KBaseNarrative", + "obj_type_name": "Narrative", + "index_runner_ver": "1.9.17" + }, + "highlight": { + "cells.desc": [ + "the welcome cell\nImport 2 genomes\nin this case, I copied, from RefSeq refdata:\n\nAcetobacter ascendens\nProchlorococcus", + "marinus str.", + "GP2\n\nat the time of writing, the public data search for refseq is broken, so I copied into this narrative", + "features from each genome for the feature set\nname the feature set for Acetobacter \"featureset1\", for Prochlorococcus" + ] + } + } + ], + "search_time": 103122, + "aggregations": {} +} diff --git a/tests/unit/mocks/data/elasticsearch/legacy/search_types/case-01/result.json b/tests/unit/mocks/data/elasticsearch/legacy/search_types/case-01/result.json new file mode 100644 index 0000000..e49cf89 --- /dev/null +++ b/tests/unit/mocks/data/elasticsearch/legacy/search_types/case-01/result.json @@ -0,0 +1,21 @@ +{ + "count": 2, + "hits": [], + "search_time": 2815, + "aggregations": { + "type_count": { + "count_err_upper_bound": 0, + "count_other_docs": 0, + "counts": [ + { + "key": "Genome", + "count": 1 + }, + { + "key": "Narrative", + "count": 1 + } + ] + } + } +} \ No newline at end of file diff --git a/tests/unit/mocks/mocked.py b/tests/unit/mocks/mocked.py new file mode 100644 index 0000000..95754b0 --- /dev/null +++ b/tests/unit/mocks/mocked.py @@ -0,0 +1,189 @@ +import json +import os + + +def get_data(name): + """Load the json test data file with the given name from ./data/legacy """ + + file_path = os.path.join(os.path.dirname(__file__), 'data', name) + if os.path.isfile(file_path): + with open(file_path) as f: + return True, json.load(f) + else: + return False, None + + +def mocked_get_workspace_info(workspace_id, auth_token): + # if auth provided, assume the private workspaces. 
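+    # The fixture file holds the nine-element workspace info tuple described
+    # in the workspace spec: [id, name, owner, moddate, max_objid,
+    # user_permission, globalread, lockstat, metadata]. The mock grants
+    # access to authenticated callers when either user_permission or
+    # globalread is not 'n', and to anonymous callers only when globalread
+    # is not 'n'; otherwise it returns None, mimicking an inaccessible
+    # workspace.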
+ found, info = get_data(f'Workspace/get_workspace_info/{workspace_id}.json') + if not found: + return None + + # Variable names taken from workspace spec + [_id, _workspace, _owner, _moddate, _max_objid, user_permission, globalread, + _lockstat, _metadata] = info + if auth_token is not None: + # If authenticated, access is granted if either + # public (globalread) or private (user_permission) is + # not denied ('n') + if globalread != 'n' or user_permission != 'n': + return info + else: + # Without authentication, access is granted only if + # public (globalread) is not denied + if globalread != 'n': + return info + + return None + + +def mocked_get_user_profiles(usernames, token=None): + """ + mocks get_user_profiles in src/utils/user_profiles.py + + :param usernames: + :return: + """ + profiles = [] + for username in usernames: + found, profile = get_data(f'UserProfile/get_profile/{username}.json') + if not found: + profiles.append(None) + else: + profiles.append(profile) + return profiles + + +def handle_get_workspace_info(rpc, auth_token): + workspace_id = rpc['params'][0]['id'] + # if auth provided, assume the private workspaces. + found, info = get_data(f'Workspace/get_workspace_info/{workspace_id}.json') + if not found: + return None + + # Variable names taken from workspace spec + [_id, _workspace, _owner, _moddate, _max_objid, user_permission, globalread, + _lockstat, _metadata] = info + if auth_token is not None: + # If authenticated, access is granted if either + # public (globalread) or private (user_permission) is + # not denied ('n') + if globalread != 'n' or user_permission != 'n': + return info + else: + # Without authentication, access is granted only if + # public (globalread) is not denied + if globalread != 'n': + return info + + return None + + +def handle_get_user_profile(rpc, auth_token): + usernames = rpc['params'][0] + profiles = [] + for username in usernames: + found, profile = get_data(f'UserProfile/get_profile/{username}.json') + if not found: + profiles.append(None) + else: + profiles.append(profile) + return profiles + + +def workspace_call(request): + header = { + 'Content-Type': 'application/json' + } + auth_token = request.headers.get('Authorization') + rpc = json.loads(request.body) + method = rpc['method'] + if auth_token is not None and auth_token == 'bad_token': + return (500, header, json.dumps({ + 'version': '1.1', + 'id': rpc['id'], + 'error': { + 'name': 'JSONRPCError', + 'code': -32001, + 'message': 'INVALID TOKEN' + } + })) + + # TODO: emulate permission denied + + # TODO: support the ws methods we do support + + if method == 'Workspace.get_workspace_info': + result = handle_get_workspace_info(rpc, auth_token) + else: + # TODO: make this correct + return (500, header, json.dumps({ + 'version': '1.1', + 'id': rpc['id'], + 'error': { + 'name': 'JSONRPCError', + 'code': -32601, + 'message': f'Method Not Supported "{method}' + } + })) + + return (200, header, json.dumps({ + 'version': '1.1', + 'id': rpc['id'], + 'result': [result] + })) + + +def error_value(code, message): + return { + 'name': 'JSONRPCError', + 'code': code, + 'message': message + } + + +def error_response(request_rpc, error_value): + header = { + 'Content-Type': 'application/json' + } + return_rpc = { + 'version': '1.1', + 'error': error_value + } + if 'id' in request_rpc: + return_rpc['id'] = request_rpc['id'] + return 500, header, json.dumps(return_rpc) + + +def result_response(request_rpc, result_value): + header = { + 'Content-Type': 'application/json' + } + return_rpc = { + 
'version': '1.1', + 'result': result_value + } + if 'id' in request_rpc: + return_rpc['id'] = request_rpc['id'] + return 200, header, json.dumps(return_rpc) + + +def user_profile_call(request): + auth_token = request.headers.get('Authorization') + rpc = json.loads(request.body) + method = rpc['method'] + if auth_token is not None and auth_token == 'bad_token': + return error_response(rpc, error_value(-32001, 'INVALID TOKEN')) + + # TODO: emulate permission denied + + # TODO: support the ws methods we do support + + if method == 'UserProfile.get_user_profile': + result = handle_get_user_profile(rpc, auth_token) + else: + # TODO: make this correct + return error_response(rpc, + error_value(-32601, f'Method Not Supported "{method}')) + + return result_response(rpc, [result]) diff --git a/tests/unit/search1_conversion/__init__.py b/tests/unit/search1_conversion/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/search1_conversion/data.py b/tests/unit/search1_conversion/data.py index d017d6e..0ef57a2 100644 --- a/tests/unit/search1_conversion/data.py +++ b/tests/unit/search1_conversion/data.py @@ -1,15 +1,146 @@ -mock_ws_info = [ - 1, - "test_workspace", - "username", - "2020-06-06T03:49:55+0000", - 388422, - "n", - "r", - "unlocked", - {"searchtags": "refdata"} -] +# This test data simulates a search space of two objects across two +# narratives. + +# Raw results +test_search_results = { + 'hits': [ + { + 'highlight': {'name': 'name1'}, + 'doc': { + 'access_group': 1, + 'creator': 'username', + 'obj_id': 2, + 'obj_name': 'object2', + 'version': 1, + 'obj_type_name': 'Type', + 'timestamp': 0, + 'name': 'name1' + }, + 'id': 'WS::1:2', + 'index': 'test_index1_1', + }, + { + 'highlight': {'name': 'name2'}, + 'doc': { + 'access_group': 0, + 'creator': 'username', + 'obj_id': 2, + 'obj_name': 'object2', + 'version': 1, + 'obj_type_name': 'Type', + 'timestamp': 0, + 'name': 'name2' + }, + 'id': 'WS::0:2', + 'index': 'test_index1_1', + }, + ], + 'count': 0, + 'search_time': 1 +} + +mock_ws_info = { + # public workspace, refdata + "0": [ + 0, + "workspace0", + "username", + "2020-01-02T03:04:05+0000", + 388422, + "n", + "r", + "unlocked", + { + "searchtags": "refdata" + } + ], + # public workspace, narrative + "1": [ + 1, + "workspace1", + "username", + "2020-01-02T03:04:05+0000", + 388422, + "n", + "r", + "unlocked", + { + "searchtags": "narrative", + "narrative": "1", + "narrative_nice_name": "narrative1", + "is_temporary": "f" + } + ], + # private workspace, narrative + "100": [ + 100, + "workspace100", + "username", + "2020-01-02T03:04:05+0000", + 388422, + "r", + "n", + "unlocked", + { + "searchtags": "narrative", + "narrative": "1", + "narrative_nice_name": "narrative 100", + "is_temporary": "f" + } + ], + # private, inaccessible workspace, narrative + "101": [ + 101, + "workspace101", + "username", + "2020-01-02T03:04:05+0000", + 388422, + "n", + "n", + "unlocked", + { + "searchtags": "narrative", + "narrative": "1", + "narrative_nice_name": "narrative 101", + "is_temporary": "f" + } + ] +} + +# object_info is: +# object id, object name, workspace type, date saved, version, +# saved by, workspace id, workspace name, checksum, size, metadata +mock_object_info = [[ + 2, # object id + "object2", # object name + "Module.Type-1.0", # workspace type + 1000, # saved time + 1, # version + "username", # saved by + 0, # workspace id + "workspace0", # workspace name + "ab123", # checksum + 123, # size + { + + } +], + [ + 2, # object id + "object2", # object name + 
"Module.Type-1.0", # workspace type + 1000, # saved time + 1, # version + "username", # saved by + 1, # workspace id + "workspace1", # workspace name + "ab123", # checksum + 123, # size + { + + } +]] mock_user_profiles = [{ "user": { @@ -42,24 +173,6 @@ } }] -test_search_results = { - 'hits': [ - { - 'highlight': {'name': 'name1'}, - 'doc': {'access_group': 1, 'timestamp': 0, 'name': 'name1'}, - 'id': '1', - 'index': 'test_index1_1', - }, - { - 'highlight': {'name': 'name2'}, - 'doc': {'access_group': 0, 'timestamp': 0, 'name': 'name2'}, - 'id': '1', - 'index': 'test_index1_1', - }, - ], - 'count': 0, - 'search_time': 1 -} expected_search_results = { "pagination": {}, @@ -68,44 +181,69 @@ "search_time": 1, "objects": [ { - "object_name": "", - "access_group": 1, - "obj_id": None, - "version": None, - "timestamp": 0, - "type": "", - "creator": None, + "id": "WS::1:2", + "object_name": "object2", + "workspace_id": 1, + "object_id": 2, + "object_version": 1, + "modified_at": 0, + "workspace_type_name": "Type", + "creator": "username", "data": {"name": "name1"}, - "guid": "WS:1/1", - "kbase_id": "1/1", "index_name": "test", - "type_ver": 0, - "key_props": {"name": "name1"}, + "index_version": 0, "highlight": {"name": "name1"} }, { - "object_name": "", - "access_group": 0, - "obj_id": None, - "version": None, - "timestamp": 0, - "type": "", - "creator": None, + "id": "WS::0:2", + "object_name": "object2", + "workspace_id": 0, + "object_id": 2, + "object_version": 1, + "modified_at": 0, + "workspace_type_name": "Type", + "creator": "username", "data": {"name": "name2"}, - "guid": "WS:1/1", - "kbase_id": "1/1", "index_name": "test", - "type_ver": 0, - "key_props": {"name": "name2"}, + "index_version": 0, "highlight": {"name": "name2"} } ], "access_group_narrative_info": { - '1': ['narrative1', 123, 1591415395, 'username', 'User Example'] + '1': ['narrative1', 1, 1577934245000, 'username', 'User Example'] }, "access_groups_info": { - "1": mock_ws_info, - "0": mock_ws_info, + # public workspace, refdata + "0": [ + 0, + "workspace0", + "username", + "2020-01-02T03:04:05+0000", + 388422, + "n", + "r", + "unlocked", + { + "searchtags": "refdata" + } + ], + # public workspace, narrative + "1": [ + 1, + "workspace1", + "username", + "2020-01-02T03:04:05+0000", + 388422, + "n", + "r", + "unlocked", + { + "searchtags": "narrative", + "narrative": "1", + "narrative_nice_name": "narrative1", + "is_temporary": "f" + } + ] } } @@ -113,43 +251,68 @@ "search_time": 1, "objects": [ { - "object_name": "", - "access_group": 1, - "obj_id": None, - "version": None, - "timestamp": 0, - "type": "", - "creator": None, + "id": "WS::1:2", + "object_name": "object2", + "workspace_id": 1, + "object_id": 2, + "object_version": 1, + "modified_at": 0, + "workspace_type_name": "Type", + "creator": "username", "data": {"name": "name1"}, - "guid": "WS:1/1", - "kbase_id": "1/1", "index_name": "test", - "type_ver": 0, - "key_props": {"name": "name1"}, + "index_version": 0, "highlight": {"name": "name1"} }, { - "object_name": "", - "access_group": 0, - "obj_id": None, - "version": None, - "timestamp": 0, - "type": "", - "creator": None, + "id": "WS::0:2", + "object_name": "object2", + "workspace_id": 0, + "object_id": 2, + "object_version": 1, + "modified_at": 0, + "workspace_type_name": "Type", + "creator": "username", "data": {"name": "name2"}, - "guid": "WS:1/1", - "kbase_id": "1/1", "index_name": "test", - "type_ver": 0, - "key_props": {"name": "name2"}, + "index_version": 0, "highlight": {"name": "name2"} } ], 
"access_group_narrative_info": { - '1': ['narrative1', 123, 1591415395, 'username', 'User Example'] + '1': ['narrative1', 1, 1577934245000, 'username', 'User Example'] }, "access_groups_info": { - "1": mock_ws_info, - "0": mock_ws_info + # public workspace, refdata + "0": [ + 0, + "workspace0", + "username", + "2020-01-02T03:04:05+0000", + 388422, + "n", + "r", + "unlocked", + { + "searchtags": "refdata" + } + ], + # public workspace, narrative + "1": [ + 1, + "workspace1", + "username", + "2020-01-02T03:04:05+0000", + 388422, + "n", + "r", + "unlocked", + { + "searchtags": "narrative", + "narrative": "1", + "narrative_nice_name": "narrative1", + "is_temporary": "f" + } + ] } } diff --git a/tests/unit/search1_conversion/test_search1_convert_params.py b/tests/unit/search1_conversion/test_search1_convert_params.py index e0433b1..0142b7b 100644 --- a/tests/unit/search1_conversion/test_search1_convert_params.py +++ b/tests/unit/search1_conversion/test_search1_convert_params.py @@ -1,7 +1,7 @@ import pytest from src.search1_conversion import convert_params -from src.exceptions import ResponseError +from jsonrpc11base.errors import InvalidParamsError def test_search_objects_valid(): @@ -12,7 +12,8 @@ def test_search_objects_valid(): 'query': {'bool': {}}, 'size': 20, 'from': 0, 'sort': [{'timestamp': {'order': 'asc'}}], - 'public_only': False, 'private_only': False + 'only_public': False, 'only_private': False, + 'track_total_hits': True } query = convert_params.search_objects(params) assert query == expected @@ -21,18 +22,43 @@ def test_search_objects_valid(): def test_search_objects_highlight(): params = { 'match_filter': {'full_text_in_all': 'x'}, - 'include_highlight': True + 'post_processing': { + 'include_highlight': 1 + } } expected = { - 'query': {'bool': {'must': [{'match': {'agg_fields': 'x'}}]}}, + 'query': { + 'bool': { + 'must': [{ + 'match': { + 'agg_fields': { + 'query': 'x', + 'operator': 'AND' + } + } + }] + } + }, 'highlight': { 'fields': {'*': {}}, - 'highlight_query': {'bool': {'must': [{'match': {'agg_fields': 'x'}}]}}, + 'highlight_query': { + 'bool': { + 'must': [{ + 'match': { + 'agg_fields': { + 'query': 'x', + 'operator': 'AND' + } + } + }] + } + }, 'require_field_match': False, }, 'size': 20, 'from': 0, 'sort': [{'timestamp': {'order': 'asc'}}], - 'public_only': False, 'private_only': False + 'only_public': False, 'only_private': False, + 'track_total_hits': True } query = convert_params.search_objects(params) assert query == expected @@ -43,10 +69,23 @@ def test_search_objects_fulltext(): 'match_filter': {'full_text_in_all': 'xyz'}, } expected = { - 'query': {'bool': {'must': [{'match': {'agg_fields': 'xyz'}}]}}, + 'query': { + 'bool': { + 'must': [{ + 'match': { + 'agg_fields': { + 'query': 'xyz', + 'operator': 'AND' + } + } + }] + } + }, 'size': 20, 'from': 0, 'sort': [{'timestamp': {'order': 'asc'}}], - 'public_only': False, 'private_only': False + 'only_public': False, + 'only_private': False, + 'track_total_hits': True } query = convert_params.search_objects(params) assert query == expected @@ -60,7 +99,8 @@ def test_search_objects_object_name(): 'query': {'bool': {'must': [{'match': {'obj_name': 'xyz'}}]}}, 'size': 20, 'from': 0, 'sort': [{'timestamp': {'order': 'asc'}}], - 'public_only': False, 'private_only': False + 'only_public': False, 'only_private': False, + 'track_total_hits': True } query = convert_params.search_objects(params) assert query == expected @@ -74,14 +114,15 @@ def test_search_objects_timestamp(): 'query': {'bool': {'must': [{'range': 
{'timestamp': {'gte': 0, 'lte': 1}}}]}}, 'size': 20, 'from': 0, 'sort': [{'timestamp': {'order': 'asc'}}], - 'public_only': False, 'private_only': False + 'only_public': False, 'only_private': False, + 'track_total_hits': True } query = convert_params.search_objects(params) assert query == expected def test_search_objects_timestamp_invalid(): - with pytest.raises(ResponseError): + with pytest.raises(InvalidParamsError): params = { 'match_filter': {'timestamp': {'min_date': 0, 'max_date': 0}} } @@ -96,7 +137,9 @@ def test_search_objects_source_tags(): 'query': {'bool': {'must': [{'term': {'tags': 'x'}}, {'term': {'tags': 'y'}}]}}, 'size': 20, 'from': 0, 'sort': [{'timestamp': {'order': 'asc'}}], - 'public_only': False, 'private_only': False + 'only_public': False, + 'only_private': False, + 'track_total_hits': True } query = convert_params.search_objects(params) assert query == expected @@ -110,7 +153,9 @@ def test_search_objects_source_tags_blacklist(): 'query': {'bool': {'must_not': [{'term': {'tags': 'x'}}, {'term': {'tags': 'y'}}]}}, 'size': 20, 'from': 0, 'sort': [{'timestamp': {'order': 'asc'}}], - 'public_only': False, 'private_only': False + 'only_public': False, + 'only_private': False, + 'track_total_hits': True } query = convert_params.search_objects(params) assert query == expected @@ -118,21 +163,25 @@ def test_search_objects_source_tags_blacklist(): def test_search_objects_objtypes(): params = { - 'object_types': ['x', 'y', 'GenomeFeature'] + 'object_types': ['x', 'y'] } expected = { 'query': { 'bool': { - 'should': [ - {'term': {'obj_type_name': 'x'}}, - {'term': {'obj_type_name': 'y'}} - ] + 'filter': { + 'bool': { + 'should': [ + {'term': {'obj_type_name': 'x'}}, + {'term': {'obj_type_name': 'y'}} + ] + } + } } }, - 'indexes': ['genome_features_2'], 'size': 20, 'from': 0, 'sort': [{'timestamp': {'order': 'asc'}}], - 'public_only': False, 'private_only': False + 'only_public': False, 'only_private': False, + 'track_total_hits': True } query = convert_params.search_objects(params) assert query == expected @@ -149,14 +198,15 @@ def test_search_objects_sorting(): 'query': {'bool': {}}, 'sort': [{'x': {'order': 'asc'}}, {'timestamp': {'order': 'desc'}}], 'size': 20, 'from': 0, - 'public_only': False, 'private_only': False + 'only_public': False, 'only_private': False, + 'track_total_hits': True } query = convert_params.search_objects(params) assert query == expected def test_search_objects_sorting_invalid_prop(): - with pytest.raises(ResponseError): + with pytest.raises(InvalidParamsError): params = { 'sorting_rules': [ {'property': 'x', 'is_object_property': False, 'ascending': False}, @@ -185,7 +235,8 @@ def test_search_objects_lookup_in_keys(): }, 'size': 20, 'from': 0, 'sort': [{'timestamp': {'order': 'asc'}}], - 'public_only': False, 'private_only': False + 'only_public': False, 'only_private': False, + 'track_total_hits': True } query = convert_params.search_objects(params) assert query == expected @@ -200,7 +251,9 @@ def test_search_types_valid(): 'size': 0, 'from': 0, 'aggs': {'type_count': {'terms': {'field': 'obj_type_name'}}}, 'sort': [{'timestamp': {'order': 'asc'}}], - 'public_only': False, 'private_only': False + 'only_public': False, + 'only_private': False, + 'track_total_hits': True } query = convert_params.search_types(params) assert query == expected @@ -208,10 +261,80 @@ def test_search_types_valid(): def test_get_objects_valid(): params = { - 'guids': ['x', 'y'] + 'ids': ['x', 'y'] } expected = { 'query': {'terms': {'_id': ['x', 'y']}} } query = 
convert_params.get_objects(params) assert query == expected + + +def test_search_objects_only_public(): + params = { + 'match_filter': {}, + 'access_filter': { + 'with_public': 1 + } + } + expected = { + 'query': {'bool': {}}, + 'size': 20, 'from': 0, + 'sort': [{'timestamp': {'order': 'asc'}}], + 'only_public': True, 'only_private': False, + 'track_total_hits': True + } + query = convert_params.search_objects(params) + assert query == expected + + +def test_search_objects_only_private(): + params = { + 'match_filter': {}, + 'access_filter': { + 'with_private': 1 + } + } + expected = { + 'query': {'bool': {}}, + 'size': 20, 'from': 0, + 'sort': [{'timestamp': {'order': 'asc'}}], + 'only_public': False, 'only_private': True, + 'track_total_hits': True + } + query = convert_params.search_objects(params) + assert query == expected + + +def test_search_objects_private_and_public(): + params = { + 'match_filter': {}, + 'access_filter': { + 'with_private': 1, + 'with_public': 1 + } + } + expected = { + 'query': {'bool': {}}, + 'size': 20, 'from': 0, + 'sort': [{'timestamp': {'order': 'asc'}}], + 'only_public': False, 'only_private': False, + 'track_total_hits': True + } + query = convert_params.search_objects(params) + assert query == expected + + +def test_search_objects_private_nor_public(): + params = { + 'match_filter': {}, + 'access_filter': { + 'with_private': 0, + 'with_public': 0 + } + } + with pytest.raises(InvalidParamsError) as re: + convert_params.search_objects(params) + assert re.value.code == -32602 + assert re.value.message == 'Invalid params' + assert re.value.error['message'] == 'May not specify no private data and no public data' diff --git a/tests/unit/search1_conversion/test_search1_convert_result.py b/tests/unit/search1_conversion/test_search1_convert_result.py index 970986f..ca083f4 100644 --- a/tests/unit/search1_conversion/test_search1_convert_result.py +++ b/tests/unit/search1_conversion/test_search1_convert_result.py @@ -1,92 +1,153 @@ -from unittest.mock import patch -import subprocess - +import unittest +import responses +from src.exceptions import NoAccessGroupError, NoUserProfileError from src.search1_conversion import convert_result -from src.utils.wait_for_service import wait_for_service -from tests.helpers import init_elasticsearch - -from tests.unit.search1_conversion.data import ( - mock_ws_info, - mock_user_profiles, - test_search_results, - expected_search_results, - expected_get_objects, -) - -ES_URL = 'http://localhost:9200' -subprocess.run("docker-compose up -d", shell=True) -wait_for_service(ES_URL, 'Elasticsearch') -init_elasticsearch() +from src.utils.config import config +from tests.unit.mocks.mocked import \ + get_data, \ + workspace_call, user_profile_call + # TODO test post processing # TODO test the following fields: object_name, obj_id, version, type, creator -@patch('src.search1_conversion.convert_result.get_object_info') -@patch('src.search1_conversion.convert_result.get_workspace_info') -@patch('src.search1_conversion.convert_result.get_user_profiles') -def test_search_objects_valid(user_patched, ws_patched, infos_patched): - params = { - 'post_processing': { - 'add_narrative_info': 1, - 'add_access_group_info': 1, - 'include_highlight': 1, +class Search1ConversionTest(unittest.TestCase): + @responses.activate + def test_search_objects_valid(self): + responses.add_callback(responses.POST, config['workspace_url'], + callback=workspace_call) + + responses.add_callback(responses.POST, config['user_profile_url'], + callback=user_profile_call) + + 
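+        # These conversion tests are data driven: each case directory under
+        # tests/unit/mocks/data pairs the legacy request params, the raw
+        # Elasticsearch response, and the expected converted result, so the
+        # conversion layer can be exercised against canned fixtures instead
+        # of a live Elasticsearch, Workspace, or UserProfile service.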
# Using case-01 params, ES result and api result. + _found, test_params = get_data( + 'SearchAPI/legacy/search_objects/case-01/params.json') + _found, test_es_search_results = get_data( + 'elasticsearch/legacy/search_objects/case-01/result.json') + _found, test_expected = get_data( + 'SearchAPI/legacy/search_objects/case-01/result.json') + + final = convert_result.search_objects(test_params, test_es_search_results, + {'auth': None}) + + # Remove unwanted comparisons. + del final['search_time'] + del test_expected['search_time'] + + self.maxDiff = None + self.assertEqual(final, test_expected) + + @responses.activate + def test_get_objects_valid(self): + responses.add_callback(responses.POST, config['workspace_url'], + callback=workspace_call) + + responses.add_callback(responses.POST, config['user_profile_url'], + callback=user_profile_call) + + _found, test_params = get_data( + 'SearchAPI/legacy/get_objects/case-02/params.json') + _found, test_es_search_results = get_data( + 'elasticsearch/legacy/get_objects/case-02/result.json') + _found, test_expected = get_data( + 'SearchAPI/legacy/get_objects/case-02/result.json') + + final = convert_result.get_objects(test_params, test_es_search_results, {'auth': None}) + self.assertEqual(final, test_expected) + + def test_search_types_valid(self): + _found, test_es_search_results = get_data( + 'elasticsearch/legacy/search_types/case-01/result.json') + _found, test_expected = get_data( + 'SearchAPI/legacy/search_types/case-01/result.json') + + # Not that converting search_types does not require any + # params or context. + final = convert_result.search_types(test_es_search_results) + + self.assertEqual(final['type_to_count'], test_expected['type_to_count']) + + def test_fetch_narrative_info_no_hits(self): + results = { + 'hits': [] } - } - infos_patched.return_value = [] - ws_patched.return_value = mock_ws_info - user_patched.return_value = mock_user_profiles - final = convert_result.search_objects(params, test_search_results, {'auth': None}) - for key in expected_search_results: - assert key in final - assert expected_search_results[key] == final[key], key - - -@patch('src.search1_conversion.convert_result.get_object_info') -@patch('src.search1_conversion.convert_result.get_workspace_info') -@patch('src.search1_conversion.convert_result.get_user_profiles') -def test_get_objects_valid(user_patched, ws_patched, infos_patched): - params = { - 'post_processing': { - 'add_narrative_info': 1, - 'add_access_group_info': 1, - 'include_highlight': 1, + ctx = {} + result = convert_result._fetch_narrative_info(results, ctx) + assert len(result) == 2 + assert result[0] == {} + assert result[1] == {} + + # TODO: This condition should not occur in any object index! 
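+    # Every document indexed from the workspace is expected to carry an
+    # 'access_group' field; when it is absent the converter raises
+    # NoAccessGroupError rather than silently skipping the hit, which is
+    # what the next test asserts.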
+ def test_fetch_narrative_info_no_access_group(self): + results = { + 'hits': [{ + 'doc': {} + }] } - } - infos_patched.return_value = [] - ws_patched.return_value = mock_ws_info - user_patched.return_value = mock_user_profiles - final = convert_result.get_objects(params, test_search_results, {'auth': None}) - for key in expected_get_objects: - assert key in final - assert expected_get_objects[key] == final[key], key - - -def test_search_types_valid(): - params = { - 'post_processing': { + with self.assertRaises(NoAccessGroupError): + convert_result._fetch_narrative_info(results, {'auth': None}) + + @responses.activate + def test_fetch_narrative_info_owner_has_profile(self): + responses.add_callback(responses.POST, config['workspace_url'], + callback=workspace_call) + + responses.add_callback(responses.POST, config['user_profile_url'], + callback=user_profile_call) + + _found, test_es_search_results = get_data( + 'elasticsearch/legacy/search_objects/case-01/result.json') + _found, test_expected = get_data( + 'SearchAPI/legacy/search_objects/case-01/result.json') + + ctx = { + 'auth': None } - } - test_results = { - 'search_time': 1, - 'hits': [], - 'aggregations': { - 'type_count': { - 'counts': [ - {'key': 'x', 'count': 10}, - {'key': 'y', 'count': 20}, - ] - } + result = convert_result._fetch_narrative_info(test_es_search_results, ctx) + self.assertEqual(len(result), 2) + + expected_result = test_expected['access_group_narrative_info'] + self.assertEqual(result[1], expected_result) + + @responses.activate + def test_fetch_narrative_info_owner_has_no_profile(self): + responses.add_callback(responses.POST, config['workspace_url'], + callback=workspace_call) + + responses.add_callback(responses.POST, config['user_profile_url'], + callback=user_profile_call) + results = { + 'hits': [{ + 'doc': { + 'access_group': 10056638 + } + }] + } + meta = { + 'auth': None } - } - expected = { - "search_time": 1, - "type_to_count": { - 'x': 10, - 'y': 20, - }, - } - final = convert_result.search_types(params, test_results, {'auth': None}) - for key in expected: - assert key in final - assert expected[key] == final[key], key + with self.assertRaises(NoUserProfileError) as e: + convert_result._fetch_narrative_info(results, meta) + self.assertEqual(e.exception.message, + 'A user profile could not be found for "kbaseuitestx"') + + def test_get_object_data_from_search_results(self): + responses.add_callback(responses.POST, config['workspace_url'], + callback=workspace_call) + responses.add_callback(responses.POST, config['user_profile_url'], + callback=user_profile_call) + + _found, test_params = get_data( + 'SearchAPI/legacy/search_objects/case-01/params.json') + _found, test_es_search_results = get_data( + 'elasticsearch/legacy/search_objects/case-01/result.json') + _found, test_expected = get_data( + 'SearchAPI/legacy/search_objects/case-01/result.json') + + post_processing = test_params['post_processing'] + converted = convert_result._get_object_data_from_search_results( + test_es_search_results, + post_processing) + self.assertEqual(converted, test_expected['objects']) diff --git a/tests/unit/search1_rpc/test_search1_rpc.py b/tests/unit/search1_rpc/test_search1_rpc.py index 5a1554d..f068b12 100644 --- a/tests/unit/search1_rpc/test_search1_rpc.py +++ b/tests/unit/search1_rpc/test_search1_rpc.py @@ -6,17 +6,15 @@ """ import json import responses -import subprocess - -from src.search1_rpc import service +import pytest from src.utils.config import config -from src.utils.wait_for_service import 
wait_for_service -from tests.helpers import init_elasticsearch +from src.search1_rpc import service as rpc +# For mocking workspace calls +from unittest.mock import patch +from src import exceptions +from src.search1_rpc import errors + -ES_URL = 'http://localhost:9200' -subprocess.run("docker-compose up -d", shell=True) -wait_for_service(ES_URL, 'Elasticsearch') -init_elasticsearch() mock_obj_info = { "version": "1.1", "result": [ @@ -43,7 +41,7 @@ @responses.activate -def test_get_objects_valid(): +def test_get_objects_valid(services): # Mock the obj info request responses.add(responses.POST, config['workspace_url'], json=mock_obj_info, status=200) @@ -51,44 +49,150 @@ def test_get_objects_valid(): responses.add_passthru("http://localhost:9200/") params = { "method": "KBaseSearchEngine.get_objects", - "jsonrpc": "2.0", + "version": "1.1", "id": 0, "params": [ { - 'guids': ['public-doc1'], + 'ids': ['public-doc1'], 'post_processing': {'ids_only': 1}, } ], } - result = service.call(json.dumps(params), {'auth': None}) + result = rpc.call(json.dumps(params), {'auth': None}) res = json.loads(result) - assert res['jsonrpc'] == '2.0' + assert res['version'] == '1.1' assert res['id'] == 0 + assert 'result' in res assert len(res['result']) == 1 -def test_search_objects_valid(): - params = { - "method": "KBaseSearchEngine.search_objects", - "jsonrpc": "2.0", - "id": 0, - "params": [{ - 'match_filter': {}, - 'pagination': {'count': 0, 'start': 0}, - }] +def ws_call(request): + header = { + 'Content-Type': 'application/json' } - result = service.call(json.dumps(params), {'auth': None}) - res = json.loads(result) - assert len(res['result']) == 1 + auth = request.headers.get('Authorization') + if auth is not None and auth == 'bad_token': + return (500, header, json.dumps({ + 'version': '1.1', + 'id': 'foo', + 'error': { + 'name': 'JSONRPCError', + 'code': -32001, + 'message': 'INVALID TOKEN' + } + })) + else: + return 200, header, json.dumps(mock_obj_info) -def test_search_types_valid(): +@responses.activate +def test_get_objects_bad_auth(services): + # Mock the obj info request + responses.add_callback(responses.POST, + config['workspace_url'], + callback=ws_call) + # Allow elasticsearch calls + responses.add_passthru("http://localhost:9200/") params = { - "method": "KBaseSearchEngine.search_types", - "jsonrpc": "2.0", - "id": "0", - "params": [{'object_types': ['x'], 'match_filter': {}}] + "method": "KBaseSearchEngine.get_objects", + "version": "1.1", + "params": [ + { + 'ids': ['public-doc1'], + 'post_processing': {'ids_only': 1}, + } + ], } - result = service.call(json.dumps(params), {'auth': None}) + result = rpc.call(json.dumps(params), {'auth': 'bad_token'}) res = json.loads(result) - assert len(res['result']) == 1 + assert res['version'] == '1.1' + assert 'error' in res + error = res['error'] + assert error['code'] == 2000 + assert error['message'] == 'Auth error' + assert error['name'] == 'APIError' + + +def test_search_objects_valid(services): + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [0, 1] # Public workspaces + params = { + "method": "KBaseSearchEngine.search_objects", + "version": "1.1", + "id": 0, + "params": [{ + 'match_filter': {}, + 'pagination': {'count': 0, 'start': 0}, + }] + } + result = rpc.call(json.dumps(params), {'auth': None}) + res = json.loads(result) + assert 'result' in res + assert len(res['result']) == 1 + + +def test_search_types_valid(services): + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [0, 1] # 
Public workspaces + params = { + "method": "KBaseSearchEngine.search_types", + "version": "1.1", + "id": "0", + "params": [{'object_types': ['x'], 'match_filter': {}}] + } + result = rpc.call(json.dumps(params), {'auth': None}) + res = json.loads(result) + assert 'result' in res + assert len(res['result']) == 1 + + +def test_exception_conversion_unknown_type(): + def raise_unknown_type(): + raise exceptions.UnknownType('foo') + + with pytest.raises(errors.UnknownTypeError) as ute: + errors.trap_error(lambda: raise_unknown_type()) + assert ute.value.message == 'Unknown type' + + +def test_exception_conversion_auth_error(): + def raise_auth_error(): + og_error = { + 'error': { + 'message': 'foo' + } + } + raise exceptions.AuthError(og_error, 'bar') + + with pytest.raises(errors.AuthorizationError) as ute: + errors.trap_error(lambda: raise_auth_error()) + assert ute.value.message == 'Auth error' + assert ute.value.error['message'] == 'foo' + + +def test_exception_conversion_elasticsearch_error(): + def raise_elasticsearch_error(): + raise exceptions.ElasticsearchError('foo') + + with pytest.raises(errors.ElasticsearchServerError) as ute: + errors.trap_error(lambda: raise_elasticsearch_error()) + assert ute.value.message == 'Elasticsearch server error' + + +def test_exception_conversion_unknown_index(): + def raise_unknown_index(): + raise exceptions.UnknownIndex('foo') + + with pytest.raises(errors.UnknownIndexError) as ute: + errors.trap_error(lambda: raise_unknown_index()) + assert ute.value.message == 'Unknown index' + + +def test_exception_conversion_user_profile(): + def raise_user_profile_error(): + raise exceptions.UserProfileError('foo', 'bar') + + with pytest.raises(errors.UserProfileServiceError) as ute: + errors.trap_error(lambda: raise_user_profile_error()) + assert ute.value.message == 'User profile service error' + assert str(ute.value) == 'User profile service error\nResponse: bar\nURL: foo' diff --git a/tests/unit/search2_conversion/test_search2_convert_params.py b/tests/unit/search2_conversion/test_search2_convert_params.py index fd4c81f..bbb8fba 100644 --- a/tests/unit/search2_conversion/test_search2_convert_params.py +++ b/tests/unit/search2_conversion/test_search2_convert_params.py @@ -78,6 +78,8 @@ def test_search_workspace(): 'only_private': True, 'track_total_hits': True, 'indexes': ['narrative'], + 'from': 10, + 'size': 20, 'sort': [{ 'x': {'order': 'desc'} }, { @@ -102,6 +104,8 @@ def test_search_workspace_blank(): indexes = list(config['global']['ws_type_to_indexes'].values()) expected = { 'query': {'bool': {'must': []}}, + 'size': 10, + 'from': 0, 'track_total_hits': False, 'indexes': indexes, } diff --git a/tests/unit/search2_rpc/test_search2_rpc.py b/tests/unit/search2_rpc/test_search2_rpc.py index 82d797d..41c68ff 100644 --- a/tests/unit/search2_rpc/test_search2_rpc.py +++ b/tests/unit/search2_rpc/test_search2_rpc.py @@ -5,62 +5,94 @@ test search logic here. 
""" import json -import subprocess +# For mocking workspace calls +from unittest.mock import patch +from src.search2_rpc import service as rpc -from src.search2_rpc import service -from src.utils.wait_for_service import wait_for_service -from tests.helpers import init_elasticsearch -ES_URL = 'http://localhost:9200' -subprocess.run("docker-compose up -d", shell=True) -wait_for_service(ES_URL, 'Elasticsearch') -init_elasticsearch() +def test_show_indexes(services): + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [0, 1] # Public workspaces + params = { + "method": "show_indexes", + "jsonrpc": "2.0", + "id": 0, + } + result = rpc.call(json.dumps(params), {'auth': None}) + res = json.loads(result) + assert res['result'] + +def test_show_indexes_not_found(services): + with patch.dict('src.utils.config.config', {'index_prefix': 'foo'}): + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [0, 1] # Public workspaces + params = { + "method": "show_indexes", + "jsonrpc": "2.0", + "id": 0, + } + result = rpc.call(json.dumps(params), {'auth': None}) + res = json.loads(result) + assert len(res['result']) == 0 -def test_show_indexes(): - params = { - "method": "show_indexes", - "jsonrpc": "2.0", - "id": 0, - } - result = service.call(json.dumps(params), {'auth': None}) - res = json.loads(result) - assert res['result'] +def test_show_indexes_error(services): + with patch.dict('src.utils.config.config', {'index_prefix': '/'}): + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [0, 1] # Public workspaces + params = { + "method": "show_indexes", + "jsonrpc": "2.0", + "id": 0, + } + result = rpc.call(json.dumps(params), {'auth': None}) + res = json.loads(result) + assert res['error'] + assert res['error']['code'] == -32003 + assert res['error']['message'] == 'Server error' + assert res['error']['data']['method'] == 'show_indexes' -def test_show_config(): - params = { - "method": "show_config", - "jsonrpc": "2.0", - "id": 0, - } - result = service.call(json.dumps(params), {'auth': None}) - res = json.loads(result) - assert res['result'] +def test_show_config(services): + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [0, 1] # Public workspaces + params = { + "method": "show_config", + "jsonrpc": "2.0", + "id": 0, + } + result = rpc.call(json.dumps(params), {'auth': None}) + res = json.loads(result) + assert res['result'] -def test_search_objects(): - params = { - "method": "search_objects", - "jsonrpc": "2.0", - "id": 0, - "params": {} - } - result = service.call(json.dumps(params), {'auth': None}) - res = json.loads(result) - assert res['result']['count'] > 0 + +def test_search_objects(services): + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [0, 1] # Public workspaces + params = { + "method": "search_objects", + "jsonrpc": "2.0", + "id": 0, + "params": {} + } + result = rpc.call(json.dumps(params), {'auth': None}) + res = json.loads(result) + assert res['result']['count'] > 0 -def test_search_workspace(): - params = { - "method": "search_workspace", - "jsonrpc": "2.0", - "id": 0, - "params": { - "types": ["KBaseNarrative.Narrative"] +def test_search_workspace(services): + with patch('src.es_client.query.ws_auth') as mocked: + mocked.return_value = [0, 1] # Public workspaces + params = { + "method": "search_workspace", + "jsonrpc": "2.0", + "id": 0, + "params": { + "types": ["KBaseNarrative.Narrative"] + } } - } - result = service.call(json.dumps(params), {'auth': 
None}) - res = json.loads(result) - assert 'error' not in res - assert res['result']['count'] > 0 + result = rpc.call(json.dumps(params), {'auth': None}) + res = json.loads(result) + assert 'error' not in res + assert res['result']['count'] > 0 diff --git a/tests/unit/server/test_server.py b/tests/unit/server/test_server.py index 6b3427f..bab5012 100644 --- a/tests/unit/server/test_server.py +++ b/tests/unit/server/test_server.py @@ -1,23 +1,14 @@ import json import requests -import subprocess -from src.utils.wait_for_service import wait_for_service -import tests.helpers as helpers +# TODO: Remove this test file - it is actually an integration test; +# code exercised here will NOT be detected by coverage. -BASE_URL = "http://localhost:5000" -# Start the services -# This implicitly tests the "/" path -subprocess.run("docker-compose up -d", shell=True) -wait_for_service(BASE_URL, "search2") -helpers.init_elasticsearch() - - -def test_rpc_valid(): +def test_rpc_valid(services): """Test a basic valid request to /rpc""" resp = requests.post( - BASE_URL + '/rpc', + services['app_url'] + '/rpc', data=json.dumps({ "jsonrpc": "2.0", "id": 0, @@ -30,90 +21,152 @@ def test_rpc_valid(): assert result['result']['dev'] -def test_legacy_valid(): +def test_get_auth_auth_fail_resp(services): + resp = requests.post( + services['app_url'] + "/legacy", + headers={"Authorization": "xyz"}, + data=json.dumps({ + "version": "1.1", + "method": "KBaseSearchEngine.get_objects", + "params": [{"ids": ["xyz"]}], + }) + ) + result = resp.json() + assert result['version'] == '1.1' + assert 'error' in result + error = result['error'] + assert error['code'] == 2000 + assert error['message'] == 'Auth error' + assert error['name'] == 'APIError' + + +def test_search_objects_auth_fail_resp(services): + resp = requests.post( + services['app_url'] + "/legacy", + headers={"Authorization": "xyz"}, + data=json.dumps({ + "version": "1.1", + "id": "0", + "method": "KBaseSearchEngine.search_objects", + "params": [{"match_filter": {}}], + }) + ) + result = resp.json() + assert result['version'] == '1.1' + assert 'error' in result + error = result['error'] + assert error['code'] == 2000 + assert error['message'] == 'Auth error' + assert error['name'] == 'APIError' + + +def test_search_types_auth_fail_resp(services): + resp = requests.post( + services['app_url'] + "/legacy", + headers={"Authorization": "xyz"}, + data=json.dumps({ + "version": "1.1", + "id": "0", + "method": "KBaseSearchEngine.search_types", + "params": [{"match_filter": {}}], + }) + ) + result = resp.json() + assert result['version'] == '1.1' + assert 'error' in result + error = result['error'] + assert error['code'] == 2000 + assert error['message'] == 'Auth error' + assert error['name'] == 'APIError' + + +def test_legacy_valid(services): """Test a basic valid request to /legacy""" resp = requests.post( - BASE_URL + '/legacy', + services['app_url'] + '/legacy', data=json.dumps({ - "jsonrpc": "2.0", + "version": "1.1", "id": 0, "method": "KBaseSearchEngine.get_objects", "params": [{ - "guids": ['xyz'] + "ids": ['xyz'] }] }) ) + assert resp.status_code == 200 result = resp.json() assert result['id'] == 0 - assert result['jsonrpc'] == '2.0' + assert result['version'] == '1.1' + assert 'result' in result assert len(result['result']) == 1 -def test_rpc_invalid(): +def test_rpc_invalid(services): """Test a basic empty request to /rpc""" - resp = requests.get(BASE_URL + '/rpc') - assert resp.json()['error']['code'] == -32700 # Invalid params + resp = 
requests.get(services['app_url'] + '/rpc') + result = resp.json() + assert 'error' in result + assert result['error']['code'] == -32600 # Invalid params -def test_legacy_invalid(): +# TODO: should a get request even be accepted? +# The jsonrpc 1.1 "spec" (never actually an accepted spec), +# https://www.jsonrpc.org/historical/json-rpc-1-1-alt.html +# disfavors GET +# Also, KBase clients should not be encouraged to think that GET +# is acceptable. +def test_legacy_invalid_method(services): """Test a basic empty request to /legacy""" - resp = requests.get(BASE_URL + '/legacy') - assert resp.json()['error']['code'] == -32700 # Invalid params + resp = requests.get(services['app_url'] + '/legacy') + assert resp.status_code == 405 + resp = requests.delete(services['app_url'] + '/legacy') + assert resp.status_code == 405 + resp = requests.patch(services['app_url'] + '/legacy') + assert resp.status_code == 405 + resp = requests.head(services['app_url'] + '/legacy') + assert resp.status_code == 405 + resp = requests.put(services['app_url'] + '/legacy') + assert resp.status_code == 405 + + +# TODO: actually, I don't think CORS should be set in the service itself, +# rather in the proxy. -def test_handle_options(): +def test_handle_options(services): """Handle a cors-style options requests on all paths""" paths = ['/', '/rpc', '/status', '/legacy'] for path in paths: - resp = requests.options(BASE_URL + path) + resp = requests.options(services['app_url'] + path) assert resp.status_code == 204 assert resp.text == '' - assert resp.headers['Access-Control-Allow-Origin'] == '*' - assert resp.headers['Access-Control-Allow-Methods'] == 'POST, GET, OPTIONS' - assert resp.headers['Access-Control-Allow-Headers'] == '*' + assert resp.headers.get('Access-Control-Allow-Origin') == '*' + assert resp.headers.get('Access-Control-Allow-Methods') == 'POST, GET, OPTIONS' + assert resp.headers.get('Access-Control-Allow-Headers') == '*' -def test_404(): - resp = requests.get(BASE_URL + '/xyz') +def test_404(services): + resp = requests.get(services['app_url'] + '/xyz') assert resp.status_code == 404 assert resp.text == '' -def test_legacy_rpc_conversion(): +def test_legacy_rpc_conversion(services): """ Test that a JSON-RPC 1.1 request is still handled ok """ resp = requests.post( - BASE_URL + '/legacy', + services['app_url'] + '/legacy', data=json.dumps({ "version": "1.1", "id": 0, "method": "KBaseSearchEngine.get_objects", "params": [{ - "guids": ['xyz'] + "ids": ['xyz'] }] }) ) result = resp.json() assert result['id'] == 0 - assert result['jsonrpc'] == '2.0' - assert len(result['result']) == 1 - - -def test_sloppy_rpc_conversion(): - """ - Test that a Sloppy-RPC request is still handled ok - """ - resp = requests.post( - BASE_URL + '/legacy', - data=json.dumps({ - "method": "KBaseSearchEngine.get_objects", - "params": [{ - "guids": ['xyz'] - }] - }) - ) - result = resp.json() - assert result['id'] == '0' - assert result['jsonrpc'] == '2.0' + assert result['version'] == '1.1' assert len(result['result']) == 1 diff --git a/tests/unit/utils/test_config.py b/tests/unit/utils/test_config.py new file mode 100644 index 0000000..d859499 --- /dev/null +++ b/tests/unit/utils/test_config.py @@ -0,0 +1,15 @@ +from src.utils.config import init_config +import os +import pytest + + +def test_init_config_invalid_config_url(): + original_url = os.environ.get('GLOBAL_CONFIG_URL') + os.environ['GLOBAL_CONFIG_URL'] = "foo://bar" + with pytest.raises(RuntimeError) as rte: + init_config() + assert 'Invalid config url: foo://bar' in 
str(rte) + if original_url is not None: + os.environ['GLOBAL_CONFIG_URL'] = original_url + else: + os.environ.pop('GLOBAL_CONFIG_URL') diff --git a/tests/unit/utils/test_formatting.py b/tests/unit/utils/test_formatting.py new file mode 100644 index 0000000..2e6baf9 --- /dev/null +++ b/tests/unit/utils/test_formatting.py @@ -0,0 +1,15 @@ +import unittest + +from src.utils.formatting import iso8601_to_epoch_ms + + +class UtilsFormattingTest(unittest.TestCase): + def test_iso8601_to_epoch_ms_valid(self): + cases = [ + { + 'input': '1970-01-01T00:00:00Z', + 'expected': 0 + } + ] + for case in cases: + self.assertEqual(iso8601_to_epoch_ms(case['input']), case['expected']) diff --git a/tests/unit/utils/test_user_profiles.py b/tests/unit/utils/test_user_profiles.py index fe880ff..fa2027c 100644 --- a/tests/unit/utils/test_user_profiles.py +++ b/tests/unit/utils/test_user_profiles.py @@ -21,13 +21,18 @@ @responses.activate def test_get_user_profiles_valid(): responses.add(responses.POST, config['user_profile_url'], + headers={'Authorization': 'x'}, json=mock_resp, status=200) - get_user_profiles(['username'], 'x') + res = get_user_profiles(['username'], 'x') + assert res == mock_resp['result'][0] +@responses.activate def test_get_user_profiles_noauth(): + responses.add(responses.POST, config['user_profile_url'], + json=mock_resp, status=200) res = get_user_profiles(['username'], None) - assert res == [] + assert res == mock_resp['result'][0] @responses.activate diff --git a/tests/unit/utils/test_wait_for_service.py b/tests/unit/utils/test_wait_for_service.py new file mode 100644 index 0000000..43913c0 --- /dev/null +++ b/tests/unit/utils/test_wait_for_service.py @@ -0,0 +1,54 @@ +from src.utils.wait_for_service import wait_for_service, WAIT_POLL_INTERVAL +import pytest +import logging +import time +import math + +# An upper limit on clock time within wait_for_services to make +# a url get call (and other code in that pathway) +MINIMAL_CALL_TIME = 1 + + +def bad_url_with_timeout(name, url, timeout, caplog): + search2_logger = logging.getLogger('search2') + # This ensures that logs are propagated and can be captured by caplog + search2_logger.propagate = True + with caplog.at_level(logging.INFO, logger='search2'): + start = time.time() + with pytest.raises(SystemExit) as se: + wait_for_service(url, 'foo', timeout=timeout) + + # Ensure it is attempting to exit. + assert se.type == SystemExit + assert se.value.code == 1 + + # Ensure that the timeout conditions apply: + # it should have only exited after the timeout has elapsed, + # but not much longer afterwards, and always in increments of + # WAIT_POLL_INTERVAL. + elapsed = time.time() - start + assert elapsed > timeout + max_elapsed = math.ceil(timeout / WAIT_POLL_INTERVAL) * WAIT_POLL_INTERVAL + assert elapsed < max_elapsed + MINIMAL_CALL_TIME + + # These messages should have been emitted when checking and when + # failing. 
+        assert f'Attempting to connect to {name} at {url}' in caplog.text
+        assert f'Unable to connect to {name} at {url}' in caplog.text
+    search2_logger.propagate = False
+
+
+def test_init_config_invalid_config_url(caplog):
+    bad_url_with_timeout('foo', 'https://foo.bar.baz', 0, caplog)
+
+
+def test_init_config_invalid_config_url_7_timeout(caplog):
+    bad_url_with_timeout('foo', 'https://foo.bar.baz', 7, caplog)
+
+
+def test_init_config_invalid_config_url_10_timeout(caplog):
+    bad_url_with_timeout('foo', 'https://foo.bar.baz', 10, caplog)
+
+
+def test_init_config_invalid_config_url_12_timeout(caplog):
+    bad_url_with_timeout('foo', 'https://foo.bar.baz', 12, caplog)
diff --git a/tests/unit/utils/test_workspace.py b/tests/unit/utils/test_workspace.py
index 06f5aa5..5e14a58 100644
--- a/tests/unit/utils/test_workspace.py
+++ b/tests/unit/utils/test_workspace.py
@@ -1,18 +1,78 @@
 import pytest
 import responses
+import json
 from src.utils.config import config
-from src.utils.workspace import ws_auth, get_workspace_info, get_object_info
+from src.utils.workspace import ws_auth, get_workspace_info
 from src.exceptions import ResponseError
-mock_ws_ids = {
-    "version": "1.1",
-    "result": [
-        {
-            "workspaces": [1, 2, 3],
-            "pub": []
-        }
-    ]
+# TODO: All tests should be rewritten to use an explicit service call matcher
+# where applicable. This ensures that the precisely correct call has been made.
+
+
+def service_call_matcher(method, params):
+    def match(request_body):
+        try:
+            if isinstance(request_body, bytes):
+                request_body = request_body.decode("utf-8")
+
+            if request_body is None:
+                return False
+
+            rpc = json.loads(request_body)
+
+            if rpc['method'] != method:
+                return False
+
+            if rpc['params'] != [params]:
+                return False
+
+            return True
+        except json.JSONDecodeError:
+            return False
+
+    return match
+
+
+mock_ws_ids_with_auth = {
+    "version": "1.1",
+    "result": [
+        {
+            "workspaces": [1, 2, 3],
+            "pub": [10, 11]
+        }
+    ]
+}
+
+mock_ws_ids_with_auth_only_public = {
+    "version": "1.1",
+    "result": [
+        {
+            "workspaces": [],
+            "pub": [10, 11]
+        }
+    ]
+}
+
+
+mock_ws_ids_with_auth_only_private = {
+    "version": "1.1",
+    "result": [
+        {
+            "workspaces": [1, 2, 3],
+            "pub": []
+        }
+    ]
+}
+
+mock_ws_ids_without_auth = {
+    "version": "1.1",
+    "result": [
+        {
+            "workspaces": [],
+            "pub": [10, 11]
+        }
+    ]
 }
 mock_ws_info = {
@@ -80,75 +140,142 @@
 @responses.activate
 def test_ws_auth_valid():
     # Mock the workspace call
-    responses.add(responses.POST, config['workspace_url'],
-                  json=mock_ws_ids, status=200)
+    responses.add(responses.POST,
+                  config['workspace_url'],
+                  headers={'Authorization': 'valid_token'},
+                  match=[
+                      service_call_matcher(
+                          'Workspace.list_workspace_ids',
+                          {
+                              'perm': 'r',
+                              'onlyGlobal': 0,
+                              'excludeGlobal': 0
+                          }
+                      )
+                  ],
+                  json=mock_ws_ids_with_auth,
+                  status=200)
     result = ws_auth('valid_token')
+    assert result == [1, 2, 3, 10, 11]
+
+
+@responses.activate
+def test_ws_auth_valid_public():
+    responses.add(responses.POST,
+                  config['workspace_url'],
+                  headers={'Authorization': 'valid_token'},
+                  match=[
+                      service_call_matcher(
+                          'Workspace.list_workspace_ids',
+                          {
+                              'perm': 'r',
+                              'onlyGlobal': 1,
+                              'excludeGlobal': 0
+                          }
+                      )
+                  ],
+                  json=mock_ws_ids_with_auth_only_public,
+                  status=200)
+    result = ws_auth('valid_token', only_public=True)
+    assert result == [10, 11]
+
+
+@responses.activate
+def test_ws_auth_valid_private():
+    # Mock the workspace call
+    responses.add(responses.POST,
+                  config['workspace_url'],
+                  headers={'Authorization': 'valid_token'},
+                  match=[
+                      service_call_matcher(
+                          'Workspace.list_workspace_ids',
+                          {
+                              'perm': 'r',
+                              'onlyGlobal': 0,
+                              'excludeGlobal': 1
+                          }
+                      )
+                  ],
+                  json=mock_ws_ids_with_auth_only_private,
+                  status=200)
+    result = ws_auth('valid_token', only_private=True)
     assert result == [1, 2, 3]
+def test_ws_auth_error_private_and_public():
+    # Mock the workspace call
+    with pytest.raises(Exception) as ex:
+        ws_auth('valid_token', only_private=True, only_public=True)
+    assert 'Only one of "only_public" or "only_private" may be set' in str(ex)
+
+
+@responses.activate
 def test_ws_auth_blank():
+    # Mock the workspace call
+    responses.add(responses.POST,
+                  config['workspace_url'],
+                  json=mock_ws_ids_without_auth,
+                  status=200)
     result = ws_auth(None)
-    assert result == []
+    assert result == [10, 11]
 @responses.activate
 def test_ws_auth_invalid():
-    # Mock the workspace call
-    responses.add(responses.POST, config['workspace_url'], status=403)
+    # Mock the workspace call
+    responses.add(responses.POST,
+                  config['workspace_url'],
+                  headers={'Authorization': 'invalid_token'},
+                  status=401)
     with pytest.raises(ResponseError) as ctx:
-        ws_auth('x')
+        ws_auth('invalid_token')
     err = ctx.value
-    assert err.status == 403
-    assert err.code == -32001
-    assert len(err.message) > 0
+    assert err.jsonrpc_code == -32001
 @responses.activate
 def test_get_workspace_info_valid():
-    responses.add(responses.POST, config['workspace_url'],
-                  json=mock_ws_info, status=200)
+    responses.add(responses.POST,
+                  config['workspace_url'],
+                  json=mock_ws_info,
+                  status=200)
     result = get_workspace_info(1, 'token')
     assert result == mock_ws_info['result'][0]
-def test_get_workspace_info_blank():
+@responses.activate
+def test_get_workspace_info_public():
+    responses.add(responses.POST,
+                  config['workspace_url'],
+                  json=mock_ws_info,
+                  status=200)
     result = get_workspace_info(1, None)
-    assert result == []
+    assert result == mock_ws_info['result'][0]
 @responses.activate
 def test_get_workspace_info_invalid():
-    responses.add(responses.POST, config['workspace_url'], status=500)
+    responses.add(responses.POST,
+                  config['workspace_url'],
+                  status=500)
     with pytest.raises(ResponseError) as ctx:
         get_workspace_info(1, 'token')
     err = ctx.value
-    assert err.status == 403
-    assert err.code == -32001
-    assert len(err.message) > 0
+    assert err.jsonrpc_code == -32001
 @responses.activate
 def test_get_workspace_info_invalid2():
     resp = {
-        "version": "1.1", "result": []
+        "version": "1.1",
+        "result": []
     }
-    responses.add(responses.POST, config['workspace_url'],
-                  json=resp, status=200)
+    responses.add(responses.POST,
+                  config['workspace_url'],
+                  json=resp,
+                  status=200)
     with pytest.raises(ResponseError) as ctx:
         get_workspace_info(1, 'token')
     err = ctx.value
-    assert err.status == 403
-    assert err.code == -32001
+    assert err.jsonrpc_code == -32001
     assert len(err.message) > 0
-
-
-@responses.activate
-def test_get_object_info_valid():
-    responses.add(
-        responses.POST,
-        config['workspace_url'],
-        json=mock_obj_info,
-        status=200
-    )
-    infos = get_object_info([1, 2], 'token')
-    assert infos == mock_obj_info['result'][0]['infos']
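The rewritten tests above all accept a `services` pytest fixture and read `services['app_url']`, but the fixture itself is defined outside this hunk. Below is a minimal sketch of what such a fixture could look like, assuming it simply wraps the docker-compose startup and Elasticsearch seeding that the removed module-level setup code performed; the fixture name `services`, the `app_url` key, `wait_for_service`, and `init_elasticsearch` come from the diff, while the constants, the session scope, and the conftest location are assumptions.

# Hypothetical tests/conftest.py sketch; not part of this diff.
# Assumes docker-compose exposes the API on localhost:5000 and
# Elasticsearch on localhost:9200, as the removed setup code did.
import subprocess

import pytest

from src.utils.wait_for_service import wait_for_service
from tests.helpers import init_elasticsearch

APP_URL = 'http://localhost:5000'  # assumed; matches the old BASE_URL
ES_URL = 'http://localhost:9200'   # assumed; matches the old ES_URL


@pytest.fixture(scope='session')
def services():
    # Bring up the stack once per test session, block until both services
    # answer, then seed Elasticsearch with the test fixtures.
    subprocess.run("docker-compose up -d", shell=True, check=True)
    wait_for_service(ES_URL, 'Elasticsearch')
    wait_for_service(APP_URL, 'search2')
    init_elasticsearch()
    yield {'app_url': APP_URL}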