Merge pull request #92 from clowder-framework/posixpath-joins
3.0.6 Use posixpath.join
max-zilla authored Oct 11, 2023
2 parents 50b6b36 + 4c7724a commit c18cffc
Showing 11 changed files with 104 additions and 108 deletions.
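
The gist of the change: URLs previously built with plain string formatting ('%s/api/...' % client.host) are now built with posixpath.join, which inserts a separator only when one is missing, so a client.host configured with a trailing slash no longer produces a double slash in request URLs. Unlike os.path.join, posixpath.join always uses forward slashes, even on Windows. A minimal sketch of the behavior (the host value is hypothetical):

import posixpath

host = "https://clowder.example.org/"  # hypothetical host URL with a trailing slash

# Old pattern: plain formatting keeps the extra slash.
print('%s/api/datasets' % host)                          # https://clowder.example.org//api/datasets

# New pattern: a separator is added only when one is missing.
print(posixpath.join(host, 'api/datasets'))              # https://clowder.example.org/api/datasets
print(posixpath.join(host.rstrip('/'), 'api/datasets'))  # same result without the slash

# Caveat: a second segment starting with '/' discards everything before it.
print(posixpath.join(host, '/api/datasets'))             # /api/datasets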
9 changes: 8 additions & 1 deletion CHANGELOG.md
@@ -5,7 +5,14 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/)
and this project adheres to [Semantic Versioning](https://semver.org/).

- ## 3.0.6 - 2023-10-09

## 3.0.7 - 2023-10-11

### Added

- Modified v1 and v2 endpoints to ignore trailing slashes on Clowder host URLs.

- ## 3.0.6 - 2023-10-10

### Added

24 changes: 12 additions & 12 deletions pyclowder/api/v1/datasets.py
@@ -2,7 +2,7 @@
import logging
import os
import tempfile

import posixpath
import requests
from pyclowder.client import ClowderClient
from pyclowder.collections import get_datasets, get_child_collections, delete as delete_collection
@@ -22,7 +22,7 @@ def create_empty(connector, client, datasetname, description, parentid=None, spaceid=None):
"""
logger = logging.getLogger(__name__)

url = '%s/api/datasets/createempty?key=%s' % (client.host, client.key)
url = posixpath.join(client.host, 'api/datasets/createempty?key=%s' % client.key)

if parentid:
if spaceid:
@@ -61,7 +61,7 @@ def delete(connector, client, datasetid):
client -- ClowderClient containing authentication credentials
datasetid -- the dataset to delete
"""
url = "%s/api/datasets/%s?key=%s" % (client.host, datasetid, client.key)
url = posixpath.join(client.host, "api/datasets/%s?key=%s" % (datasetid, client.key))

result = requests.delete(url, verify=connector.ssl_verify if connector else True)
result.raise_for_status()
@@ -102,7 +102,7 @@ def download(connector, client, datasetid):
connector.message_process({"type": "dataset", "id": datasetid}, "Downloading dataset.")

# fetch dataset zipfile
url = '%s/api/datasets/%s/download?key=%s' % (client.host, datasetid,client.key)
url = posixpath.join(client.host, 'api/datasets/%s/download?key=%s' % (datasetid, client.key))
result = requests.get(url, stream=True,
verify=connector.ssl_verify if connector else True)
result.raise_for_status()
@@ -124,7 +124,7 @@ def download_metadata(connector, client, datasetid, extractor=None):
extractor -- extractor name to filter results (if only one extractor's metadata is desired)
"""
filterstring = "" if extractor is None else "&extractor=%s" % extractor
url = '%s/api/datasets/%s/metadata?key=%s' % (client.host, datasetid, client.key + filterstring)
url = posixpath.join(client.host, 'api/datasets/%s/metadata?key=%s' % (datasetid, client.key + filterstring))

# fetch data
result = requests.get(url, stream=True,
@@ -142,7 +142,7 @@ def get_info(connector, client, datasetid):
datasetid -- the dataset to get info of
"""

url = "%s/api/datasets/%s?key=%s" % (client.host, datasetid, client.key)
url = posixpath.join(client.host, "api/datasets/%s?key=%s" % (datasetid, client.key))

result = requests.get(url, verify=connector.ssl_verify if connector else True)
result.raise_for_status()
@@ -157,7 +157,7 @@ def get_file_list(connector, client, datasetid):
client -- ClowderClient containing authentication credentials
datasetid -- the dataset to get filelist of
"""
url = "%s/api/datasets/%s/files?key=%s" % (client.host, datasetid, client.key)
url = posixpath.join(client.host, "api/datasets/%s/files?key=%s" % (datasetid, client.key))

result = requests.get(url, verify=connector.ssl_verify if connector else True)
result.raise_for_status()
@@ -175,7 +175,7 @@ def remove_metadata(connector, client, datasetid, extractor=None):
!!! ALL JSON-LD METADATA WILL BE REMOVED IF NO extractor PROVIDED !!!
"""
filterstring = "" if extractor is None else "&extractor=%s" % extractor
url = '%s/api/datasets/%s/metadata?key=%s' % (client.host, datasetid, client.key)
url = posixpath.join(client.host, 'api/datasets/%s/metadata?key=%s' % (datasetid, client.key))

# fetch data
result = requests.delete(url, stream=True, verify=connector.ssl_verify if connector else True)
@@ -192,7 +192,7 @@ def submit_extraction(connector, client, datasetid, extractorname):
"""
headers = {'Content-Type': 'application/json'}

url = "%s/api/datasets/%s/extractions?key=%s" % (client.host, datasetid, client.key)
url = posixpath.join(client.host, "api/datasets/%s/extractions?key=%s" % (datasetid, client.key))

result = requests.post(url,
headers=headers,
@@ -238,7 +238,7 @@ def upload_tags(connector, client, datasetid, tags):
connector.status_update(StatusMessage.processing, {"type": "dataset", "id": datasetid}, "Uploading dataset tags.")

headers = {'Content-Type': 'application/json'}
url = '%s/api/datasets/%s/tags?key=%s' % (client.host, datasetid, client.key)
url = posixpath.join(client.host, 'api/datasets/%s/tags?key=%s' % (datasetid, client.key))
result = connector.post(url, headers=headers, data=json.dumps(tags),
verify=connector.ssl_verify if connector else True)

@@ -255,7 +255,7 @@ def upload_metadata(connector, client, datasetid, metadata):
headers = {'Content-Type': 'application/json'}
connector.message_process({"type": "dataset", "id": datasetid}, "Uploading dataset metadata.")

url = '%s/api/datasets/%s/metadata?key=%s' % (client.host, datasetid, client.key)
url = posixpath.join(client.host, 'api/datasets/%s/metadata?key=%s' % (datasetid, client.key))
result = requests.post(url, headers=headers, data=json.dumps(metadata),
verify=connector.ssl_verify if connector else True)
result.raise_for_status()
@@ -387,4 +387,4 @@ def add_metadata(self, dataset_id, metadata):
try:
return self.client.post("/datasets/%s/metadata" % dataset_id, metadata)
except Exception as e:
logging.error("Error upload to dataset %s: %s" % (dataset_id, str(e)))
logging.error("Error upload to dataset %s: %s" % (dataset_id, str(e)))
32 changes: 16 additions & 16 deletions pyclowder/api/v1/files.py
@@ -7,7 +7,7 @@
import logging
import os
import tempfile

import posixpath
import requests
from requests_toolbelt.multipart.encoder import MultipartEncoder

@@ -43,7 +43,7 @@ def get_download_url(connector, client, fileid, intermediatefileid=None, ext=""):
if not intermediatefileid:
intermediatefileid = fileid

url = '%s/api/files/%s?key=%s' % (client.host, intermediatefileid, client.key)
url = posixpath.join(client.host, 'api/files/%s?key=%s' % (intermediatefileid, client.key))
return url


@@ -65,7 +65,7 @@ def download(connector, client, fileid, intermediatefileid=None, ext=""):
if not intermediatefileid:
intermediatefileid = fileid

url = '%s/api/files/%s?key=%s' % (client.host, intermediatefileid, client.key)
url = posixpath.join(client.host, 'api/files/%s?key=%s' % (intermediatefileid, client.key))
result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True)

(inputfile, inputfilename) = tempfile.mkstemp(suffix=ext)
@@ -89,7 +89,7 @@ def download_info(connector, client, fileid):
fileid -- the file to fetch metadata of
"""

url = '%s/api/files/%s/metadata?key=%s' % (client.host, fileid, client.key)
url = posixpath.join(client.host, 'api/files/%s/metadata?key=%s' % (fileid, client.key))

# fetch data
result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True)
@@ -121,7 +121,7 @@ def download_metadata(connector, client, fileid, extractor=None):
"""

filterstring = "" if extractor is None else "&extractor=%s" % extractor
url = '%s/api/files/%s/metadata.jsonld?key=%s%s' % (client.host, fileid, client.key, filterstring)
url = posixpath.join(client.host, 'api/files/%s/metadata.jsonld?key=%s%s' % (fileid, client.key, filterstring))

# fetch data
result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True)
@@ -137,7 +137,7 @@ def delete(connector, client, fileid):
client -- ClowderClient containing authentication credentials
fileid -- the dataset to delete
"""
url = "%s/api/files/%s?key=%s" % (client.host, fileid, client.key)
url = posixpath.join(client.host, "api/files/%s?key=%s" % (fileid, client.key))

result = requests.delete(url, verify=connector.ssl_verify if connector else True)
result.raise_for_status()
@@ -155,7 +155,7 @@ def submit_extraction(connector, client, fileid, extractorname):
extractorname -- registered name of extractor to trigger
"""

url = "%s/api/files/%s/extractions?key=%s" % (client.host, fileid, client.key)
url = posixpath.join(client.host, "api/files/%s/extractions?key=%s" % (fileid, client.key))

result = connector.post(url,
headers={'Content-Type': 'application/json'},
@@ -229,7 +229,7 @@ def upload_metadata(connector, client, fileid, metadata):
connector.message_process({"type": "file", "id": fileid}, "Uploading file metadata.")

headers = {'Content-Type': 'application/json'}
url = '%s/api/files/%s/metadata.jsonld?key=%s' % (client.host, fileid, client.key)
url = posixpath.join(client.host, 'api/files/%s/metadata.jsonld?key=%s' % (fileid, client.key))
result = connector.post(url, headers=headers, data=json.dumps(metadata),
verify=connector.ssl_verify if connector else True)

@@ -255,7 +255,7 @@ def upload_preview(connector, client, fileid, previewfile, previewmetadata=None, preview_mimetype=None):
headers = {'Content-Type': 'application/json'}

# upload preview
url = '%s/api/previews?key=%s' % (client.host, client.key)
url = posixpath.join(client.host, 'api/previews?key=%s' % client.key)
with open(previewfile, 'rb') as filebytes:
# If a custom preview file MIME type is provided, use it to generate the preview file object.
if preview_mimetype is not None:
@@ -269,13 +269,13 @@

# associate uploaded preview with orginal file
if fileid and not (previewmetadata and 'section_id' in previewmetadata and previewmetadata['section_id']):
url = '%s/api/files/%s/previews/%s?key=%s' % (client.host, fileid, previewid, client.key)
url = posixpath.join(client.host, 'api/files/%s/previews/%s?key=%s' % (fileid, previewid, client.key))
result = connector.post(url, headers=headers, data=json.dumps({}),
verify=connector.ssl_verify if connector else True)

# associate metadata with preview
if previewmetadata is not None:
url = '%s/api/previews/%s/metadata?key=%s' % (client.host, previewid, client.key)
url = posixpath.join(client.host, 'api/previews/%s/metadata?key=%s' % (previewid, client.key))
result = connector.post(url, headers=headers, data=json.dumps(previewmetadata),
verify=connector.ssl_verify if connector else True)

@@ -295,7 +295,7 @@ def upload_tags(connector, client, fileid, tags):
connector.message_process({"type": "file", "id": fileid}, "Uploading file tags.")

headers = {'Content-Type': 'application/json'}
url = '%s/api/files/%s/tags?key=%s' % (client.host, fileid, client.key)
url = posixpath.join(client.host, 'api/files/%s/tags?key=%s' % (fileid, client.key))
result = connector.post(url, headers=headers, data=json.dumps(tags),
verify=connector.ssl_verify if connector else True)

@@ -311,7 +311,7 @@ def upload_thumbnail(connector, client, fileid, thumbnail):
"""

logger = logging.getLogger(__name__)
url = '%s/api/fileThumbnail?key=%s' % (client.host, client.key)
url = posixpath.join(client.host, 'api/fileThumbnail?key=%s' % client.key)

# upload preview
with open(thumbnail, 'rb') as inputfile:
@@ -322,7 +322,7 @@ def upload_thumbnail(connector, client, fileid, thumbnail):
# associate uploaded preview with original file/dataset
if fileid:
headers = {'Content-Type': 'application/json'}
url = '%s/api/files/%s/thumbnails/%s?key=%s' % (client.host, fileid, thumbnailid, client.key)
url = posixpath.join(client.host, 'api/files/%s/thumbnails/%s?key=%s' % (fileid, thumbnailid, client.key))
connector.post(url, headers=headers, data=json.dumps({}), verify=connector.ssl_verify if connector else True)

return thumbnailid
@@ -352,7 +352,7 @@ def upload_to_dataset(connector, client, datasetid, filepath, check_duplicate=False):
if filepath.startswith(connector.mounted_paths[source_path]):
return _upload_to_dataset_local(connector, client, datasetid, filepath)

url = '%s/api/uploadToDataset/%s?key=%s' % (client.host, datasetid, client.key)
url = posixpath.join(client.host, 'api/uploadToDataset/%s?key=%s' % (datasetid, client.key))

if os.path.exists(filepath):
filename = os.path.basename(filepath)
@@ -381,7 +381,7 @@ def _upload_to_dataset_local(connector, client, datasetid, filepath):
"""

logger = logging.getLogger(__name__)
url = '%s/api/uploadToDataset/%s?key=%s' % (client.host, datasetid, client.key)
url = posixpath.join(client.host, 'api/uploadToDataset/%s?key=%s' % (datasetid, client.key))

if os.path.exists(filepath):
# Replace local path with remote path before uploading
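
A note on the design choice (this rationale is inferred, not stated in the PR): the obvious alternative, urllib.parse.urljoin, resolves its second argument relative to the base URL's last path segment, so it would silently drop a sub-path when Clowder is served under one; posixpath.join simply concatenates segments:

import posixpath
from urllib.parse import urljoin

host = "https://example.org/clowder"  # hypothetical deployment under a sub-path

print(urljoin(host, "api/files"))         # https://example.org/api/files  -- sub-path lost
print(posixpath.join(host, "api/files"))  # https://example.org/clowder/api/files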
32 changes: 15 additions & 17 deletions pyclowder/api/v2/datasets.py
@@ -7,13 +7,11 @@
import logging
import os
import tempfile

import posixpath
import requests
from requests_toolbelt.multipart.encoder import MultipartEncoder

from pyclowder.client import ClowderClient
from pyclowder.collections import get_datasets, get_child_collections, delete as delete_collection
from pyclowder.utils import StatusMessage


def create_empty(connector, client, datasetname, description, parentid=None, spaceid=None):
@@ -30,7 +28,7 @@ def create_empty(connector, client, datasetname, description, parentid=None, spaceid=None):

logger = logging.getLogger(__name__)

url = '%s/api/v2/datasets' % client.host
url = posixpath.join(client.host, 'api/v2/datasets')
headers = {"Content-Type": "application/json",
"X-API-KEY": client.key}
result = requests.post(url, headers=headers,
Expand All @@ -54,7 +52,7 @@ def delete(connector, client , datasetid):
datasetid -- the dataset to delete
"""
headers = {"X-API-KEY": client.key}
url = "%s/api/v2/datasets/%s" % (client.host, datasetid)
url = posixpath.join(client.host, "api/v2/datasets/%s" % datasetid)

result = requests.delete(url, headers=headers, verify=connector.ssl_verify if connector else True)
result.raise_for_status()
@@ -99,7 +97,7 @@

headers = {"X-API-KEY": client.key}
# fetch dataset zipfile
url = '%s/api/v2/datasets/%s/download' % (client.host, datasetid)
url = posixpath.join(client.host, 'api/v2/datasets/%s/download' % datasetid)
result = requests.get(url, stream=True, headers=headers,
verify=connector.ssl_verify if connector else True)
result.raise_for_status()
@@ -124,7 +122,7 @@ def download_metadata(connector, client, datasetid, extractor=None):
headers = {"X-API-KEY": client.key}

filterstring = "" if extractor is None else "&extractor=%s" % extractor
url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)
url = posixpath.join(client.host, 'api/v2/datasets/%s/metadata' % datasetid)

# fetch data
result = requests.get(url, stream=True, headers=headers,
@@ -144,7 +142,7 @@ def get_info(connector, client, datasetid):
"""
headers = {"X-API-KEY": client.key}

url = "%s/api/v2/datasets/%s" % (client.host, datasetid)
url = posixpath.join(client.host, "api/v2/datasets/%s" % datasetid)

result = requests.get(url, headers=headers,
verify=connector.ssl_verify if connector else True)
@@ -163,7 +161,7 @@ def get_file_list(connector, client, datasetid):
"""
headers = {"X-API-KEY": client.key}

url = "%s/api/v2/datasets/%s/files" % (client.host, datasetid)
url = posixpath.join(client.host, "api/v2/datasets/%s/files" % datasetid)

result = requests.get(url, headers=headers, verify=connector.ssl_verify if connector else True)
result.raise_for_status()
@@ -184,7 +182,7 @@ def remove_metadata(connector, client, datasetid, extractor=None):
headers = {"X-API-KEY": client.key}

filterstring = "" if extractor is None else "&extractor=%s" % extractor
url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)
url = posixpath.join(client.host, 'api/v2/datasets/%s/metadata' % datasetid)

# fetch data
result = requests.delete(url, stream=True, headers=headers,
@@ -204,7 +202,7 @@ def submit_extraction(connector, client, datasetid, extractorname):
headers = {'Content-Type': 'application/json',
"X-API-KEY": client.key}

url = "%s/api/v2/datasets/%s/extractions?key=%s" % (client.host, datasetid)
url = posixpath.join(client.host, "api/v2/datasets/%s/extractions" % datasetid)

result = requests.post(url,
headers=headers,
@@ -229,7 +227,7 @@ def upload_metadata(connector, client, datasetid, metadata):
connector.message_process({"type": "dataset", "id": datasetid}, "Uploading dataset metadata.")


url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)
url = posixpath.join(client.host, 'api/v2/datasets/%s/metadata' % datasetid)
result = requests.post(url, headers=headers, data=json.dumps(metadata),
verify=connector.ssl_verify if connector else True)
result.raise_for_status()
@@ -259,7 +257,7 @@ def upload_preview(connector, client, datasetid, previewfile, previewmetadata=None,
if os.path.exists(previewfile):

# upload visualization URL
visualization_config_url = '%s/api/v2/visualizations/config' % client.host
visualization_config_url = posixpath.join(client.host, 'api/v2/visualizations/config')

if visualization_config_data is None:
visualization_config_data = dict()
@@ -292,8 +290,8 @@
if visualization_config_id is not None:

# upload visualization URL
visualization_url = '%s/api/v2/visualizations?name=%s&description=%s&config=%s' % (
client.host, visualization_name, visualization_description, visualization_config_id)
visualization_url = posixpath.join(client.host, 'api/v2/visualizations?name=%s&description=%s&config=%s' % (
visualization_name, visualization_description, visualization_config_id))

filename = os.path.basename(previewfile)
if preview_mimetype is not None:
@@ -331,7 +329,7 @@ def upload_thumbnail(connector, client, datasetid, thumbnail):

connector.message_process({"type": "dataset", "id": datasetid}, "Uploading thumbnail to dataset.")

url = '%s/api/v2/thumbnails' % (client.host)
url = posixpath.join(client.host, 'api/v2/thumbnails')

if os.path.exists(thumbnail):
file_data = {"file": open(thumbnail, 'rb')}
Expand All @@ -345,7 +343,7 @@ def upload_thumbnail(connector, client, datasetid, thumbnail):
connector.message_process({"type": "dataset", "id": datasetid}, "Uploading thumbnail to dataset.")
headers = {'Content-Type': 'application/json',
'X-API-KEY': client.key}
url = '%s/api/v2/datasets/%s/thumbnail/%s' % (client.host, datasetid, thumbnailid)
url = posixpath.join(client.host, 'api/v2/datasets/%s/thumbnail/%s' % (datasetid, thumbnailid))
result = connector.patch(url, headers=headers,
verify=connector.ssl_verify if connector else True)
return result.json()["thumbnail_id"]
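
As the v2 hunks show, authentication also differs between API versions: v1 appends ?key= to the URL, while v2 sends the key in an X-API-KEY header. A sketch of the v2 request pattern used by upload_metadata above (host, key, and dataset id are hypothetical):

import json
import posixpath
import requests

host = "https://clowder.example.org/"   # hypothetical
key = "hypothetical-api-key"
datasetid = "5d1f9f2e4f0c4e6aa1b2c3d4"  # hypothetical

# Build the URL and send the key as a header, as the v2 helpers do.
url = posixpath.join(host, "api/v2/datasets/%s/metadata" % datasetid)
headers = {"Content-Type": "application/json", "X-API-KEY": key}

result = requests.post(url, headers=headers, data=json.dumps({"quality": "checked"}))
result.raise_for_status()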
(7 more changed files not shown)
