
Commit

Merge pull request #65 from clowder-framework/64-some-routes-in-apiv1-are-wrong-have-v2

Somehow some v2 authorization and routes got in here; this fixes those.
max-zilla authored Apr 18, 2023
2 parents 1eb29b5 + cd24e1b commit 2311c7f
Showing 1 changed file with 12 additions and 25 deletions.
37 changes: 12 additions & 25 deletions pyclowder/api/v1/datasets.py
@@ -4,8 +4,6 @@
import tempfile

import requests
-import pyclowder.api.v2.datasets as v2datasets
-import pyclowder.api.v1.datasets as v1datasets
from pyclowder.client import ClowderClient
from pyclowder.collections import get_datasets, get_child_collections, delete as delete_collection
from pyclowder.utils import StatusMessage
@@ -63,9 +61,7 @@ def delete(connector, client, datasetid):
client -- ClowderClient containing authentication credentials
datasetid -- the dataset to delete
"""
headers = {"Authorization": "Bearer " + client.key}

url = "%s/api/v2/datasets/%s" % (client.host, datasetid)
url = "%s/api/datasets/%s?key=%s" % (client.host, datasetid, client.key)

result = requests.delete(url, verify=connector.ssl_verify if connector else True)
result.raise_for_status()
@@ -130,7 +126,7 @@ def download_metadata(connector, client, datasetid, extractor=None):
headers = {"Authorization": "Bearer " + client.key}

filterstring = "" if extractor is None else "&extractor=%s" % extractor
-url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)
+url = '%s/api/datasets/%s/metadata?key=%s' % (client.host, datasetid, client.key)

# fetch data
result = requests.get(url, stream=True, headers=headers,
@@ -147,12 +143,10 @@ def get_info(connector, client, datasetid):
client -- ClowderClient containing authentication credentials
datasetid -- the dataset to get info of
"""
headers = {"Authorization": "Bearer " + client.key}

url = "%s/api/v2/datasets/%s" % (client.host, datasetid)
url = "%s/api/datasets/%s?key=%s" % (client.host, datasetid, client.key)

result = requests.get(url, headers=headers,
verify=connector.ssl_verify if connector else True)
result = requests.get(url, verify=connector.ssl_verify if connector else True)
result.raise_for_status()

return json.loads(result.text)
@@ -165,11 +159,9 @@ def get_file_list(connector, client, datasetid):
client -- ClowderClient containing authentication credentials
datasetid -- the dataset to get filelist of
"""
headers = {"Authorization": "Bearer " + client.key}
url = "%s/api/datasets/%s/files?key=%s" % (client.host, datasetid, client.key)

url = "%s/api/v2/datasets/%s/files" % (client.host, datasetid)

result = requests.get(url, headers=headers, verify=connector.ssl_verify if connector else True)
result = requests.get(url, verify=connector.ssl_verify if connector else True)
result.raise_for_status()

return json.loads(result.text)
@@ -184,14 +176,11 @@ def remove_metadata(connector, client, datasetid, extractor=None):
extractor -- extractor name to filter deletion
!!! ALL JSON-LD METADATA WILL BE REMOVED IF NO extractor PROVIDED !!!
"""
headers = {"Authorization": "Bearer " + client.key}

filterstring = "" if extractor is None else "&extractor=%s" % extractor
url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)
url = '%s/api/datasets/%s/metadata?key=%s' % (client.host, datasetid, client.key)

# fetch data
result = requests.delete(url, stream=True, headers=headers,
verify=connector.ssl_verify if connector else True)
result = requests.delete(url, stream=True, verify=connector.ssl_verify if connector else True)
result.raise_for_status()

def submit_extraction(connector, client, datasetid, extractorname):
Expand All @@ -203,10 +192,9 @@ def submit_extraction(connector, client, datasetid, extractorname):
datasetid -- the dataset UUID to submit
extractorname -- registered name of extractor to trigger
"""
-headers = {'Content-Type': 'application/json',
-           "Authorization": "Bearer " + client.key}
+headers = {'Content-Type': 'application/json'}

-url = "%s/api/v2/datasets/%s/extractions?key=%s" % (client.host, datasetid)
+url = "%s/api/datasets/%s/extractions?key=%s" % (client.host, datasetid, client.key)

result = requests.post(url,
headers=headers,
@@ -266,11 +254,10 @@ def upload_metadata(connector, client, datasetid, metadata):
datasetid -- the dataset that is currently being processed
metadata -- the metadata to be uploaded
"""
-headers = {'Content-Type': 'application/json',
-           "Authorization": "Bearer " + client.key}
+headers = {'Content-Type': 'application/json'}
connector.message_process({"type": "dataset", "id": datasetid}, "Uploading dataset metadata.")

-url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)
+url = '%s/api/datasets/%s/metadata?key=%s' % (client.host, datasetid, client.key)
result = requests.post(url, headers=headers, data=json.dumps(metadata),
verify=connector.ssl_verify if connector else True)
result.raise_for_status()
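
For context, the sketch below contrasts the two authentication styles this commit untangles: Clowder's v1 API passes the API key as a ?key= query parameter, while the v2 API sends it as a Bearer token in the Authorization header. The host, key, and dataset id values are hypothetical, and this sketch is illustrative only; it is not part of the changed file.

import requests

# Hypothetical values, for illustration only.
host = "https://clowder.example.org"
key = "SECRET-API-KEY"
datasetid = "5d1a2b3c4d5e6f7a8b9c0d1e"

# v1 style (what pyclowder/api/v1/datasets.py should use): key as a query parameter.
v1_url = "%s/api/datasets/%s?key=%s" % (host, datasetid, key)
v1_info = requests.get(v1_url).json()

# v2 style (what had accidentally crept into the v1 module): Bearer token header.
v2_url = "%s/api/v2/datasets/%s" % (host, datasetid)
v2_info = requests.get(v2_url, headers={"Authorization": "Bearer " + key}).json()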
