diff --git a/nph_5class/Dockerfile b/nph_5class/Dockerfile
new file mode 100644
index 0000000..8f8b45f
--- /dev/null
+++ b/nph_5class/Dockerfile
@@ -0,0 +1,55 @@
+# FROM python:3.6.15-buster
+# FROM nvidia/cuda:11.7.1-base-ubuntu20.04
+FROM python:3.8.13-buster
+
+ENV DEBIAN_FRONTEND noninteractive
+
+RUN apt-get -y update && \
+    apt-get -y upgrade && \
+    apt-get -y install python3
+RUN apt-get -y install python3-pip
+
+RUN apt-get install -y vim
+RUN apt-get install -y wget
+
+# ===================Module Dependencies============================
+
+RUN pip install torch torchvision numpy nibabel matplotlib scipy scikit-image SimpleITK
+# RUN wget -q http://fsl.fmrib.ox.ac.uk/fsldownloads/fslinstaller.py && \
+#     chmod 775 fslinstaller.py && \
+#     python3 /fslinstaller.py -d /usr/local/fsl -V 6.0.4 -q && \
+#     rm -f /fslinstaller.py
+
+# RUN mv /usr/local/fsl /usr/local/bin/fsl
+
+
+# ===================Copy Source Code===============================
+
+RUN mkdir /module
+WORKDIR /module
+
+COPY src /module/src
+# RUN chmod +x /module/src/skull_strip.sh
+
+# ===============bqapi for python3 Dependencies=====================
+# pip install in this exact order
+RUN pip3 install six
+RUN pip3 install lxml
+RUN pip3 install requests==2.18.4
+RUN pip3 install requests-toolbelt
+RUN pip3 install tables
+
+# =====================Build Directory Structure====================
+
+EXPOSE 8080
+EXPOSE 5000
+COPY PythonScriptWrapper.py /module/
+COPY bqapi/ /module/bqapi
+
+# Replace the following line with your {ModuleName}.xml
+COPY NPHSegmentation.xml /module/NPHSegmentation.xml
+
+ENV PATH /module:$PATH:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+#:/usr/local/fsl/bin
+ENV PYTHONPATH $PYTHONPATH:/module/src
+# ENV FSLOUTPUTTYPE NIFTI_GZ
diff --git a/nph_5class/NPHSegmentation.xml b/nph_5class/NPHSegmentation.xml
new file mode 100644
index 0000000..4787292
--- /dev/null
+++ b/nph_5class/NPHSegmentation.xml
@@ -0,0 +1,71 @@
+<!-- 71-line BisQue module descriptor: the <module> definition whose "inputs" and
+     "outputs" sections (image and "NonImage" results) are parsed by
+     PythonScriptWrapper.py; the element markup is not preserved in this excerpt. -->
diff --git a/nph_5class/PythonScriptWrapper.py b/nph_5class/PythonScriptWrapper.py
new file mode 100644
index 0000000..e2e8794
--- /dev/null
+++ b/nph_5class/PythonScriptWrapper.py
@@ -0,0 +1,397 @@
+import sys
+from lxml import etree
+import xml.etree.ElementTree as ET
+import optparse
+import logging
+import os
+
+
+logging.basicConfig(filename='PythonScript.log', filemode='a', level=logging.DEBUG)
+log = logging.getLogger('bq.modules')
+
+# from bqapi.comm import BQCommError
+from bqapi.comm import BQSession
+from bqapi.util import fetch_blob
+
+# standardized naming convention for running modules.
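+# Illustrative sketch (not part of this patch) of the contract the wrapper assumes
+# for src/BQ_run_module.py: run_module() receives the dict of fetched input paths
+# built in fetch_input_resources() plus an output directory, and returns a dict
+# mapping each output name declared in NPHSegmentation.xml to the file it wrote.
+# The input/output names used here are hypothetical placeholders.
+#
+#   def run_module(input_path_dict, output_folder_path):
+#       import os
+#       scan_path = input_path_dict['Input Image']        # hypothetical input name
+#       seg_path = os.path.join(output_folder_path, 'segmentation.nii.gz')
+#       # ... load the scan, run the 5-class segmentation, write seg_path ...
+#       return {'Segmentation': seg_path}                 # keys must match the xml output names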
+from src.BQ_run_module import run_module + + +class ScriptError(Exception): + def __init__(self, message): + self.message = "Script error: %s" % message + + def __str__(self): + return self.message + + +class PythonScriptWrapper(object): + def __init__(self): + for file in os.listdir(): # Might change it to read parameters from .JSON or from modulePath variable + if file.endswith(".xml"): + # Get xml file name as module name + if hasattr(self, 'module_name'): + raise ScriptError('More than 1 .xml file present in directory, make appropiate changes and rebuild image') + else: + self.module_name = file[:-4] + + tree = ET.parse(self.module_name+'.xml') # Load module xml as tree + self.root = tree.getroot() # Get root node of tree + + def upload_results(self, bq): + """ + Reads output specs from xml and uploads results to Bisque using correct service + """ + + output_resources = [] + non_image_value = {} + non_image_present = False + + # Get outputs tag and its nonimage child tag + outputs_tag = self.root.find("./*[@name='outputs']") + print(outputs_tag) + nonimage_tag = outputs_tag.find("./*[@name='NonImage']") + print(nonimage_tag.tag, nonimage_tag.attrib) + + # Upload each resource with the corresponding service + for resource in (nonimage_tag.findall(".//*[@type]") + outputs_tag.findall("./*[@type='image']")): + print(resource.tag, resource.attrib) + print("NonImage type output with name %s" % resource.attrib['name']) + resource_name = resource.attrib['name'] + resource_type = resource.attrib['type'] + resource_path = self.output_data_path_dict[resource_name] + # log.info(f"***** Uploading output {resource_type} '{resource_name}' from {resource_path} ...") + log.info("***** Uploading output %s '%s' from %s ..." % (resource_type, resource_name, resource_path)) + + # Upload output resource to Bisque and get resource etree.Element + output_etree_Element = self.upload_service(bq, resource_path, data_type=resource_type) + # log.info(f"***** Uploaded output {resource_type} '{resource_name}' to {output_etree_Element.get('value')}") + log.info("***** Uploaded output %s '%s' to %s" % (resource_type, resource_name, output_etree_Element.get('value'))) + + # Set the value attribute of the each resource's tag to its corresponding resource uri + resource.set('value', output_etree_Element.get('value')) + + # Append image outputs to output resources list + if resource in outputs_tag.findall("./*[@type='image']"): + output_resource_xml = ET.tostring(resource).decode('utf-8') + output_resources.append(output_resource_xml) + else: + non_image_present = True + non_image_value[resource_name] = output_etree_Element.get('value') + + # Append all nonimage outputs to NonImage tag and append it to output resource list + if non_image_present: + template_tag = nonimage_tag.find("./template") + nonimage_tag.remove(template_tag) + for resource in non_image_value: + # ET.SubElement(nonimage_tag, 'tag', attrib={'name' : f"{resource}", 'type': 'resource', 'value': f"{non_image_value[resource]}"}) + ET.SubElement(nonimage_tag, 'tag', attrib={'name' : "%s" % resource, 'type': 'resource', 'value': "%s" % non_image_value[resource]}) + + output_resource_xml = ET.tostring(nonimage_tag).decode('utf-8') + output_resources.append(output_resource_xml) + + # log.debug(f"***** Output Resources xml : output_resources = {output_resources}") + log.debug("***** Output Resources xml : output_resources = %s" % output_resources) + # SAMPLE LOG + # ['\n \n \n '] + return output_resources + + + def fetch_input_resources(self, bq, 
inputs_dir_path): #TODO Not hardcoded resource_url + """ + Reads input resources from xml, fetches them from Bisque, and copies them to module container for inference + + """ + + log.info('***** Options: %s' % (self.options)) + + input_bq_objs = [] + input_path_dict = {} # Dictionary that contains the paths of the input resources + + inputs_tag = self.root.find("./*[@name='inputs']") +# print(inputs_tag) + for input_resource in inputs_tag.findall("./*[@type='resource']"): + # for child in node.iter(): + print(input_resource.tag, input_resource.attrib) + + input_name = input_resource.attrib['name'] + # log.info(f"***** Processing resource named: {input_name}") + log.info("***** Processing resource named: %s" % input_name) + resource_obj = bq.load(getattr(self.options, input_name)) + """ + bq.load returns bqapi.bqclass.BQImage object. Ex: + resource_obj: (image:name=whale.jpeg,value=file://admin/2022-02-25/whale.jpeg,type=None,uri=http://128.111.185.163:8080/data_service/00-pkGCYS4SPCtQVcdZUUj4sX,ts=2022-02-25T17:05:13.289578,resource_uniq=00-pkGCYS4SPCtQVcdZUUj4sX) + + resource_obj: (resource:name=yolov5s.pt,type=None,uri=http://128.111.185.163:8080/data_service/00-D9e6xVPhU93JtZjZZtwkLm,ts=2022-02-26T01:08:26.198330,resource_uniq=00-D9e6xVPhU93JtZjZZtwkLm) (PythonScriptWrapper.py:137) + + resource_obj: (resource:name=test.npy,type=None,uri=http://128.111.185.163:8080/data_service/00-EC53Rcbj8do86aXpea2cgW,ts=2022-02-26T01:17:12.312780,resource_uniq=00-EC53Rcbj8do86aXpea2cgW) (PythonScriptWrapper.py:137) + """ + + input_bq_objs.append(resource_obj) + # log.info(f"***** resource_obj: {resource_obj}") + log.info("***** resource_obj: %s" % resource_obj) + # log.info(f"***** resource_obj.uri: {resource_obj.uri}") + log.info("***** resource_obj.uri: %s" % resource_obj.uri) + # log.info(f"***** type(resource_obj): {type(resource_obj)}") + log.info("***** type(resource_obj): %s" % type(resource_obj)) + + # Append uri to dictionary of input paths + input_path_dict[input_name] = os.path.join(inputs_dir_path, resource_obj.name) + + # Saves resource to module container at specified dest path + # fetch_blob_output = fetch_blob(bq, resource_obj.uri, dest=input_path_dict[input_name]) + image = bq.load(resource_obj.uri) + # name = image.name or next_name("blob") + name = image.name + predictor_url = bq.service_url('blob_service', path = image.resource_uniq) + log.info("predictor_URL: %s" % (predictor_url)) + + # predictor_path = os.path.join(kw.get('stagingPath', 'source/Scans'), self.getstrtime()+'-'+image.name + '.nii') + input_path_dict[input_name] = input_path_dict[input_name].replace(".nii.gz", ".nii") + predictor_path = bq.fetchblob(predictor_url, path=input_path_dict[input_name]) + log.info("predictor_path: %s" % (predictor_path)) + # log.info(f"***** fetch_blob_output: {fetch_blob_output}") + # log.info("***** fetch_blob_output: %s" % fetch_blob_output) + + # log.info(f"***** Input path dictionary : {input_path_dict}") + log.info("***** Input path dictionary : %s" % input_path_dict) + + return input_path_dict + + + def run(self): + """ + Run Python script + + """ + bq = self.bqSession + log.info('***** self.options: %s' % (self.options)) + + # Use current directory to store input and output data for now, if changed, might have to look at teardown funct too + inputs_dir_path = os.getcwd() + outputs_dir_path = os.getcwd() + + # Fetch input resources + try: + bq.update_mex('Fetching inputs specified in xml') + input_path_dict = self.fetch_input_resources(bq, inputs_dir_path) + except (Exception, 
ScriptError) as e: + log.exception("***** Exception while fetching inputs specified in xml") + bq.fail_mex(msg="Exception while fetching inputs specified in xml: %s" % str(e)) + return + + + # Run module from BQ_run_module and get get a dictionary that contains the paths to the module results + try: + bq.update_mex('Running module') + self.output_data_path_dict = run_module(input_path_dict, outputs_dir_path) + except (Exception, ScriptError) as e: + log.exception("***** Exception while running module from BQ_run_module") + bq.fail_mex(msg="Exception while running module from BQ_run_module: %s" % str(e)) + return + + # Upload results to Bisque + try: + bq.update_mex('Uploading results to Bisque') + self.output_resources = self.upload_results(bq) + except (Exception, ScriptError) as e: + log.exception("***** Exception while uploading results to Bisque") + bq.fail_mex(msg="Exception while uploading results to Bisque: %s" % str(e)) + return + + + def setup(self): + """ + Pre-run initialization + """ + self.bqSession.update_mex('Initializing...') + self.mex_parameter_parser(self.bqSession.mex.xmltree) + self.output_resources = [] + + def tear_down(self): # NEED TO GENERALIZE + """ + Post the results to the mex xml + """ + self.bqSession.update_mex('Returning results') + outputTag = etree.Element('tag', name='outputs') + for r_xml in self.output_resources: + if isinstance(r_xml, str): + r_xml = etree.fromstring(r_xml) + res_type = r_xml.get('type', None) or r_xml.get( + 'resource_type', None) or r_xml.tag + # append reference to output + if res_type in ['table', 'image']: + outputTag.append(r_xml) + # etree.SubElement(outputTag, 'tag', name='output_table' if res_type=='table' else 'output_image', type=res_type, value=r_xml.get('uri','')) + else: + outputTag.append(r_xml) + # etree.SubElement(outputTag, r_xml.tag, name=r_xml.get('name', '_'), type=r_xml.get('type', 'string'), value=r_xml.get('value', '')) + log.debug('Output Mex results: %s' % + (etree.tostring(outputTag, pretty_print=True))) + self.bqSession.finish_mex(tags=[outputTag]) + + def mex_parameter_parser(self, mex_xml): + """ + Parses input of the xml and add it to options attribute (unless already set) + + @param: mex_xml + """ + # inputs are all non-"script_params" under "inputs" and all params under "script_params" + mex_inputs = mex_xml.xpath( + 'tag[@name="inputs"]/tag[@name!="script_params"] | tag[@name="inputs"]/tag[@name="script_params"]/tag') + if mex_inputs: + for tag in mex_inputs: + if tag.tag == 'tag' and tag.get('type', '') != 'system-input': # skip system input values + if not getattr(self.options, tag.get('name', ''), None): + log.debug('Set options with %s as %s' % (tag.get('name', ''), tag.get('value', ''))) + setattr(self.options, tag.get('name', ''), tag.get('value', '')) + else: + log.debug('No Inputs Found on MEX!') + + def upload_service(self, bq, filename, data_type='image'): + """ + Upload resource to specific service upon post process + """ + mex_id = bq.mex.uri.split('/')[-1] + + log.info('Up Mex: %s' % (mex_id)) + log.info('Up File: %s' % (filename)) + resource = etree.Element( + data_type, name='ModuleExecutions/' + self.module_name + '/' + filename) + t = etree.SubElement(resource, 'tag', name="datetime", value='time') + log.info('Creating upload xml data: %s ' % + str(etree.tostring(resource, pretty_print=True))) + # os.path.join("ModuleExecutions","CellSegment3D", filename) + filepath = filename + # use import service to /import/transfer activating import service + r = etree.XML(bq.postblob(filepath, 
xml=resource)).find('./') + if r is None or r.get('uri') is None: + bq.fail_mex(msg="Exception during upload results") + else: + log.info('Uploaded ID: %s, URL: %s' % + (r.get('resource_uniq'), r.get('uri'))) + bq.update_mex('Uploaded ID: %s, URL: %s' % + (r.get('resource_uniq'), r.get('uri'))) + self.furl = r.get('uri') + self.fname = r.get('name') + resource.set('value', self.furl) + + return resource + + def validate_input(self): + """ + Check to see if a mex with token or user with password was provided. + + @return True is returned if validation credention was provided else + False is returned + """ + if (self.options.mexURL and self.options.token): # run module through engine service + return True + if (self.options.user and self.options.pwd and self.options.root): # run module locally (note: to test module) + return True + log.debug('Insufficient options or arguments to start this module') + return False + + def main(self): + parser = optparse.OptionParser() + parser.add_option('--mex_url', dest="mexURL") + parser.add_option('--module_dir', dest="modulePath") + parser.add_option('--staging_path', dest="stagingPath") + parser.add_option('--bisque_token', dest="token") + parser.add_option('--user', dest="user") + parser.add_option('--pwd', dest="pwd") + parser.add_option('--root', dest="root") + # parser.add_option('--resource_url', dest="resource_url") + + (options, args) = parser.parse_args() + + fh = logging.FileHandler('scriptrun.log', mode='a') + fh.setLevel(logging.DEBUG) + formatter = logging.Formatter('[%(asctime)s] %(levelname)8s --- %(message)s ' + + '(%(filename)s:%(lineno)s)', datefmt='%Y-%m-%d %H:%M:%S') + fh.setFormatter(formatter) + log.addHandler(fh) + + try: # pull out the mex + + if not options.mexURL: + options.mexURL = sys.argv[-2] + if not options.token: + options.token = sys.argv[-1] + except IndexError: # no argv were set + pass + + if not options.stagingPath: + options.stagingPath = '' + + log.debug('\n\nPARAMS : %s \n\n Options: %s' % (args, options)) + self.options = options + + log.debug("Validating input") + if self.validate_input(): + + log.debug("Validated input successfully") + + # initalizes if user and password are provided + log.info(f"{self.options.user=}") + log.info(f"{self.options.pwd=}") + log.info(f"{self.options.root=}") + + if (self.options.user and self.options.pwd and self.options.root): + log.debug("User and password were provided") + + try: + self.bqSession = BQSession().init_local(self.options.user, self.options.pwd, + bisque_root=self.options.root) + log.debug("Created BQSession with init_local") + self.options.mexURL = self.bqSession.mex.uri + + except: + log.error("Unable to get options.mexURL") + return + + # initalizes if mex and mex token is provided + elif (self.options.mexURL and self.options.token): + log.debug("mex and mex token were provided") + log.info(f"{self.options.mexURL=}") + log.info(f"{self.options.token=}") + + try: + self.bqSession = BQSession().init_mex(self.options.mexURL, self.options.token) + except Exception as e: + log.info("Failed to make BQSession") + log.info(str(e)) + return + + + + else: + raise ScriptError('Insufficient options or arguments to start this module') + + try: + self.setup() + except Exception as e: + log.exception("Exception during setup") + self.bqSession.fail_mex(msg="Exception during setup: %s" % str(e)) + return + #### + try: + self.run() + except (Exception, ScriptError) as e: + log.exception("Exception during run") + self.bqSession.fail_mex(msg="Exception during run: %s" % str(e)) + 
return + ## + try: + self.tear_down() + except (Exception, ScriptError) as e: + log.exception("Exception during tear_down") + self.bqSession.fail_mex(msg="Exception during tear_down: %s" % str(e)) + return + + self.bqSession.close() + log.debug('Session Close') + + +if __name__ == "__main__": + PythonScriptWrapper().main() diff --git a/nph_5class/bqapi/RequestsMonkeyPatch/__init__.py b/nph_5class/bqapi/RequestsMonkeyPatch/__init__.py new file mode 100644 index 0000000..5dd8fd0 --- /dev/null +++ b/nph_5class/bqapi/RequestsMonkeyPatch/__init__.py @@ -0,0 +1 @@ +#empty \ No newline at end of file diff --git a/nph_5class/bqapi/RequestsMonkeyPatch/monkeypatch.py b/nph_5class/bqapi/RequestsMonkeyPatch/monkeypatch.py new file mode 100644 index 0000000..2b3be6e --- /dev/null +++ b/nph_5class/bqapi/RequestsMonkeyPatch/monkeypatch.py @@ -0,0 +1,5 @@ +def monkeypatch_method(cls): + def decorator(func): + setattr(cls, func.__name__, func) + return func + return decorator diff --git a/nph_5class/bqapi/RequestsMonkeyPatch/requests_patch.py b/nph_5class/bqapi/RequestsMonkeyPatch/requests_patch.py new file mode 100644 index 0000000..66508e2 --- /dev/null +++ b/nph_5class/bqapi/RequestsMonkeyPatch/requests_patch.py @@ -0,0 +1,64 @@ +""" + A patch to format_header_param in urllib3 + + If a value has unicode the header will be returned + as 'name="value"; name*=utf-8''value' else + 'name="value"' +""" + + +import email.utils +#import mimetypes +import warnings + +import requests +import requests.packages.urllib3 as urllib3 +from requests.packages.urllib3.packages import six +from .monkeypatch import monkeypatch_method + + + +REQUESTS_V = [int(s) for s in requests.__version__.split('.')] + +if REQUESTS_V < [2, 4, 0] or REQUESTS_V > [2, 19, 0]: + warnings.warn("""\ +We need to patch requests 2.4.0 up to 2.19.0, make sure your version of requests \ +needs this patch, greater than 2.4.3 we do not know if this patch applys.""" + ) + raise ImportError('Requests 2.4.0 to 2.10.0 is required!') +#elif requests_v > [3, 0, 0]: +# #does not require this patch +# pass +else: + @monkeypatch_method(urllib3.fields) + def format_header_param(name, value): + """ + Helper function to format and quote a single header parameter. + + Particularly useful for header parameters which might contain + non-ASCII values, like file names. This follows RFC 2231, as + suggested by RFC 2388 Section 4.4. + + :param name: + The name of the parameter, a string expected to be ASCII only. + :param value: + The value of the parameter, provided as a unicode string. 
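+
+        Example (illustrative; reflects the patched logic below):
+            format_header_param('filename', 'photo.jpg')
+                returns:  filename="photo.jpg"
+            format_header_param('filename', u'caf\xe9.jpg')
+                returns:  filename="caf\xe9.jpg"; filename*=utf-8''caf%C3%A9.jpg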
+ """ + if not any(ch in value for ch in '"\\\r\n'): + result = '%s="%s"' % (name, value) + try: + result.encode('ascii') + except UnicodeEncodeError: + pass + else: + return result + + value_encode = value + if not six.PY3: # Python 2: + value_encode = value.encode('utf-8') + + value = '%s="%s"; %s*=%s' % ( + name, value, + name, email.utils.encode_rfc2231(value_encode, 'utf-8') + ) + return value diff --git a/nph_5class/bqapi/__init__.py b/nph_5class/bqapi/__init__.py new file mode 100644 index 0000000..cbc3fe7 --- /dev/null +++ b/nph_5class/bqapi/__init__.py @@ -0,0 +1,6 @@ +# + +from .comm import * +from .types import * + + diff --git a/nph_5class/bqapi/blockable_module.py b/nph_5class/bqapi/blockable_module.py new file mode 100644 index 0000000..755720b --- /dev/null +++ b/nph_5class/bqapi/blockable_module.py @@ -0,0 +1,60 @@ +import sys +import collections + +from bqapi import BQSession + + +import logging + + + +#logging.basicConfig(filename='BlockableModule.log',level=logging.DEBUG) #!!! +log = logging.getLogger('bqapi.blockable_module') + +class BlockableModule(object): + """Base class for module that can run over blocks of parameters""" + + def main(self, mex_url=None, auth_token=None, bq=None, **kw): + # Allow for testing by passing an alreay initialized session + if bq is None: + bq = BQSession().init_mex(mex_url, auth_token) + + # check for list parameters + params = bq.get_mex_inputs() + if isinstance(params, dict) or not isinstance(params, collections.Iterable): + params = [params] + # pass values directly as args + for single_params in params: + for param_name in single_params: + if 'value' in single_params[param_name]: + single_params[param_name] = single_params[param_name].get('value') + + # TODO: measure block startup time + self.start_block(bq, params) + + for kw in params: + # TODO: measure single item time + # TODO: run in parallel + if 'mex_url' in kw: + # set (innermost) single item mex + sub_bq = BQSession().init_mex(kw['mex_url'], auth_token) + else: + sub_bq = bq + self.process_single(sub_bq, **kw) + if 'mex_url' in kw: + sub_bq.close() + + # TODO: measure block teardown time + self.end_block(bq) + + sys.exit(0) + #bq.close() + + def start_block(self, bq, all_kw): + pass + + def end_block(self, bq): + pass + + def process_single(self, bq, **kw): + pass diff --git a/nph_5class/bqapi/bqclass.py b/nph_5class/bqapi/bqclass.py new file mode 100644 index 0000000..3d54a69 --- /dev/null +++ b/nph_5class/bqapi/bqclass.py @@ -0,0 +1,813 @@ +############################################################################### +## Bisquik ## +## Center for Bio-Image Informatics ## +## University of California at Santa Barbara ## +## ------------------------------------------------------------------------- ## +## ## +## Copyright (c) 2007 by the Regents of the University of California ## +## All rights reserved ## +## ## +## Redistribution and use in source and binary forms, with or without ## +## modification, are permitted provided that the following conditions are ## +## met: ## +## ## +## 1. Redistributions of source code must retain the above copyright ## +## notice, this list of conditions, and the following disclaimer. ## +## ## +## 2. Redistributions in binary form must reproduce the above copyright ## +## notice, this list of conditions, and the following disclaimer in ## +## the documentation and/or other materials provided with the ## +## distribution. ## +## ## +## 3. 
All advertising materials mentioning features or use of this ## +## software must display the following acknowledgement: This product ## +## includes software developed by the Center for Bio-Image Informatics## +## University of California at Santa Barbara, and its contributors. ## +## ## +## 4. Neither the name of the University nor the names of its ## +## contributors may be used to endorse or promote products derived ## +## from this software without specific prior written permission. ## +## ## +## THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS "AS IS" AND ANY ## +## EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ## +## WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ARE ## +## DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ## +## ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL ## +## DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS ## +## OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ## +## HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ## +## STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ## +## ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ## +## POSSIBILITY OF SUCH DAMAGE. ## +## ## +############################################################################### +""" +BQ API - a set of classes that represent Bisque objects + +""" + +__module__ = "bqapi.py" +__author__ = "Dmitry Fedorov and Kris Kvilekval" +__version__ = "0.1" +__revision__ = "$Rev$" +__date__ = "$Date$" +__copyright__ = "Center for BioImage Informatics, University California, Santa Barbara" + +import os +import sys +import math +import io +import inspect +import logging +import tempfile +import collections +try: + from lxml import etree +except ImportError: + import xml.etree.ElementTree as etree +from .xmldict import xml2nv + + +log = logging.getLogger('bqapi.class') + +__all__ = [ 'BQFactory', 'BQNode', 'BQImage', 'BQResource', 'BQValue', 'BQTag', 'BQVertex', 'BQGObject', + "BQDataset", "BQUser", "BQMex", + 'gobject_primitives', + 'BQPoint', 'BQLabel', 'BQPolyline', 'BQPolygon', 'BQCircle', 'BQEllipse', 'BQRectangle', 'BQSquare',] # 'toXml', 'fromXml' ] + +gobject_primitives = set(['point', 'label', 'polyline', 'polygon', 'circle', 'ellipse', 'rectangle', 'square', 'line']) + + +################################################################################ +# Base class for bisque resources +################################################################################ + +class BQNode (object): + '''Base class for parsing Bisque XML''' + xmltag = '' + xmlfields = [] + xmlkids = [] + + def __init__(self, *args, **kw): + for k,v in zip(self.xmlfields, args): + setattr(self, k, v) + for k in self.xmlfields: + if k in self.__dict__: + continue + setattr(self, k, kw.get(k, None)) + + def initialize(self): + 'used for class post parsing initialization' + pass + + def initializeXml(self, xmlnode): + for x in self.xmlfields: + setattr(self, x, xmlnode.get (x, None)) + + def set_parent(self, parent): + pass + + def __repr__(self): + return '(%s:%s)' % (self.xmltag, id(self) ) + + def __str__(self): + return '(%s:%s)'%(self.xmltag,','.join (['%s=%s' % (f, getattr(self,f,'')) for f in self.xmlfields])) + + def toTuple (self): + return tuple( [ x for x in self.xmlfields ] ) + +################################################################################ +# Value 
+################################################################################ + +class BQValue (BQNode): + '''tag value''' + xmltag = "value" + xmlfields = ['value', 'type', 'index'] + + #def __init__(self, **kw): + # super(BQValue, self).__init__(**kw) + + def set_parent(self, parent): + if self.index is not None: + parent.values.extend([None for x in range((self.index+1)-len(parent.values))]) + parent.values[self.index] = self + else: + parent.values.append(self) + + def initializeXml(self, xmlnode): + super(BQValue, self).initializeXml(xmlnode) + try: + self.index = int(self.index) + except Exception: + self.index = None + self.value = xmlnode.text + + def toetree(self, parent, baseuri): + n = etree.SubElement(parent, 'value', ) + if self.type is not None: n.set('type', str(self.type)) + if self.index is not None: n.set('index', str(self.index)) + if self.value is not None: n.text = str(self.value) + return n + +################################################################################ +# Base class for bisque resources +################################################################################ + +class BQResource (BQNode): + '''Base class for Bisque resources''' + xmltag = 'resource' + xmlfields = ['name', 'type', 'uri', 'ts', 'resource_uniq'] + xmlkids = ['kids', 'tags', 'gobjects',] # 'values'] handled differently + + def __repr__(self): + return '(%s:%s)'%(self.xmltag, self.uri) #pylint: disable=no-member + + def __init__(self, *args, **kw): + self.tags = [] + self.gobjects = [] + self.kids = [] + self.values = [] + self.parent = None + super(BQResource, self).__init__(*args, **kw) + + def toDict (self): + objs = {} + objs.update ( [ (f.name, f) for f in self.tags if f.name ] ) + objs.update ( [ (f.name, f) for f in self.gobjects if f.name ] ) + return objs + + def set_parent(self, parent): + self.parent = parent + parent.kids.append(self) + + def addTag(self, name=None, value=None, type = None, tag=None): + if tag is None: + tag = BQTag(name=name, value=value, type = type) + tag.set_parent (self) + return tag + add_tag = addTag + + def addGObject(self, name=None, value=None, type = None, gob=None): + if gob is None: + gob = BQGObject(name=name, value=value, type = type) + gob.set_parent(self) + add_gob = addGObject + + + def findall (self, name, limit=None): + "find all name that match, options limit search tag, gobject or a kid" + limit = limit or ['tag', 'gobject', 'kid'] + results =[] + if 'tag' in limit: + for tg in self.tags: + if tg.name == name: + results.append( tg ) + if 'gobject' in limit: + for tg in self.gobjects: + if tg.name == name: + results.append( tg ) + if 'kid' in limit: + for tg in self.kids: + if tg.name == name: + results.append ( tg ) + return results + + def find(self, name, limit=None): + "Find first element and return options limit search tag, gobject or a kid" + limit = limit or ['tag', 'gobject', 'kid'] + if 'tag' in limit: + for tg in self.tags: + if tg.name == name: + return tg + if 'gobject' in limit: + for tg in self.gobjects: + if tg.name == name: + return tg + if 'kid' in limit: + for tg in self.kids: + if tg.name == name: + return tg + + + # def tag(self, name): + # results = [] + # for tg in self.tags: + # if tg.name == name: + # results.append(tg) + # if len(results) == 0: + # return None + # elif len(results) == 1: + # return results[0] + # else: + # return results + + # def gob(self, name): + # results = [] + # for tg in self.gobjects: + # if tg.name == name: + # results.append(tg) + # if len(results) == 0: + # return None + # 
elif len(results) == 1: + # return results[0] + # else: + # return results + + def get_value(self): + if len(self.values)==0: + return None + if len(self.values)==1: + return self.values[0].value + return [ x.value for x in self.values ] + + def set_value(self, values): + if not isinstance(values, list): + values = [ values ] + self.values = [ BQValue(*v) if isinstance(v, tuple) else BQValue(v) for v in values ] + + + value = property(get_value, set_value) + + def toetree(self, parent, baseuri): + xmlkids = list(self.xmlkids) + if len(self.values)<=1: + n = create_element(self, parent, baseuri) + else: + n = create_element(self, parent, baseuri) + if 'value' in n.attrib: + del n.attrib['value'] + xmlkids.append('values') + for kid_name in xmlkids: + for x in getattr(self, kid_name, None): + toxmlnode (x, n, baseuri) + return n + + +################################################################################ +# Image +################################################################################ + +class BQImage(BQResource): + xmltag = "image" + xmlfields = ['name', 'value', 'type', 'uri', 'ts', 'resource_uniq' ] # "x", "y","z", "t", "ch" ] + xmlkids = ['tags', 'gobjects'] + + def __init__(self, *args, **kw): + super(BQImage, self).__init__(*args, **kw) + self._geometry = None + self._meta = None + self._info = {} + + # returns etree XML with image metadata + def meta(self): + 'return image meta as xml' + if self._meta is None: + info = self.pixels().meta().fetch() + self._meta = etree.XML(info) + self._info = xml2nv(self._meta) + return self._meta + + # returns dict with image metadata name:value + def info(self): + 'return image meta as dict' + if self._meta is None: + self.meta() + return self._info + + def geometry(self): + 'return x,y,z,t,ch of image' + if self._geometry is None: + info = self.meta() + geom = [] + for n in 'xyztc': + tn = info.xpath('//tag[@name="image_num_%s"]' % n) + geom.append(tn[0].get('value')) + self._geometry = tuple(map(int, geom)) + return self._geometry + + def pixels(self): + return BQImagePixels(self) + + + +class BQImagePixels(object): + """manage requests to the image pixels""" + def __init__(self, image): + self.image = image + self.ops = [] + + def _construct_url(self): + """build the final url based on the operation + """ + image_service = self.image.session.service('image_service') + return image_service.construct (path = '%s?%s'%(self.image.resource_uniq, + '&'.join ( "%s=%s" % tp for tp in self.ops ))) + #return session.service_url('image_service', + # % (self.image.resource_uniq, '&'.join(self.ops))) + + def fetch(self, path=None, stream=False): + """resolve the current and fetch the pixel + """ + url = self._construct_url() + image_service = self.image.session.service ('image_service') + if path is not None: + stream = True + + response = image_service.fetch (url, stream=stream) + if path is not None: + with open (path, 'wb') as fb: + for block in response.iter_content(chunk_size = 16 * 1024 * 1024): #16MB + fb.write(block) + response.close() + else: + return response.content + + def command(self, operation, arguments=''): + self.ops.append((operation, arguments)) + return self + + def slice(self, x='', y='',z='',t=''): + """Slice the current image""" + return self.command('slice', '%s,%s,%s,%s' % (x,y,z,t)) + + def format(self, fmt): + return self.command('format', fmt) + + def resize(self, w='',h='', interpolation=''): + """ interpoaltion may be,[ NN|,BL|,BC][,AR] + """ + return self.command('resize', '%s,%s,%s' % (w,h,interpolation)) + + 
def localpath(self): + return self.command('localpath') + + def meta(self): + return self.command('meta') + + def info(self): + return self.command('info') + + def asarray(self): + try: + import tifffile + except ImportError: + log.error ("Please install Tifffile (Optional)") + return None + # Force format to be tiff by removing any format and append format tiff + self.ops = [ tp for tp in self.ops if tp[0] != 'format' ] + self.format ('tiff') + url = self._construct_url() + image_service = self.image.session.service ('image_service') + with image_service.fetch (url, stream=True) as response: + #response.raw.decode_content = True + return tifffile.imread (io.BytesIO (response.content)) + + def savearray (self, fname, imdata=None, imshape=None, dtype=None, **kwargs): + try: + import tifffile + except ImportError: + log.error ("Please install Tifffile (Optional)") + return None + import_service = self.image.session.service ('import_service') + imfile = tempfile.mkstemp (suffix='.tiff') + tifffile.imsave (imfile, imdata, imshape, dtype, **kwargs) + import_service.transfer (fname, fileobj = open (imfile, 'rb')) + os.remove (imfile) + + + + + +################################################################################ +# Tag +################################################################################ + +class BQTag (BQResource): + '''tag resource''' + xmltag = "tag" + xmlfields = ['name', 'value', 'type', 'uri', 'ts'] + xmlkids = ['tags', 'gobjects', ] # handle values specially + + def set_parent(self, parent): + self.parent = parent + parent.tags.append(self) + +# def get_value(self): +# if len(self.values)==0: +# return None +# if len(self.values)==1: +# return self.values[0].value +# return [ x.value for x in self.values ] +# def set_value(self, values): +# if not isinstance(values, list): +# self.values = [ BQValue(values)] +# else: +# self.values = [ BQValue(v) for v in values ] +# +# value = property(get_value, set_value) +# +# def toetree(self, parent, baseuri): +# xmlkids = list(self.xmlkids) +# if len(self.values)<=1: +# n = create_element(self, parent, baseuri) +# else: +# n = create_element(self, parent, baseuri) +# del n.attrib['value'] +# xmlkids.append('values') +# for kid_name in xmlkids: +# for x in getattr(self, kid_name, None): +# toxmlnode (x, n, baseuri) +# return n + + + + +################################################################################ +# GObject +################################################################################ + +class BQVertex (BQNode): + '''gobject vertex''' + type = 'vertex' + xmltag = "vertex" + xmlfields = ['x', 'y', 'z', 't', 'c', 'index'] + + def __init__(self, **kw): + self.fromObj(**kw) + + def __repr__(self): + return 'vertex(x:%s,y:%s,z:%s,t:%s)'%(self.x, self.y, self.z, self.t) + + def set_parent(self, parent): + self.parent = parent + parent.vertices.append(self) + + def toTuple(self): + return (self.x, self.y, self.z, self.t) + + def fromTuple(self, v): + x,y,z,t = v + self.x=x; self.y=y; self.z=z; self.t=t + + def fromObj(self, **kw): + for k,v in list(kw.items()): + if k in self.xmlfields: + setattr(self,k,v) + +class BQGObject(BQResource): + '''Gobject resource: A grpahical annotation''' + type = 'gobject' + xmltag = "gobject" + xmlfields = ['name', 'value', 'type', 'uri'] + xmlkids = ['tags', 'gobjects', 'vertices'] + + def __init__(self, *args, **kw): + super(BQGObject, self).__init__(*args, **kw) + self.name = None + self.vertices = [] + self.type= self.type or self.xmltag + + def __str__(self): + return 
'(type: %s, name: %s, %s)'%(self.type, self.name, self.vertices) + + def set_parent(self, parent): + self.parent = parent + parent.gobjects.append(self) + + def verticesAsTuples(self): + return [v.toTuple() for v in self.vertices ] + + def perimeter(self): + return -1 + + def area(self): + return -1 + + +class BQPoint (BQGObject): + '''point gobject resource''' + xmltag = "point" + +class BQLabel (BQGObject): + '''label gobject resource''' + xmltag = "label" + +class BQPolyline (BQGObject): + '''polyline gobject resource''' + xmltag = "polyline" + def perimeter(self): + vx = self.verticesAsTuples() + d = 0 + for i in range(0, len(vx)-1): + x1,y1,z1,t1 = vx[i] + x2,y2,z2,t2 = vx[i+1] + d += math.sqrt( math.pow(x2-x1,2.0) + math.pow(y2-y1,2.0) ) + return d + + +# only does 2D version right now, polygon area is flawed if the edges are intersecting +# implement better algorithm based on triangles +class BQPolygon (BQGObject): + '''Polygon gobject resource''' + xmltag = "polygon" + # only does 2D version right now + def perimeter(self): + vx = self.verticesAsTuples() + vx.append(vx[0]) + d = 0 + for i in range(0, len(vx)-1): + x1,y1,z1,t1 = vx[i] + x2,y2,z2,t2 = vx[i+1] + d += math.sqrt( math.pow(x2-x1,2.0) + math.pow(y2-y1,2.0) ) + return d + + # only does 2D version right now + # area is flawed if the edges are intersecting implement better algorithm based on triangles + def area(self): + vx = self.verticesAsTuples() + vx.append(vx[0]) + d = 0 + for i in range(0, len(vx)-1): + x1,y1,z1,t1 = vx[i] + x2,y2,z2,t2 = vx[i+1] + d += x1*y2 - y1*x2 + return 0.5 * math.fabs(d) + +class BQCircle (BQGObject): + '''circle gobject resource''' + xmltag = "circle" + def perimeter(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + return 2.0 * math.pi * max(math.fabs(x1-x2), math.fabs(y1-y2)) + + def area(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + return math.pi * pow( max(math.fabs(x1-x2), math.fabs(y1-y2)), 2.0) + +class BQEllipse (BQGObject): + '''ellipse gobject resource''' + xmltag = "ellipse" + type = 'ellipse' + + def perimeter(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + x3,y3,z3,t3 = vx[2] + a = max(math.fabs(x1-x2), math.fabs(y1-y2)) + b = max(math.fabs(x1-x3), math.fabs(y1-y3)) + return math.pi * ( 3.0*(a+b) - math.sqrt( 10.0*a*b + 3.0*(a*a + b*b)) ) + + def area(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + x3,y3,z3,t3 = vx[2] + a = max(math.fabs(x1-x2), math.fabs(y1-y2)) + b = max(math.fabs(x1-x3), math.fabs(y1-y3)) + return math.pi * a * b + +class BQRectangle (BQGObject): + '''rectangle gobject resource''' + xmltag = "rectangle" + def perimeter(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + return math.fabs(x1-x2)*2.0 + math.fabs(y1-y2)*2.0 + + def area(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + return math.fabs(x1-x2) * math.fabs(y1-y2) + +class BQSquare (BQRectangle): + '''square gobject resource''' + xmltag = "square" + + +################################################################################ +# Advanced Objects +################################################################################ +class BQDataset(BQResource): + xmltag = "dataset" + #xmlfields = ['name', 'uri', 'ts'] + #xmlkids = ['kids', 'tags', 'gobjects'] + +class BQUser(BQResource): + xmltag = "user" + #xmlfields = ['name', 'uri', 'ts'] + #xmlkids = ['tags', 'gobjects'] + +class 
BQMex(BQResource): + xmltag = "mex" + #xmlfields = ['module', 'uri', 'ts', 'value'] + #xmlkids = ['tags', 'gobjects'] + +################################################################################ +# Factory +################################################################################ + +class BQFactory (object): + '''Factory for Bisque resources''' + resources = dict([ (x[1].xmltag, x[1]) for x in inspect.getmembers(sys.modules[__name__]) if inspect.isclass(x[1]) and hasattr(x[1], 'xmltag') ]) + + def __init__(self, session): + self.session = session + + @classmethod + def make(cls, xmltag, type_attr): + if xmltag == "gobject" and type_attr in gobject_primitives: + xmltag = type_attr + c = cls.resources.get(xmltag, BQResource) + return c() + + index_map = dict(vertex=('vertices',BQVertex), tag=('tags', BQTag)) + @classmethod + def index(cls, xmltag, parent, indx): + array, ctor = cls.index_map.get (xmltag, (None,None)) + if array: + objarr = getattr(parent, array) + objarr.extend ([ ctor() for x in range(((indx+1)-len(objarr)))]) + v = objarr[indx] + v.indx = indx + #log.debug ('fetching %s %s[%d]:%s' %(parent , array, indx, v)) + return v + + ################################################################################ + # Parsing + ################################################################################ + + def from_etree (self, xmlResource, resource=None, parent=None ): + """ Convert an etree to a python structure""" + stack = []; + resources = []; + # Initialize stack with a tuple of + # 1. The XML node being parsed + # 2. The current resource being filled outs + # 3. The parent resource if any + stack.append ( (xmlResource, resource, parent ) ) + while stack: + node, resource, parent = stack.pop(0); + xmltag = node.tag; + if resource is None: + type_ = node.get( 'type', '') + resource = self.make(xmltag, type_) + + resource.session = self.session + resource.initializeXml(node) + resources.append (resource) + if parent: + resource.set_parent(parent) + #resource.doc = parent.doc; + for k in node: + stack.append( (k, None, resource) ) + + resources[0].initialize() + resources[0].xmltree = xmlResource + return resources[0]; + def from_string (self, xmlstring): + et = etree.XML (xmlstring) + return self.from_etree(et) + + # Generation + + @classmethod + def to_etree(self, dbo, parent=None, baseuri='', view=''): + """Convert a BQObject to an etree object suitable for XML + generation + """ + node = toxmlnode(dbo, parent, baseuri, view) + return node; + @classmethod + def to_string (self, node): + if isinstance (node, BQNode): + node = self.to_etree(node) + return etree.tostring(node) + + @classmethod + def string2etree(self, xmlstring): + return etree.XML (xmlstring) + + +def create_element(dbo, parent, baseuri, **kw): + """Create an etree element from BQ object + """ + xtag = kw.pop('xtag', dbo.xmltag) + if not kw: + kw = model_fields (dbo, baseuri) + + if parent is not None: + node = etree.SubElement (parent, xtag, **kw) + else: + node = etree.Element (xtag, **kw) + return node + +def toxmlnode (dbo, parent, baseuri, view=None): + if hasattr(dbo, 'toetree'): + node = dbo.toetree(parent, baseuri) + else: + node = create_element (dbo, parent, baseuri) + for kid_name in dbo.xmlkids: + for x in getattr(dbo, kid_name, None): + toxmlnode (x, node, baseuri, view) + return node + + +def make_owner (dbo, fn, baseuri): + return ('owner', baseuri + str(dbo.owner)) + +def make_uri(dbo, fn, baseuri): + return ('uri', "%s%s" % (baseuri , str (dbo.uri))) + +def get_email (dbo, fn, 
baseuri): + return ('email', dbo.user.email_address) + +mapping_fields = { + 'mex' : None, + 'acl' : None, + # Auth + 'user_id' : get_email, + 'taggable_id': None, + 'permission': 'action', + 'resource': None, + } + +def model_fields(dbo, baseuri=None): + """Extract known fields from a BQ object, while removing any known + from C{excluded_fields} + + @rtype: dict + @return fields to be rendered in XML + """ + attrs = {} + try: + dbo_fields = dbo.xmlfields + except AttributeError: + # This occurs when the object is a fake DB objects + # The dictionary is sufficient + dbo_fields= dbo.__dict__ + for fn in dbo_fields: + fn = mapping_fields.get(fn, fn) + # Skip when map is None + if fn is None: + continue + # Map is callable, then call + if isinstance(fn, collections.Callable): + fn, attr_val = fn(dbo, fn, baseuri) + else: + attr_val = getattr(dbo, fn, None) + + # Put value in attribute dictionary + if attr_val is not None and attr_val!='': + if isinstance(attr_val, str): + attrs[fn] = attr_val + else: + attrs[fn] = str(attr_val) #unicode(attr_val,'utf-8') + return attrs diff --git a/nph_5class/bqapi/bqclass.py.bak b/nph_5class/bqapi/bqclass.py.bak new file mode 100644 index 0000000..3757202 --- /dev/null +++ b/nph_5class/bqapi/bqclass.py.bak @@ -0,0 +1,812 @@ +############################################################################### +## Bisquik ## +## Center for Bio-Image Informatics ## +## University of California at Santa Barbara ## +## ------------------------------------------------------------------------- ## +## ## +## Copyright (c) 2007 by the Regents of the University of California ## +## All rights reserved ## +## ## +## Redistribution and use in source and binary forms, with or without ## +## modification, are permitted provided that the following conditions are ## +## met: ## +## ## +## 1. Redistributions of source code must retain the above copyright ## +## notice, this list of conditions, and the following disclaimer. ## +## ## +## 2. Redistributions in binary form must reproduce the above copyright ## +## notice, this list of conditions, and the following disclaimer in ## +## the documentation and/or other materials provided with the ## +## distribution. ## +## ## +## 3. All advertising materials mentioning features or use of this ## +## software must display the following acknowledgement: This product ## +## includes software developed by the Center for Bio-Image Informatics## +## University of California at Santa Barbara, and its contributors. ## +## ## +## 4. Neither the name of the University nor the names of its ## +## contributors may be used to endorse or promote products derived ## +## from this software without specific prior written permission. ## +## ## +## THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS "AS IS" AND ANY ## +## EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ## +## WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ARE ## +## DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ## +## ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL ## +## DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS ## +## OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ## +## HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ## +## STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ## +## ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ## +## POSSIBILITY OF SUCH DAMAGE. 
## +## ## +############################################################################### +""" +BQ API - a set of classes that represent Bisque objects + +""" + +__module__ = "bqapi.py" +__author__ = "Dmitry Fedorov and Kris Kvilekval" +__version__ = "0.1" +__revision__ = "$Rev$" +__date__ = "$Date$" +__copyright__ = "Center for BioImage Informatics, University California, Santa Barbara" + +import os +import sys +import math +import io +import inspect +import logging +import tempfile +try: + from lxml import etree +except ImportError: + import xml.etree.ElementTree as etree +from .xmldict import xml2nv + + +log = logging.getLogger('bqapi.class') + +__all__ = [ 'BQFactory', 'BQNode', 'BQImage', 'BQResource', 'BQValue', 'BQTag', 'BQVertex', 'BQGObject', + "BQDataset", "BQUser", "BQMex", + 'gobject_primitives', + 'BQPoint', 'BQLabel', 'BQPolyline', 'BQPolygon', 'BQCircle', 'BQEllipse', 'BQRectangle', 'BQSquare',] # 'toXml', 'fromXml' ] + +gobject_primitives = set(['point', 'label', 'polyline', 'polygon', 'circle', 'ellipse', 'rectangle', 'square', 'line']) + + +################################################################################ +# Base class for bisque resources +################################################################################ + +class BQNode (object): + '''Base class for parsing Bisque XML''' + xmltag = '' + xmlfields = [] + xmlkids = [] + + def __init__(self, *args, **kw): + for k,v in zip(self.xmlfields, args): + setattr(self, k, v) + for k in self.xmlfields: + if k in self.__dict__: + continue + setattr(self, k, kw.get(k, None)) + + def initialize(self): + 'used for class post parsing initialization' + pass + + def initializeXml(self, xmlnode): + for x in self.xmlfields: + setattr(self, x, xmlnode.get (x, None)) + + def set_parent(self, parent): + pass + + def __repr__(self): + return '(%s:%s)' % (self.xmltag, id(self) ) + + def __str__(self): + return '(%s:%s)'%(self.xmltag,','.join (['%s=%s' % (f, getattr(self,f,'')) for f in self.xmlfields])) + + def toTuple (self): + return tuple( [ x for x in self.xmlfields ] ) + +################################################################################ +# Value +################################################################################ + +class BQValue (BQNode): + '''tag value''' + xmltag = "value" + xmlfields = ['value', 'type', 'index'] + + #def __init__(self, **kw): + # super(BQValue, self).__init__(**kw) + + def set_parent(self, parent): + if self.index is not None: + parent.values.extend([None for x in range((self.index+1)-len(parent.values))]) + parent.values[self.index] = self + else: + parent.values.append(self) + + def initializeXml(self, xmlnode): + super(BQValue, self).initializeXml(xmlnode) + try: + self.index = int(self.index) + except Exception: + self.index = None + self.value = xmlnode.text + + def toetree(self, parent, baseuri): + n = etree.SubElement(parent, 'value', ) + if self.type is not None: n.set('type', str(self.type)) + if self.index is not None: n.set('index', str(self.index)) + if self.value is not None: n.text = str(self.value) + return n + +################################################################################ +# Base class for bisque resources +################################################################################ + +class BQResource (BQNode): + '''Base class for Bisque resources''' + xmltag = 'resource' + xmlfields = ['name', 'type', 'uri', 'ts', 'resource_uniq'] + xmlkids = ['kids', 'tags', 'gobjects',] # 'values'] handled differently + + def 
__repr__(self): + return '(%s:%s)'%(self.xmltag, self.uri) #pylint: disable=no-member + + def __init__(self, *args, **kw): + self.tags = [] + self.gobjects = [] + self.kids = [] + self.values = [] + self.parent = None + super(BQResource, self).__init__(*args, **kw) + + def toDict (self): + objs = {} + objs.update ( [ (f.name, f) for f in self.tags if f.name ] ) + objs.update ( [ (f.name, f) for f in self.gobjects if f.name ] ) + return objs + + def set_parent(self, parent): + self.parent = parent + parent.kids.append(self) + + def addTag(self, name=None, value=None, type = None, tag=None): + if tag is None: + tag = BQTag(name=name, value=value, type = type) + tag.set_parent (self) + return tag + add_tag = addTag + + def addGObject(self, name=None, value=None, type = None, gob=None): + if gob is None: + gob = BQGObject(name=name, value=value, type = type) + gob.set_parent(self) + add_gob = addGObject + + + def findall (self, name, limit=None): + "find all name that match, options limit search tag, gobject or a kid" + limit = limit or ['tag', 'gobject', 'kid'] + results =[] + if 'tag' in limit: + for tg in self.tags: + if tg.name == name: + results.append( tg ) + if 'gobject' in limit: + for tg in self.gobjects: + if tg.name == name: + results.append( tg ) + if 'kid' in limit: + for tg in self.kids: + if tg.name == name: + results.append ( tg ) + return results + + def find(self, name, limit=None): + "Find first element and return options limit search tag, gobject or a kid" + limit = limit or ['tag', 'gobject', 'kid'] + if 'tag' in limit: + for tg in self.tags: + if tg.name == name: + return tg + if 'gobject' in limit: + for tg in self.gobjects: + if tg.name == name: + return tg + if 'kid' in limit: + for tg in self.kids: + if tg.name == name: + return tg + + + # def tag(self, name): + # results = [] + # for tg in self.tags: + # if tg.name == name: + # results.append(tg) + # if len(results) == 0: + # return None + # elif len(results) == 1: + # return results[0] + # else: + # return results + + # def gob(self, name): + # results = [] + # for tg in self.gobjects: + # if tg.name == name: + # results.append(tg) + # if len(results) == 0: + # return None + # elif len(results) == 1: + # return results[0] + # else: + # return results + + def get_value(self): + if len(self.values)==0: + return None + if len(self.values)==1: + return self.values[0].value + return [ x.value for x in self.values ] + + def set_value(self, values): + if not isinstance(values, list): + values = [ values ] + self.values = [ BQValue(*v) if isinstance(v, tuple) else BQValue(v) for v in values ] + + + value = property(get_value, set_value) + + def toetree(self, parent, baseuri): + xmlkids = list(self.xmlkids) + if len(self.values)<=1: + n = create_element(self, parent, baseuri) + else: + n = create_element(self, parent, baseuri) + if 'value' in n.attrib: + del n.attrib['value'] + xmlkids.append('values') + for kid_name in xmlkids: + for x in getattr(self, kid_name, None): + toxmlnode (x, n, baseuri) + return n + + +################################################################################ +# Image +################################################################################ + +class BQImage(BQResource): + xmltag = "image" + xmlfields = ['name', 'value', 'type', 'uri', 'ts', 'resource_uniq' ] # "x", "y","z", "t", "ch" ] + xmlkids = ['tags', 'gobjects'] + + def __init__(self, *args, **kw): + super(BQImage, self).__init__(*args, **kw) + self._geometry = None + self._meta = None + self._info = {} + + # returns etree XML 
with image metadata + def meta(self): + 'return image meta as xml' + if self._meta is None: + info = self.pixels().meta().fetch() + self._meta = etree.XML(info) + self._info = xml2nv(self._meta) + return self._meta + + # returns dict with image metadata name:value + def info(self): + 'return image meta as dict' + if self._meta is None: + self.meta() + return self._info + + def geometry(self): + 'return x,y,z,t,ch of image' + if self._geometry is None: + info = self.meta() + geom = [] + for n in 'xyztc': + tn = info.xpath('//tag[@name="image_num_%s"]' % n) + geom.append(tn[0].get('value')) + self._geometry = tuple(map(int, geom)) + return self._geometry + + def pixels(self): + return BQImagePixels(self) + + + +class BQImagePixels(object): + """manage requests to the image pixels""" + def __init__(self, image): + self.image = image + self.ops = [] + + def _construct_url(self): + """build the final url based on the operation + """ + image_service = self.image.session.service('image_service') + return image_service.construct (path = '%s?%s'%(self.image.resource_uniq, + '&'.join ( "%s=%s" % tp for tp in self.ops ))) + #return session.service_url('image_service', + # % (self.image.resource_uniq, '&'.join(self.ops))) + + def fetch(self, path=None, stream=False): + """resolve the current and fetch the pixel + """ + url = self._construct_url() + image_service = self.image.session.service ('image_service') + if path is not None: + stream = True + + response = image_service.fetch (url, stream=stream) + if path is not None: + with open (path, 'wb') as fb: + for block in response.iter_content(chunk_size = 16 * 1024 * 1024): #16MB + fb.write(block) + response.close() + else: + return response.content + + def command(self, operation, arguments=''): + self.ops.append((operation, arguments)) + return self + + def slice(self, x='', y='',z='',t=''): + """Slice the current image""" + return self.command('slice', '%s,%s,%s,%s' % (x,y,z,t)) + + def format(self, fmt): + return self.command('format', fmt) + + def resize(self, w='',h='', interpolation=''): + """ interpoaltion may be,[ NN|,BL|,BC][,AR] + """ + return self.command('resize', '%s,%s,%s' % (w,h,interpolation)) + + def localpath(self): + return self.command('localpath') + + def meta(self): + return self.command('meta') + + def info(self): + return self.command('info') + + def asarray(self): + try: + import tifffile + except ImportError: + log.error ("Please install Tifffile (Optional)") + return None + # Force format to be tiff by removing any format and append format tiff + self.ops = [ tp for tp in self.ops if tp[0] != 'format' ] + self.format ('tiff') + url = self._construct_url() + image_service = self.image.session.service ('image_service') + with image_service.fetch (url, stream=True) as response: + #response.raw.decode_content = True + return tifffile.imread (io.BytesIO (response.content)) + + def savearray (self, fname, imdata=None, imshape=None, dtype=None, **kwargs): + try: + import tifffile + except ImportError: + log.error ("Please install Tifffile (Optional)") + return None + import_service = self.image.session.service ('import_service') + imfile = tempfile.mkstemp (suffix='.tiff') + tifffile.imsave (imfile, imdata, imshape, dtype, **kwargs) + import_service.transfer (fname, fileobj = open (imfile, 'rb')) + os.remove (imfile) + + + + + +################################################################################ +# Tag +################################################################################ + +class BQTag (BQResource): + '''tag 
resource''' + xmltag = "tag" + xmlfields = ['name', 'value', 'type', 'uri', 'ts'] + xmlkids = ['tags', 'gobjects', ] # handle values specially + + def set_parent(self, parent): + self.parent = parent + parent.tags.append(self) + +# def get_value(self): +# if len(self.values)==0: +# return None +# if len(self.values)==1: +# return self.values[0].value +# return [ x.value for x in self.values ] +# def set_value(self, values): +# if not isinstance(values, list): +# self.values = [ BQValue(values)] +# else: +# self.values = [ BQValue(v) for v in values ] +# +# value = property(get_value, set_value) +# +# def toetree(self, parent, baseuri): +# xmlkids = list(self.xmlkids) +# if len(self.values)<=1: +# n = create_element(self, parent, baseuri) +# else: +# n = create_element(self, parent, baseuri) +# del n.attrib['value'] +# xmlkids.append('values') +# for kid_name in xmlkids: +# for x in getattr(self, kid_name, None): +# toxmlnode (x, n, baseuri) +# return n + + + + +################################################################################ +# GObject +################################################################################ + +class BQVertex (BQNode): + '''gobject vertex''' + type = 'vertex' + xmltag = "vertex" + xmlfields = ['x', 'y', 'z', 't', 'c', 'index'] + + def __init__(self, **kw): + self.fromObj(**kw) + + def __repr__(self): + return 'vertex(x:%s,y:%s,z:%s,t:%s)'%(self.x, self.y, self.z, self.t) + + def set_parent(self, parent): + self.parent = parent + parent.vertices.append(self) + + def toTuple(self): + return (self.x, self.y, self.z, self.t) + + def fromTuple(self, v): + x,y,z,t = v + self.x=x; self.y=y; self.z=z; self.t=t + + def fromObj(self, **kw): + for k,v in kw.items(): + if k in self.xmlfields: + setattr(self,k,v) + +class BQGObject(BQResource): + '''Gobject resource: A grpahical annotation''' + type = 'gobject' + xmltag = "gobject" + xmlfields = ['name', 'value', 'type', 'uri'] + xmlkids = ['tags', 'gobjects', 'vertices'] + + def __init__(self, *args, **kw): + super(BQGObject, self).__init__(*args, **kw) + self.name = None + self.vertices = [] + self.type= self.type or self.xmltag + + def __str__(self): + return '(type: %s, name: %s, %s)'%(self.type, self.name, self.vertices) + + def set_parent(self, parent): + self.parent = parent + parent.gobjects.append(self) + + def verticesAsTuples(self): + return [v.toTuple() for v in self.vertices ] + + def perimeter(self): + return -1 + + def area(self): + return -1 + + +class BQPoint (BQGObject): + '''point gobject resource''' + xmltag = "point" + +class BQLabel (BQGObject): + '''label gobject resource''' + xmltag = "label" + +class BQPolyline (BQGObject): + '''polyline gobject resource''' + xmltag = "polyline" + def perimeter(self): + vx = self.verticesAsTuples() + d = 0 + for i in range(0, len(vx)-1): + x1,y1,z1,t1 = vx[i] + x2,y2,z2,t2 = vx[i+1] + d += math.sqrt( math.pow(x2-x1,2.0) + math.pow(y2-y1,2.0) ) + return d + + +# only does 2D version right now, polygon area is flawed if the edges are intersecting +# implement better algorithm based on triangles +class BQPolygon (BQGObject): + '''Polygon gobject resource''' + xmltag = "polygon" + # only does 2D version right now + def perimeter(self): + vx = self.verticesAsTuples() + vx.append(vx[0]) + d = 0 + for i in range(0, len(vx)-1): + x1,y1,z1,t1 = vx[i] + x2,y2,z2,t2 = vx[i+1] + d += math.sqrt( math.pow(x2-x1,2.0) + math.pow(y2-y1,2.0) ) + return d + + # only does 2D version right now + # area is flawed if the edges are intersecting implement better algorithm based 
on triangles + def area(self): + vx = self.verticesAsTuples() + vx.append(vx[0]) + d = 0 + for i in range(0, len(vx)-1): + x1,y1,z1,t1 = vx[i] + x2,y2,z2,t2 = vx[i+1] + d += x1*y2 - y1*x2 + return 0.5 * math.fabs(d) + +class BQCircle (BQGObject): + '''circle gobject resource''' + xmltag = "circle" + def perimeter(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + return 2.0 * math.pi * max(math.fabs(x1-x2), math.fabs(y1-y2)) + + def area(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + return math.pi * pow( max(math.fabs(x1-x2), math.fabs(y1-y2)), 2.0) + +class BQEllipse (BQGObject): + '''ellipse gobject resource''' + xmltag = "ellipse" + type = 'ellipse' + + def perimeter(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + x3,y3,z3,t3 = vx[2] + a = max(math.fabs(x1-x2), math.fabs(y1-y2)) + b = max(math.fabs(x1-x3), math.fabs(y1-y3)) + return math.pi * ( 3.0*(a+b) - math.sqrt( 10.0*a*b + 3.0*(a*a + b*b)) ) + + def area(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + x3,y3,z3,t3 = vx[2] + a = max(math.fabs(x1-x2), math.fabs(y1-y2)) + b = max(math.fabs(x1-x3), math.fabs(y1-y3)) + return math.pi * a * b + +class BQRectangle (BQGObject): + '''rectangle gobject resource''' + xmltag = "rectangle" + def perimeter(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + return math.fabs(x1-x2)*2.0 + math.fabs(y1-y2)*2.0 + + def area(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + return math.fabs(x1-x2) * math.fabs(y1-y2) + +class BQSquare (BQRectangle): + '''square gobject resource''' + xmltag = "square" + + +################################################################################ +# Advanced Objects +################################################################################ +class BQDataset(BQResource): + xmltag = "dataset" + #xmlfields = ['name', 'uri', 'ts'] + #xmlkids = ['kids', 'tags', 'gobjects'] + +class BQUser(BQResource): + xmltag = "user" + #xmlfields = ['name', 'uri', 'ts'] + #xmlkids = ['tags', 'gobjects'] + +class BQMex(BQResource): + xmltag = "mex" + #xmlfields = ['module', 'uri', 'ts', 'value'] + #xmlkids = ['tags', 'gobjects'] + +################################################################################ +# Factory +################################################################################ + +class BQFactory (object): + '''Factory for Bisque resources''' + resources = dict([ (x[1].xmltag, x[1]) for x in inspect.getmembers(sys.modules[__name__]) if inspect.isclass(x[1]) and hasattr(x[1], 'xmltag') ]) + + def __init__(self, session): + self.session = session + + @classmethod + def make(cls, xmltag, type_attr): + if xmltag == "gobject" and type_attr in gobject_primitives: + xmltag = type_attr + c = cls.resources.get(xmltag, BQResource) + return c() + + index_map = dict(vertex=('vertices',BQVertex), tag=('tags', BQTag)) + @classmethod + def index(cls, xmltag, parent, indx): + array, ctor = cls.index_map.get (xmltag, (None,None)) + if array: + objarr = getattr(parent, array) + objarr.extend ([ ctor() for x in range(((indx+1)-len(objarr)))]) + v = objarr[indx] + v.indx = indx + #log.debug ('fetching %s %s[%d]:%s' %(parent , array, indx, v)) + return v + + ################################################################################ + # Parsing + ################################################################################ + + def from_etree (self, xmlResource, 
resource=None, parent=None ): + """ Convert an etree to a python structure""" + stack = []; + resources = []; + # Initialize stack with a tuple of + # 1. The XML node being parsed + # 2. The current resource being filled outs + # 3. The parent resource if any + stack.append ( (xmlResource, resource, parent ) ) + while stack: + node, resource, parent = stack.pop(0); + xmltag = node.tag; + if resource is None: + type_ = node.get( 'type', '') + resource = self.make(xmltag, type_) + + resource.session = self.session + resource.initializeXml(node) + resources.append (resource) + if parent: + resource.set_parent(parent) + #resource.doc = parent.doc; + for k in node: + stack.append( (k, None, resource) ) + + resources[0].initialize() + resources[0].xmltree = xmlResource + return resources[0]; + def from_string (self, xmlstring): + et = etree.XML (xmlstring) + return self.from_etree(et) + + # Generation + + @classmethod + def to_etree(self, dbo, parent=None, baseuri='', view=''): + """Convert a BQObject to an etree object suitable for XML + generation + """ + node = toxmlnode(dbo, parent, baseuri, view) + return node; + @classmethod + def to_string (self, node): + if isinstance (node, BQNode): + node = self.to_etree(node) + return etree.tostring(node) + + @classmethod + def string2etree(self, xmlstring): + return etree.XML (xmlstring) + + +def create_element(dbo, parent, baseuri, **kw): + """Create an etree element from BQ object + """ + xtag = kw.pop('xtag', dbo.xmltag) + if not kw: + kw = model_fields (dbo, baseuri) + + if parent is not None: + node = etree.SubElement (parent, xtag, **kw) + else: + node = etree.Element (xtag, **kw) + return node + +def toxmlnode (dbo, parent, baseuri, view=None): + if hasattr(dbo, 'toetree'): + node = dbo.toetree(parent, baseuri) + else: + node = create_element (dbo, parent, baseuri) + for kid_name in dbo.xmlkids: + for x in getattr(dbo, kid_name, None): + toxmlnode (x, node, baseuri, view) + return node + + +def make_owner (dbo, fn, baseuri): + return ('owner', baseuri + str(dbo.owner)) + +def make_uri(dbo, fn, baseuri): + return ('uri', "%s%s" % (baseuri , str (dbo.uri))) + +def get_email (dbo, fn, baseuri): + return ('email', dbo.user.email_address) + +mapping_fields = { + 'mex' : None, + 'acl' : None, + # Auth + 'user_id' : get_email, + 'taggable_id': None, + 'permission': 'action', + 'resource': None, + } + +def model_fields(dbo, baseuri=None): + """Extract known fields from a BQ object, while removing any known + from C{excluded_fields} + + @rtype: dict + @return fields to be rendered in XML + """ + attrs = {} + try: + dbo_fields = dbo.xmlfields + except AttributeError: + # This occurs when the object is a fake DB objects + # The dictionary is sufficient + dbo_fields= dbo.__dict__ + for fn in dbo_fields: + fn = mapping_fields.get(fn, fn) + # Skip when map is None + if fn is None: + continue + # Map is callable, then call + if callable(fn): + fn, attr_val = fn(dbo, fn, baseuri) + else: + attr_val = getattr(dbo, fn, None) + + # Put value in attribute dictionary + if attr_val is not None and attr_val!='': + if isinstance(attr_val, basestring): + attrs[fn] = attr_val + else: + attrs[fn] = str(attr_val) #unicode(attr_val,'utf-8') + return attrs diff --git a/nph_5class/bqapi/bqfeature.py b/nph_5class/bqapi/bqfeature.py new file mode 100644 index 0000000..f526c1a --- /dev/null +++ b/nph_5class/bqapi/bqfeature.py @@ -0,0 +1,416 @@ + +#import threading +from threading import Thread +import socket +import errno +import tempfile +import urllib.request, urllib.parse, 
urllib.error +import os +from math import ceil +import queue +import logging +import warnings +from collections import namedtuple + +#import numpy as np + +from .exception import BQCommError + +try: #checks for lxml if not found uses python xml + from lxml import etree +except ImportError: + from xml.etree import ElementTree as etree + +log = logging.getLogger('bqapi.bqfeature') + +#requires pytables to run this portion of the api +try: + import tables +except ImportError: + warnings.warn("Pytables was not found! bqfeatures requires pytables!") + +#max requests attemps if the connection is drop when making parallel requests +MAX_ATTEMPTS = 5 + +FeatureResource = namedtuple('FeatureResource',['image','mask','gobject']) +FeatureResource.__new__.__defaults__ = (None, None, None) + +class FeatureError(Exception): + """ + Feature Communication Exception + """ + +class Feature(object): + + def fetch(self, session, name, resource_list, path=None): + """ + Requests the feature server to calculate features on provided resources. + + @param: session - the local session + @param: name - the name of the feature one wishes to extract + @param: resource_list - list of the resources to extract. format: + [(image_url, mask_url, gobject_url),...] if a parameter is + not required just provided None + @param: path - the location were the hdf5 file is stored. If None is set the file will be placed in a tempfile and the pytables + file handle will be returned. (default: None) + + @return: returns either a pytables file handle or the file name when the path is provided + """ + url = '%s/features/%s/hdf'%(session.bisque_root,name) + + resource = etree.Element('resource') + for (image, mask, gobject) in resource_list: + sub = etree.SubElement(resource, 'feature') + query = [] + if image: query.append('image=%s' % urllib.parse.quote(image)) + if mask: query.append('mask=%s' % urllib.parse.quote(mask)) + if gobject: query.append('gobject=%s' % urllib.parse.quote(gobject)) + query = '&'.join(query) + sub.attrib['uri'] = '%s?%s'%(url,query) + + log.debug('Fetch Feature %s for %s resources'%(name, len(resource_list))) + + if path is None: + f = tempfile.NamedTemporaryFile(suffix='.h5', dir=tempfile.gettempdir(), delete=False) + f.close() + session.c.push(url, content=etree.tostring(resource), headers={'Content-Type':'text/xml', 'Accept':'application/x-bag'}, path=f.name) + return tables.open_file(f.name,'r') + log.debug('Returning feature response to %s' % path) + return session.c.push(url, content=etree.tostring(resource), headers={'Content-Type':'text/xml', 'Accept':'application/x-bag'}, path=path) + + + + def fetch_vector(self, session, name, resource_list): + """ + Requests the feature server to calculate features on provided resources. Designed more for + requests of very view features. + + @param: session - the local session + @param: name - the name of the feature one wishes to extract + @param: resource_list - list of the resources to extract. format: + [(image_url, mask_url, gobject_url),...] if a parameter is + not required just provided None + + @return: a list of features as numpy array + + @exception: FeatureError - if any part of the request has an error the FeatureError will be raised on the + first error. + note: You can use fetch and read from the status table for the error. 
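For reference, a minimal usage sketch of the Feature API above (assumptions: a reachable Bisque server and valid credentials, 'HTG' as a placeholder feature name, a hypothetical image URI, and BQSession.init_local as the usual bqapi login helper):

from bqapi.comm import BQSession
from bqapi.bqfeature import Feature, FeatureResource

session = BQSession().init_local('user', 'pass', bisque_root='https://bisque.example.org')
resources = [FeatureResource(image='https://bisque.example.org/image_service/00-abc123')]
# fetch() returns an open pytables handle (or the file name when path= is given);
# fetch_vector() returns the feature vectors as a numpy array and removes the temp file.
vectors = Feature().fetch_vector(session, 'HTG', resources)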
+ warning: fetch_vector will not return response if an error occurs within the request + """ + hdf5 = self.fetch(session, name, resource_list) + status = hdf5.root.status + index = status.getWhereList('status>=400') + if index.size>0: #returns the first error that occurs + status = status[index[0]][0] + hdf5.close() + os.remove(hdf5.filename) #remove file from temp directory + raise FeatureError('%s:Error occured during feature calculations' % status) + table = hdf5.root.values + status_table = hdf5.root.status + feature_vector = table[:]['feature'] + hdf5.close() + os.remove(hdf5.filename) #remove file from temp directory + return feature_vector + + @staticmethod + def length(session, name): + """ + Returns the length of the feature + + @param: session - the local session + @param: name - the name of the feature one wishes to extract + + @return: feature length + """ + xml = session.fetchxml('/features/%s'%name) + return int(xml.find('feature/tag[@name="feature_length"]').attrib.get('value')) + +class ParallelFeature(Feature): + + MaxThread = 8 + MaxChunk = 2000 + MinChunk = 25 + + def __init__(self): + super(ParallelFeature, self).__init__() + + + class BQRequestThread(Thread): + """ + Single Thread + """ + def __init__(self, request_queue, errorcb=None): + """ + @param: requests_queue - a queue of requests functions + @param: errorcb - a call back that is called if a BQCommError is raised + """ + self.request_queue = request_queue + + if errorcb is not None: + self.errorcb = errorcb + else: + + def error_callback(e): + """ + Default callback function + + @param: e - BQCommError object + """ + pass + + self.errorcb = error_callback + super(ParallelFeature.BQRequestThread, self).__init__() + + + def run(self): + while True: + if not self.request_queue.empty(): + request = self.request_queue.get() + try: + request() + except BQCommError as e: + self.errorcb(e) + else: + break + + + def request_thread_pool(self, request_queue, errorcb=None, thread_count = MaxThread): + """ + Runs the BQRequestThread + + @param: request_queue - a queue of request functions + @param: errorcb - is called back when a BQCommError is raised + """ + jobs = [] + log.debug('Starting Thread Pool') + for _ in range(thread_count): + r = self.BQRequestThread(request_queue, errorcb) + r.daemon = True + jobs.append(r) + r.start() + + for j in jobs: + j.join() + log.debug('Rejoining %s threads'%len(jobs)) + return + + + def set_thread_num(self, n): + """ + Overrides the internal thread parameters, chunk size must also + be set to override the request parameters + + @param: n - the number of requests made at once + """ + self.thread_num = n + + + def set_chunk_size(self, n): + """ + Overrides the chunk size, thread num must also + be set to override the request parameters + + @param: n - the size of each request + """ + self.chunk_size = n + + + def calculate_request_plan(self, l): + """ + Tries to figure out the best configuration + of concurrent requests and sizes of those + requests based on the size of the total request + and pre-set parameters + + @param: l - the list of requests + + @return: chunk_size - the amount of resources for request + @return: thread_num - the amount of concurrent requests + """ + if len(l)>self.MaxThread*self.MaxChunk: + return (self.MaxThread, self.MaxChunk) + else: + if len(l)/float(self.MaxThread)>=self.MinChunk: + return (self.MaxThread, ceil(self.MaxChunk/float(self.MaxThread))) + else: + t = ceil(len(l)/float(self.MinChunk)) + return (t, ceil(len(l)/float(t))) + + + def chunk(self, l, 
chunk_size): + """ + @param: l - list + @return: list of resource and sets the amount of parallel requests + """ + for i in range(0, len(l), chunk_size): + yield l[i:i+chunk_size] + + + def fetch(self, session, name, resource_list, path=None): + """ + Requests the feature server to calculate provided resources. + The request will be boken up according to the chunk size + and made in parallel depending on the amount of threads. + + @param: session - the local session + @param: name - the name of the feature one wishes to extract + @param: resource_list - list of the resources to extract. format: [(image_url, mask_url, gobject_url),...] if a parameter is + not required just provided None + @param: path - the location were the hdf5 file is stored. If None is set the file will be placed in a tempfile and the pytables + file handle will be returned. (default: None) + + @return: returns either a pytables file handle or the file name when the path is provided + """ + if len(resource_list) < 1: + log.warning('Warning no resources were provided') + return + + log.debug('Exctracting %s on %s resources'%(name,len(resource_list))) + + if path is None: + f = tempfile.TemporaryFile(suffix='.h5', dir=tempfile.gettempdir()) + f.close() + table_path = f.name + else: + table_path = path + + stop_write_thread = False #sets a flag to stop the write thread + # when the requests threads have finished + + class WriteHDF5Thread(Thread): + """ + Copies small hdf5 feature tables + into one large hdf5 feature table + """ + + def __init__(self, h5_filename_queue): + """ + param h5_filename_queue: a queue of temporary hdf5 files + """ + self.h5_filename_queue = h5_filename_queue + tables.open_file(table_path, 'w').close() #creates a new file + super(WriteHDF5Thread, self).__init__() + + def run(self): + """ + While queue is not empty and stop_write_thread + has not been set to true, the thread will open + temporary hdf5 tables and copy them into the + main hdf5 table and then delete the temporary file. 
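As a usage sketch of the parallel variant (same hedged assumptions as the Feature example above; per the setters defined earlier, both set_thread_num and set_chunk_size must be set to override the automatic request plan):

from bqapi.comm import BQSession
from bqapi.bqfeature import ParallelFeature, FeatureResource

session = BQSession().init_local('user', 'pass', bisque_root='https://bisque.example.org')
image_uris = ['https://bisque.example.org/image_service/00-abc123']   # placeholder URIs
resources = [FeatureResource(image=u) for u in image_uris]

pf = ParallelFeature()
pf.set_thread_num(4)     # up to 4 concurrent requests
pf.set_chunk_size(100)   # 100 resources per request
hdf5 = pf.fetch(session, 'HTG', resources)   # pytables handle of the merged table; pass path= to keep the HDF5 file
hdf5.close()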
+ """ + while True: + if not self.h5_filename_queue.empty(): + temp_path = self.h5_filename_queue.get() + log.debug('Writing %s to %s' % (temp_path, table_path)) + try: + with tables.open_file(temp_path, 'a') as hdf5temp: + with tables.open_file(table_path, 'a') as hdf5: + temp_table = hdf5temp.root.values + temp_status_table = hdf5temp.root.status + if not hasattr(hdf5.root, 'values'): + temp_table.copy(hdf5.root,'values') + temp_status_table.copy(hdf5.root,'status') + else: + table = hdf5.root.values + status_table = hdf5.root.status + table.append(temp_table[:]) + status_table.append(temp_status_table[:]) + table.flush() + status_table.flush() + except Exception as e: + log.exception('Could not read hdf5 file') + finally: + log.debug('Clean up: removing %s' % temp_path) + if os.path.exists(temp_path): + os.remove(temp_path) + + if stop_write_thread is True: + log.debug('Ending HDF5 write thread') + break + + write_queue = queue.Queue() + request_queue = queue.Queue() + + def request_factory(partial_resource_list): + def request(): + f = tempfile.NamedTemporaryFile(suffix='.h5', dir=tempfile.gettempdir(), delete=False) + f.close() + attempts = 0 + while True: + try: + path = super(ParallelFeature, self).fetch(session, name, partial_resource_list, path=f.name) + except socket.error as e: #if connection fails + if attempts>MAX_ATTEMPTS: + log.debug('Connection fail: Reached max attempts') + break + if e.errno == errno.WSAECONNRESET: #pylint: disable=no-member + attempts+=1 + log.debug('Connection fail: Attempting to reconnect (try: %s)' % attempts) + try: + tables.is_pytables_file(path) + except tables.HDF5ExtError: #if fail gets corrupts during download + if attempts>MAX_ATTEMPTS: + log.debug('Failed to open hdf5 file: Reached max attempts') + break + attempts+=1 + log.debug('HDF5 file may be corrupted: Attempted to redownload (try: %s)' % attempts) + if os.path.exists(path): + os.remove(path) + + write_queue.put(path) + break + + return request + + if hasattr(self,'thread_num') and hasattr(self,'chunk_size'): + thread_num = ceil(self.thread_num) + chunk_size = ceil(self.chunk_size) + + if thread_num <= 0: thread_num = 1 + if chunk_size <= 0: chunk_size = 1 + + else: + thread_num, chunk_size = self.calculate_request_plan(resource_list) + + for partial_resource_list in self.chunk(resource_list, int(chunk_size)): + request_queue.put(request_factory(partial_resource_list)) + + w = WriteHDF5Thread(write_queue) + log.debug('Starting HDF5 write thread') + w.daemon = True + w.start() + + self.request_thread_pool(request_queue, errorcb=self.errorcb, thread_count=int(thread_num)) + stop_write_thread = True + w.join() + + log.debug('Returning parallel feature response to %s' % table_path) + + if path is None: + return tables.open_file(table_path, 'r') + else: + return table_path + + def errorcb(self, e): + """ + Returns an error log + """ + log.warning('%s'%str(e)) + + + def fetch_vector(self, session, name, resource_list): + """ + Requests the feature server to calculate provided resources. + The request will be boken up according to the chunk size + and made in parallel depending on the amount of threads. + + @param: session - the local session + @param: name - the name of the feature one wishes to extract + @param: resource_list - list of the resources to extract. format: + [(image_url, mask_url, gobject_url),...] 
if a parameter is + not required just provided None + + @return: a list of features as numpy array + """ + return super(ParallelFeature, self).fetch_vector(session, name, resource_list) diff --git a/nph_5class/bqapi/bqfeature.py.bak b/nph_5class/bqapi/bqfeature.py.bak new file mode 100644 index 0000000..233054e --- /dev/null +++ b/nph_5class/bqapi/bqfeature.py.bak @@ -0,0 +1,416 @@ + +#import threading +from threading import Thread +import socket +import errno +import tempfile +import urllib +import os +from math import ceil +import Queue +import logging +import warnings +from collections import namedtuple + +#import numpy as np + +from .exception import BQCommError + +try: #checks for lxml if not found uses python xml + from lxml import etree +except ImportError: + from xml.etree import ElementTree as etree + +log = logging.getLogger('bqapi.bqfeature') + +#requires pytables to run this portion of the api +try: + import tables +except ImportError: + warnings.warn("Pytables was not found! bqfeatures requires pytables!") + +#max requests attemps if the connection is drop when making parallel requests +MAX_ATTEMPTS = 5 + +FeatureResource = namedtuple('FeatureResource',['image','mask','gobject']) +FeatureResource.__new__.__defaults__ = (None, None, None) + +class FeatureError(Exception): + """ + Feature Communication Exception + """ + +class Feature(object): + + def fetch(self, session, name, resource_list, path=None): + """ + Requests the feature server to calculate features on provided resources. + + @param: session - the local session + @param: name - the name of the feature one wishes to extract + @param: resource_list - list of the resources to extract. format: + [(image_url, mask_url, gobject_url),...] if a parameter is + not required just provided None + @param: path - the location were the hdf5 file is stored. If None is set the file will be placed in a tempfile and the pytables + file handle will be returned. (default: None) + + @return: returns either a pytables file handle or the file name when the path is provided + """ + url = '%s/features/%s/hdf'%(session.bisque_root,name) + + resource = etree.Element('resource') + for (image, mask, gobject) in resource_list: + sub = etree.SubElement(resource, 'feature') + query = [] + if image: query.append('image=%s' % urllib.quote(image)) + if mask: query.append('mask=%s' % urllib.quote(mask)) + if gobject: query.append('gobject=%s' % urllib.quote(gobject)) + query = '&'.join(query) + sub.attrib['uri'] = '%s?%s'%(url,query) + + log.debug('Fetch Feature %s for %s resources'%(name, len(resource_list))) + + if path is None: + f = tempfile.NamedTemporaryFile(suffix='.h5', dir=tempfile.gettempdir(), delete=False) + f.close() + session.c.push(url, content=etree.tostring(resource), headers={'Content-Type':'text/xml', 'Accept':'application/x-bag'}, path=f.name) + return tables.open_file(f.name,'r') + log.debug('Returning feature response to %s' % path) + return session.c.push(url, content=etree.tostring(resource), headers={'Content-Type':'text/xml', 'Accept':'application/x-bag'}, path=path) + + + + def fetch_vector(self, session, name, resource_list): + """ + Requests the feature server to calculate features on provided resources. Designed more for + requests of very view features. + + @param: session - the local session + @param: name - the name of the feature one wishes to extract + @param: resource_list - list of the resources to extract. format: + [(image_url, mask_url, gobject_url),...] 
if a parameter is + not required just provided None + + @return: a list of features as numpy array + + @exception: FeatureError - if any part of the request has an error the FeatureError will be raised on the + first error. + note: You can use fetch and read from the status table for the error. + warning: fetch_vector will not return response if an error occurs within the request + """ + hdf5 = self.fetch(session, name, resource_list) + status = hdf5.root.status + index = status.getWhereList('status>=400') + if index.size>0: #returns the first error that occurs + status = status[index[0]][0] + hdf5.close() + os.remove(hdf5.filename) #remove file from temp directory + raise FeatureError('%s:Error occured during feature calculations' % status) + table = hdf5.root.values + status_table = hdf5.root.status + feature_vector = table[:]['feature'] + hdf5.close() + os.remove(hdf5.filename) #remove file from temp directory + return feature_vector + + @staticmethod + def length(session, name): + """ + Returns the length of the feature + + @param: session - the local session + @param: name - the name of the feature one wishes to extract + + @return: feature length + """ + xml = session.fetchxml('/features/%s'%name) + return int(xml.find('feature/tag[@name="feature_length"]').attrib.get('value')) + +class ParallelFeature(Feature): + + MaxThread = 8 + MaxChunk = 2000 + MinChunk = 25 + + def __init__(self): + super(ParallelFeature, self).__init__() + + + class BQRequestThread(Thread): + """ + Single Thread + """ + def __init__(self, request_queue, errorcb=None): + """ + @param: requests_queue - a queue of requests functions + @param: errorcb - a call back that is called if a BQCommError is raised + """ + self.request_queue = request_queue + + if errorcb is not None: + self.errorcb = errorcb + else: + + def error_callback(e): + """ + Default callback function + + @param: e - BQCommError object + """ + pass + + self.errorcb = error_callback + super(ParallelFeature.BQRequestThread, self).__init__() + + + def run(self): + while True: + if not self.request_queue.empty(): + request = self.request_queue.get() + try: + request() + except BQCommError as e: + self.errorcb(e) + else: + break + + + def request_thread_pool(self, request_queue, errorcb=None, thread_count = MaxThread): + """ + Runs the BQRequestThread + + @param: request_queue - a queue of request functions + @param: errorcb - is called back when a BQCommError is raised + """ + jobs = [] + log.debug('Starting Thread Pool') + for _ in range(thread_count): + r = self.BQRequestThread(request_queue, errorcb) + r.daemon = True + jobs.append(r) + r.start() + + for j in jobs: + j.join() + log.debug('Rejoining %s threads'%len(jobs)) + return + + + def set_thread_num(self, n): + """ + Overrides the internal thread parameters, chunk size must also + be set to override the request parameters + + @param: n - the number of requests made at once + """ + self.thread_num = n + + + def set_chunk_size(self, n): + """ + Overrides the chunk size, thread num must also + be set to override the request parameters + + @param: n - the size of each request + """ + self.chunk_size = n + + + def calculate_request_plan(self, l): + """ + Tries to figure out the best configuration + of concurrent requests and sizes of those + requests based on the size of the total request + and pre-set parameters + + @param: l - the list of requests + + @return: chunk_size - the amount of resources for request + @return: thread_num - the amount of concurrent requests + """ + if 
len(l)>self.MaxThread*self.MaxChunk: + return (self.MaxThread, self.MaxChunk) + else: + if len(l)/float(self.MaxThread)>=self.MinChunk: + return (self.MaxThread, ceil(self.MaxChunk/float(self.MaxThread))) + else: + t = ceil(len(l)/float(self.MinChunk)) + return (t, ceil(len(l)/float(t))) + + + def chunk(self, l, chunk_size): + """ + @param: l - list + @return: list of resource and sets the amount of parallel requests + """ + for i in xrange(0, len(l), chunk_size): + yield l[i:i+chunk_size] + + + def fetch(self, session, name, resource_list, path=None): + """ + Requests the feature server to calculate provided resources. + The request will be boken up according to the chunk size + and made in parallel depending on the amount of threads. + + @param: session - the local session + @param: name - the name of the feature one wishes to extract + @param: resource_list - list of the resources to extract. format: [(image_url, mask_url, gobject_url),...] if a parameter is + not required just provided None + @param: path - the location were the hdf5 file is stored. If None is set the file will be placed in a tempfile and the pytables + file handle will be returned. (default: None) + + @return: returns either a pytables file handle or the file name when the path is provided + """ + if len(resource_list) < 1: + log.warning('Warning no resources were provided') + return + + log.debug('Exctracting %s on %s resources'%(name,len(resource_list))) + + if path is None: + f = tempfile.TemporaryFile(suffix='.h5', dir=tempfile.gettempdir()) + f.close() + table_path = f.name + else: + table_path = path + + stop_write_thread = False #sets a flag to stop the write thread + # when the requests threads have finished + + class WriteHDF5Thread(Thread): + """ + Copies small hdf5 feature tables + into one large hdf5 feature table + """ + + def __init__(self, h5_filename_queue): + """ + param h5_filename_queue: a queue of temporary hdf5 files + """ + self.h5_filename_queue = h5_filename_queue + tables.open_file(table_path, 'w').close() #creates a new file + super(WriteHDF5Thread, self).__init__() + + def run(self): + """ + While queue is not empty and stop_write_thread + has not been set to true, the thread will open + temporary hdf5 tables and copy them into the + main hdf5 table and then delete the temporary file. 
+ """ + while True: + if not self.h5_filename_queue.empty(): + temp_path = self.h5_filename_queue.get() + log.debug('Writing %s to %s' % (temp_path, table_path)) + try: + with tables.open_file(temp_path, 'a') as hdf5temp: + with tables.open_file(table_path, 'a') as hdf5: + temp_table = hdf5temp.root.values + temp_status_table = hdf5temp.root.status + if not hasattr(hdf5.root, 'values'): + temp_table.copy(hdf5.root,'values') + temp_status_table.copy(hdf5.root,'status') + else: + table = hdf5.root.values + status_table = hdf5.root.status + table.append(temp_table[:]) + status_table.append(temp_status_table[:]) + table.flush() + status_table.flush() + except StandardError as e: + log.exception('Could not read hdf5 file') + finally: + log.debug('Clean up: removing %s' % temp_path) + if os.path.exists(temp_path): + os.remove(temp_path) + + if stop_write_thread is True: + log.debug('Ending HDF5 write thread') + break + + write_queue = Queue.Queue() + request_queue = Queue.Queue() + + def request_factory(partial_resource_list): + def request(): + f = tempfile.NamedTemporaryFile(suffix='.h5', dir=tempfile.gettempdir(), delete=False) + f.close() + attempts = 0 + while True: + try: + path = super(ParallelFeature, self).fetch(session, name, partial_resource_list, path=f.name) + except socket.error as e: #if connection fails + if attempts>MAX_ATTEMPTS: + log.debug('Connection fail: Reached max attempts') + break + if e.errno == errno.WSAECONNRESET: #pylint: disable=no-member + attempts+=1 + log.debug('Connection fail: Attempting to reconnect (try: %s)' % attempts) + try: + tables.is_pytables_file(path) + except tables.HDF5ExtError: #if fail gets corrupts during download + if attempts>MAX_ATTEMPTS: + log.debug('Failed to open hdf5 file: Reached max attempts') + break + attempts+=1 + log.debug('HDF5 file may be corrupted: Attempted to redownload (try: %s)' % attempts) + if os.path.exists(path): + os.remove(path) + + write_queue.put(path) + break + + return request + + if hasattr(self,'thread_num') and hasattr(self,'chunk_size'): + thread_num = ceil(self.thread_num) + chunk_size = ceil(self.chunk_size) + + if thread_num <= 0: thread_num = 1 + if chunk_size <= 0: chunk_size = 1 + + else: + thread_num, chunk_size = self.calculate_request_plan(resource_list) + + for partial_resource_list in self.chunk(resource_list, int(chunk_size)): + request_queue.put(request_factory(partial_resource_list)) + + w = WriteHDF5Thread(write_queue) + log.debug('Starting HDF5 write thread') + w.daemon = True + w.start() + + self.request_thread_pool(request_queue, errorcb=self.errorcb, thread_count=int(thread_num)) + stop_write_thread = True + w.join() + + log.debug('Returning parallel feature response to %s' % table_path) + + if path is None: + return tables.open_file(table_path, 'r') + else: + return table_path + + def errorcb(self, e): + """ + Returns an error log + """ + log.warning('%s'%str(e)) + + + def fetch_vector(self, session, name, resource_list): + """ + Requests the feature server to calculate provided resources. + The request will be boken up according to the chunk size + and made in parallel depending on the amount of threads. + + @param: session - the local session + @param: name - the name of the feature one wishes to extract + @param: resource_list - list of the resources to extract. format: + [(image_url, mask_url, gobject_url),...] 
if a parameter is + not required just provided None + + @return: a list of features as numpy array + """ + return super(ParallelFeature, self).fetch_vector(session, name, resource_list) diff --git a/nph_5class/bqapi/bqnode.py b/nph_5class/bqapi/bqnode.py new file mode 100644 index 0000000..8689363 --- /dev/null +++ b/nph_5class/bqapi/bqnode.py @@ -0,0 +1,738 @@ +############################################################################### +## Bisquik ## +## Center for Bio-Image Informatics ## +## University of California at Santa Barbara ## +## ------------------------------------------------------------------------- ## +## ## +## Copyright (c) 2007 by the Regents of the University of California ## +## All rights reserved ## +## ## +## Redistribution and use in source and binary forms, with or without ## +## modification, are permitted provided that the following conditions are ## +## met: ## +## ## +## 1. Redistributions of source code must retain the above copyright ## +## notice, this list of conditions, and the following disclaimer. ## +## ## +## 2. Redistributions in binary form must reproduce the above copyright ## +## notice, this list of conditions, and the following disclaimer in ## +## the documentation and/or other materials provided with the ## +## distribution. ## +## ## +## 3. All advertising materials mentioning features or use of this ## +## software must display the following acknowledgement: This product ## +## includes software developed by the Center for Bio-Image Informatics## +## University of California at Santa Barbara, and its contributors. ## +## ## +## 4. Neither the name of the University nor the names of its ## +## contributors may be used to endorse or promote products derived ## +## from this software without specific prior written permission. ## +## ## +## THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS "AS IS" AND ANY ## +## EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ## +## WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ARE ## +## DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ## +## ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL ## +## DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS ## +## OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ## +## HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ## +## STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ## +## ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ## +## POSSIBILITY OF SUCH DAMAGE. 
## +## ## +############################################################################### +""" +BQ API - a set of classes that represent Bisque objects + +""" + +__module__ = "bqnode.py" +__author__ = "Dmitry Fedorov and Kris Kvilekval" +__version__ = "0.1" +__revision__ = "$Rev$" +__date__ = "$Date$" +__copyright__ = "Center for BioImage Informatics, University California, Santa Barbara" + +import sys +import math +import inspect +import logging +from urllib.parse import quote +from lxml import etree +import collections + +log = logging.getLogger('bqapi.bqnode') + +__all__ = [ 'BQFactory', 'BQNode', 'BQImage', 'BQResource', 'BQValue', 'BQTag', 'BQVertex', 'BQGObject', + "BQDataset", "BQUser", "BQMex", + 'gobject_primitives', + 'BQPoint', 'BQLabel', 'BQPolyline', 'BQPolygon', 'BQCircle', 'BQEllipse', 'BQRectangle', 'BQSquare'] + +gobject_primitives = set(['point', 'label', 'polyline', 'polygon', 'circle', 'ellipse', 'rectangle', 'square']) + + +################################################################################ +# Base class for bisque resources +################################################################################ + +class BQNode (etree.ElementBase): + '''Base class for parsing Bisque XML''' + TAG = xmltag = 'NODE' + xmlfields = [] + xmlkids = [] + + + def __getattr__(self, name): + if name in self.xmlfields: + return self.get (name) + ###return etree.ElementBase.__getattr__(self, name) + #super(etree.ElementBase, self).__getattr__(name) + + def __setattr__(self, name, val): + if name in self.xmlfields: + return self.set (name, val) + print("SETTER") + object.__setattr__(self, name, val) + + def initialize(self): + 'used for class post parsing initialization' + pass + + def initializeXml(self, xmlnode): + for x in self.xmlfields: + setattr(self, x, xmlnode.get (x, None)) + + def set_parent(self, parent): + pass + + def __repr__(self): + return '(%s#%s)' % (self.TAG, len(self)) + + def __str__(self): + return etree.tostring(self) + # return '(%s:%s:%s)'%(self.TAG,','.join (['%s=%s' % (f, getattr(self,f,'')) for f in self.attrib]), [ str (q) for q in self ] ) + + def toTuple (self): + return tuple( [ x for x in self.attr ] ) + + + + +################################################################################ +# Base class for bisque resources +################################################################################ + +class BQResource (BQNode): + '''Base class for Bisque resources''' + TAG = xmltag = 'resource' + xmlfields = ['name', 'values', 'type', 'uri', 'ts', 'resource_uniq'] + xmlkids = ['kids', 'tags', 'gobjects'] + + #def __repr__(self): + # return '(%s:%s)'%(self.xmltag, self.uri) + + + def get_tags(self): + return [ x for x in self.iter ('tag') ] + tags = property(get_tags) + def get_gobjects(self): + return [ x for x in self.iter ('gobject') ] + gobjects = property(get_gobjects) + def get_kids(self): + return [ x for x in self.iter () ] + kids = property(get_kids) + + #def __init__(self): + # self.tags = [] + # self.gobjects = [] + # self.kids = [] + + def toDict (self): + objs = {} + objs.update ( [ (f.name, f) for f in self.tags if f.name ] ) + objs.update ( [ (f.name, f) for f in self.gobjects if f.name ] ) + return objs + + def set_parent(self, parent): + parent.kids.append(self) + + def add_tag(self, name=None, value=None, type=None, tag=None): + if tag is None: + args = dict ( name=name, value=value, type=type) + tag = BQTag( **dict ((k, v) for k,v in list(args.items()) if v is not None) ) + self.append(tag) + return tag + addTag = 
add_tag + + def add_gob(self, name=None, value=None, type=None, gob=None): + if gob is None: + args = dict ( name=name, value=value, type=type) + gob = BQGObject( **dict ((k, v) for k,v in list(args.items()) if v is not None) ) + self.append(gob) + return gob + addGObject = add_gob + + + # def findall (pathexpress) + # Implemented by + # def tag(self, name): + # results = [] + # for tg in self.tags: + # if tg.name == name: + # results.append(tg) + # if len(results) == 0: + # return None + # elif len(results) == 1: + # return results[0] + # else: + # return results + + # def gob(self, name): + # results = [] + # for tg in self.gobjects: + # if tg.name == name: + # results.append(tg) + # if len(results) == 0: + # return None + # elif len(results) == 1: + # return results[0] + # else: + # return results + + + def set_parent(self, parent): + parent.tags.append(self) + + def get_values(self): + if 'value' in self.attrib: + return self.attrib['value'] + result = [str(v.text) for v in self.iter ('value')] + return result + + def set_values(self, values): + """Assign a list of values + Maybe single value list or a list of (value, type) tuples where type is object, integer, float, number, string + """ + if not isinstance(values, list): + self.set('value', str(values)) + else: + if 'value' in self.attrib: + del self.attrib['value'] + for child in self: + self.remove (child) + for v in values: + if isinstance (v, tuple): + val = etree.SubElement (self, 'value', type=v[1]) + v = v[0] + else: + val = etree.SubElement (self, 'value') + val.text = str(v) + + value = property(get_values, set_values) + + def toetree(self, parent, baseuri): + xmlkids = list(self.xmlkids) + if len(self.values)<=1: + n = create_element(self, parent, baseuri) + else: + n = create_element(self, parent, baseuri) + del n.attrib['value'] + xmlkids.append('values') + for kid_name in xmlkids: + for x in getattr(self, kid_name, None): + toxmlnode (x, n, baseuri) + return n + + + + +################################################################################ +# Image +################################################################################ + +class BQImage(BQResource): + TAG = xmltag = "image" + xmlfields = ['name', 'uri', 'ts' , "value", 'resource_uniq' ] # "x", "y","z", "t", "ch" ] + xmlkids = ['tags', 'gobjects'] + + #def __init__(self): + # super(BQImage, self).__init__() + # self._geometry = None + + def geometry(self): + 'return x,y,z,t,ch of image' + if self._geometry is None: + info = self.pixels().meta().fetch() + info = etree.XML(info) + geom = [] + for n in 'xyztc': + tn = info.xpath('//tag[@name="image_num_%s"]' % n) + geom.append(tn[0].get('value')) + self._geometry = tuple(map(int, geom)) + return self._geometry + + def pixels(self): + return BQImagePixels(self) + +class BQImagePixels(object): + """manage requests to the image pixels""" + def __init__(self, image): + self.image = image + self.ops = [] + + def _construct_url(self): + """build the final url based on the operation + """ + image_service = self.image.session.service('image_service') + return image_service.construct (path = '%s?%s'%(self.image.resource_uniq, + '&'.join ( "%s=%s" % tp for tp in self.ops ))) + + def fetch(self, path=None): + """resolve the current and fetch the pixel + """ + url = self._construct_url() + session = self.image.session + return session.c.fetch (url, path=path) + + def command(self, operation, arguments=''): + self.ops.append((operation, arguments)) + return self + + def slice(self, x='', y='',z='',t=''): + """Slice the 
current image""" + return self.command('slice', '%s,%s,%s,%s' % (x,y,z,t)) + + def format(self, fmt): + return self.command('format', fmt) + + def resize(self, w='',h='', interpolation='NN'): + """ interpoaltion may be,[ NN|,BL|,BC][,AR] + """ + return self.command('resize', '%s,%s,%s' % (w,h,interpolation)) + + def localpath(self): + return self.command('localpath') + + def meta(self): + return self.command('meta') + + def info(self): + return self.command('info') + + def asarray(self): + try: + import tifffile + except ImportError: + log.error ("Please install Tifffile (Optional)") + return None + # Force format to be tiff by removing any format and append format tiff + self.ops = [ tp for tp in self.ops if tp[0] != 'format' ] + self.format ('tiff') + url = self._construct_url() + image_service = self.image.session.service ('image_service') + with image_service.fetch (url, stream=True) as response: + #response.raw.decode_content = True + return tifffile.imread (io.BytesIO (response.content)) + + + +################################################################################ +# Tag +################################################################################ + +class BQValue (BQNode): + '''tag value''' + TAG = xmltag = "value" + xmlfields = ['value', 'type', 'index'] + + #def __init__(self, value=None, type=None, index=None): + # self.value = value + # self.type = type + # self.index = index + + def set_parent(self, parent): + if self.index is not None: + parent.values.extend([None for x in range((self.index+1)-len(parent.values))]) + parent.values[self.index] = self + else: + parent.values.append(self) + + def initializeXml(self, xmlnode): + super(BQValue, self).initializeXml(xmlnode) + try: + self.index = int(self.index) + except Exception: + self.index = None + self.value = xmlnode.text + + def toetree(self, parent, baseuri): + n = etree.SubElement(parent, 'value', ) + if self.type is not None: n.set('type', str(self.type)) + if self.index is not None: n.set('index', str(self.index)) + if self.value is not None: n.text = str(self.value) + return n + + #def __call__(self): + # if len(self.values<=0): return '' + # elif len(self.values==1): return str(self.values[0]) + # def str_join(x,y): return '%s,%s'%(x,y) + # return reduce(str_join, self.values) + +class BQTag (BQResource): + '''tag resource''' + TAG = xmltag = "tag" + xmlfields = ['name', 'type', 'uri', 'ts', 'value'] + xmlkids = ['tags', 'gobjects', ] # handle values specially + + #def __init__(self, name='', value=None, type=None): + # super(BQTag, self).__init__() + # self.name = name + # self.values = (value and [BQValue(value)]) or [] + # if type is not None: + # self.type=type + + + + +################################################################################ +# GObject +################################################################################ + +class BQVertex (BQNode): + '''gobject vertex''' + type = 'vertex' + TAG = xmltag = "vertex" + xmlfields = ['x', 'y', 'z', 't', 'c', 'index'] + + #def __init__(self, **kw): + # self.fromObj(**kw) + + def __repr__(self): + return 'vertex(x:%s,y:%s,z:%s,t:%s)'%(self.x, self.y, self.z, self.t) + + def set_parent(self, parent): + parent.vertices.append(self) + + def toTuple(self): + return (self.x, self.y, self.z, self.t) + + def fromTuple(self, v): + x,y,z,t = v + self.x=x; self.y=y; self.z=z; self.t=t + + def fromObj(self, **kw): + for k,v in list(kw.items()): + if k in self.xmlfields: + setattr(self,k,v) + +class BQGObject(BQResource): + '''Gobject resource: A 
grpahical annotation''' + type = 'gobject' + TAG = xmltag = "gobject" + xmlfields = ['name', 'type', 'uri'] + xmlkids = ['tags', 'gobjects', 'vertices'] + + #def __init__(self, name=None, type=None): + # super(BQGObject, self).__init__() + # self.vertices = [] + # self.name=name + # self.type= type or self.xmltag + + def __str__(self): + return '(type: %s, name: %s, %s)'%(self.type, self.name, self.vertices) + + def set_parent(self, parent): + parent.gobjects.append(self) + + def verticesAsTuples(self): + return [v.toTuple() for v in self.vertices ] + + def perimeter(self): + return -1 + + def area(self): + return -1 + + +class BQPoint (BQGObject): + '''point gobject resource''' + TAG = xmltag = "point" + +class BQLabel (BQGObject): + '''label gobject resource''' + TAG = xmltag = "label" + +class BQPolyline (BQGObject): + '''polyline gobject resource''' + TAG = xmltag = "polyline" + def perimeter(self): + vx = self.verticesAsTuples() + d = 0 + for i in range(0, len(vx)-1): + x1,y1,z1,t1 = vx[i] + x2,y2,z2,t2 = vx[i+1] + d += math.sqrt( math.pow(x2-x1,2.0) + math.pow(y2-y1,2.0) ) + return d + + +# only does 2D version right now, polygon area is flawed if the edges are intersecting +# implement better algorithm based on triangles +class BQPolygon (BQGObject): + '''Polygon gobject resource''' + TAG = xmltag = "polygon" + # only does 2D version right now + def perimeter(self): + vx = self.verticesAsTuples() + vx.append(vx[0]) + d = 0 + for i in range(0, len(vx)-1): + x1,y1,z1,t1 = vx[i] + x2,y2,z2,t2 = vx[i+1] + d += math.sqrt( math.pow(x2-x1,2.0) + math.pow(y2-y1,2.0) ) + return d + + # only does 2D version right now + # area is flawed if the edges are intersecting implement better algorithm based on triangles + def area(self): + vx = self.verticesAsTuples() + vx.append(vx[0]) + d = 0 + for i in range(0, len(vx)-1): + x1,y1,z1,t1 = vx[i] + x2,y2,z2,t2 = vx[i+1] + d += x1*y2 - y1*x2 + return 0.5 * math.fabs(d) + +class BQCircle (BQGObject): + '''circle gobject resource''' + TAG = xmltag = "circle" + def perimeter(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + return 2.0 * math.pi * max(math.fabs(x1-x2), math.fabs(y1-y2)) + + def area(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + return math.pi * pow( max(math.fabs(x1-x2), math.fabs(y1-y2)), 2.0) + +class BQEllipse (BQGObject): + '''ellipse gobject resource''' + TAG = xmltag = "ellipse" + type = 'ellipse' + + def perimeter(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + x3,y3,z3,t3 = vx[2] + a = max(math.fabs(x1-x2), math.fabs(y1-y2)) + b = max(math.fabs(x1-x3), math.fabs(y1-y3)) + return math.pi * ( 3.0*(a+b) - math.sqrt( 10.0*a*b + 3.0*(a*a + b*b)) ) + + def area(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + x3,y3,z3,t3 = vx[2] + a = max(math.fabs(x1-x2), math.fabs(y1-y2)) + b = max(math.fabs(x1-x3), math.fabs(y1-y3)) + return math.pi * a * b + +class BQRectangle (BQGObject): + '''rectangle gobject resource''' + TAG = xmltag = "rectangle" + def perimeter(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + return math.fabs(x1-x2)*2.0 + math.fabs(y1-y2)*2.0 + + def area(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + return math.fabs(x1-x2) * math.fabs(y1-y2) + +class BQSquare (BQRectangle): + '''square gobject resource''' + TAG = xmltag = "square" + + 
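For illustration, a minimal sketch of building a resource document with the lxml-backed classes above (assumptions: this module is importable as bqapi.bqnode, and direct instantiation of the ElementBase subclasses uses the TAG class attribute as the element tag, as in upstream bqapi):

from lxml import etree
from bqapi.bqnode import BQResource

r = BQResource()                               # <resource/>
t = r.add_tag(name='antibody', value='GFP')    # appends <tag name="antibody" value="GFP"/>
print(t.value)                                 # 'GFP', via the value property on BQResource
print(etree.tostring(r))                       # b'<resource><tag name="antibody" value="GFP"/></resource>'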
+################################################################################ +# Advanced Objects +################################################################################ +class BQDataset(BQResource): + TAG = xmltag = "dataset" + #xmlfields = ['name', 'uri', 'ts'] + #xmlkids = ['kids', 'tags', 'gobjects'] + + + + + + +class BQUser(BQResource): + TAG = xmltag = "user" + #xmlfields = ['name', 'uri', 'ts'] + #xmlkids = ['tags', 'gobjects'] + +class BQMex(BQResource): + TAG = xmltag = "mex" + #xmlfields = ['module', 'uri', 'ts', 'value'] + #xmlkids = ['tags', 'gobjects'] + + + +################################################################################ +# Factory +################################################################################ + +class BQFactory (etree.PythonElementClassLookup): + '''Factory for Bisque resources''' + resources = dict([ (x[1].xmltag, x[1]) for x in inspect.getmembers(sys.modules[__name__]) if inspect.isclass(x[1]) and hasattr(x[1], 'xmltag') ]) + + + def __init__(self, session): + self.session = session + self.parser = etree.XMLParser() + self.parser.set_element_class_lookup(self) + + def lookup (self, document, element): + return self.find(element.tag, element.get ('type', '')) + + @classmethod + def find(cls, xmltag, type_attr): + if xmltag == "gobject" and type_attr in gobject_primitives: + xmltag = type_attr + c = cls.resources.get(xmltag, BQResource) + return c + + @classmethod + def make(cls, xmltag, type_attr): + c = cls.find (xmltag, type_attr) + return c() + + index_map = dict(vertex=('vertices',BQVertex), tag=('tags', BQTag)) + @classmethod + def index(cls, xmltag, parent, indx): + array, ctor = cls.index_map.get (xmltag, (None,None)) + if array: + objarr = getattr(parent, array) + objarr.extend ([ ctor() for x in range(((indx+1)-len(objarr)))]) + v = objarr[indx] + v.indx = indx; + #log.debug ('fetching %s %s[%d]:%s' %(parent , array, indx, v)) + return v + + # Parsing + def from_etree(self, node): + """ Convert an etree to a python structure""" + return node + + def from_string(self, xmlstring): + return etree.XML(xmlstring, self.parser) + + # Generation + @classmethod + def to_string (self, nodes): + return etree.tostring (nodes) + + @classmethod + def to_etree(self, bqnode): + """Convert BQNode to elementTree""" + return bqnode + + def string2etree(self, xmlstring): + return etree.XML (xmlstring, self.parser) + + + +################################################################################ +# Generation +################################################################################ + +def toXml(dbo, parent=None, baseuri='', view=''): + """Convert a BQObject to an etree object suitable for XML + generation + """ + node = dbo + return node; + + +def create_element(dbo, parent, baseuri, **kw): + """Create an etree element from BQ object + """ + xtag = kw.pop('xtag', dbo.xmltag) + if not kw: + kw = model_fields (dbo, baseuri) + if parent is not None: + node = etree.SubElement (parent, xtag, **kw) + else: + node = etree.Element (xtag, **kw) + return node + +def toxmlnode (dbo, parent, baseuri, view=None): + if hasattr(dbo, 'toetree'): + node = dbo.toetree(parent, baseuri) + else: + node = create_element (dbo, parent, baseuri) + for kid_name in dbo.xmlkids: + for x in getattr(dbo, kid_name, None): + toxmlnode (x, node, baseuri, view) + return node + + +def make_owner (dbo, fn, baseuri): + return ('owner', baseuri + str(dbo.owner)) + +def make_uri(dbo, fn, baseuri): + return ('uri', "%s%s" % (baseuri , str (dbo.uri))) + +def 
get_email (dbo, fn, baseuri): + return ('email', dbo.user.email_address) + +mapping_fields = { + 'mex' : None, + 'acl' : None, + # Auth + 'user_id' : get_email, + 'taggable_id': None, + 'permission': 'action', + 'resource': None, + } + +def model_fields(dbo, baseuri=None): + """Extract known fields from a BQ object, while removing any known + from C{excluded_fields} + + @rtype: dict + @return fields to be rendered in XML + """ + attrs = {} + try: + dbo_fields = dbo.xmlfields + except AttributeError: + # This occurs when the object is a fake DB objects + # The dictionary is sufficient + dbo_fields= dbo.__dict__ + for fn in dbo_fields: + fn = mapping_fields.get(fn, fn) + # Skip when map is None + if fn is None: + continue + # Map is callable, then call + if isinstance(fn, collections.Callable): + fn, attr_val = fn(dbo, fn, baseuri) + else: + attr_val = getattr(dbo, fn, None) + + # Put value in attribute dictionary + if attr_val is not None and attr_val!='': + if isinstance(attr_val, str): + attrs[fn] = attr_val + else: + attrs[fn] = str(attr_val) #unicode(attr_val,'utf-8') + return attrs diff --git a/nph_5class/bqapi/bqnode.py.bak b/nph_5class/bqapi/bqnode.py.bak new file mode 100644 index 0000000..ad5f68b --- /dev/null +++ b/nph_5class/bqapi/bqnode.py.bak @@ -0,0 +1,737 @@ +############################################################################### +## Bisquik ## +## Center for Bio-Image Informatics ## +## University of California at Santa Barbara ## +## ------------------------------------------------------------------------- ## +## ## +## Copyright (c) 2007 by the Regents of the University of California ## +## All rights reserved ## +## ## +## Redistribution and use in source and binary forms, with or without ## +## modification, are permitted provided that the following conditions are ## +## met: ## +## ## +## 1. Redistributions of source code must retain the above copyright ## +## notice, this list of conditions, and the following disclaimer. ## +## ## +## 2. Redistributions in binary form must reproduce the above copyright ## +## notice, this list of conditions, and the following disclaimer in ## +## the documentation and/or other materials provided with the ## +## distribution. ## +## ## +## 3. All advertising materials mentioning features or use of this ## +## software must display the following acknowledgement: This product ## +## includes software developed by the Center for Bio-Image Informatics## +## University of California at Santa Barbara, and its contributors. ## +## ## +## 4. Neither the name of the University nor the names of its ## +## contributors may be used to endorse or promote products derived ## +## from this software without specific prior written permission. ## +## ## +## THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS "AS IS" AND ANY ## +## EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ## +## WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ARE ## +## DISCLAIMED. 
IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ## +## ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL ## +## DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS ## +## OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ## +## HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ## +## STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ## +## ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ## +## POSSIBILITY OF SUCH DAMAGE. ## +## ## +############################################################################### +""" +BQ API - a set of classes that represent Bisque objects + +""" + +__module__ = "bqnode.py" +__author__ = "Dmitry Fedorov and Kris Kvilekval" +__version__ = "0.1" +__revision__ = "$Rev$" +__date__ = "$Date$" +__copyright__ = "Center for BioImage Informatics, University California, Santa Barbara" + +import sys +import math +import inspect +import logging +from urllib import quote +from lxml import etree + +log = logging.getLogger('bqapi.bqnode') + +__all__ = [ 'BQFactory', 'BQNode', 'BQImage', 'BQResource', 'BQValue', 'BQTag', 'BQVertex', 'BQGObject', + "BQDataset", "BQUser", "BQMex", + 'gobject_primitives', + 'BQPoint', 'BQLabel', 'BQPolyline', 'BQPolygon', 'BQCircle', 'BQEllipse', 'BQRectangle', 'BQSquare'] + +gobject_primitives = set(['point', 'label', 'polyline', 'polygon', 'circle', 'ellipse', 'rectangle', 'square']) + + +################################################################################ +# Base class for bisque resources +################################################################################ + +class BQNode (etree.ElementBase): + '''Base class for parsing Bisque XML''' + TAG = xmltag = 'NODE' + xmlfields = [] + xmlkids = [] + + + def __getattr__(self, name): + if name in self.xmlfields: + return self.get (name) + ###return etree.ElementBase.__getattr__(self, name) + #super(etree.ElementBase, self).__getattr__(name) + + def __setattr__(self, name, val): + if name in self.xmlfields: + return self.set (name, val) + print "SETTER" + object.__setattr__(self, name, val) + + def initialize(self): + 'used for class post parsing initialization' + pass + + def initializeXml(self, xmlnode): + for x in self.xmlfields: + setattr(self, x, xmlnode.get (x, None)) + + def set_parent(self, parent): + pass + + def __repr__(self): + return '(%s#%s)' % (self.TAG, len(self)) + + def __str__(self): + return etree.tostring(self) + # return '(%s:%s:%s)'%(self.TAG,','.join (['%s=%s' % (f, getattr(self,f,'')) for f in self.attrib]), [ str (q) for q in self ] ) + + def toTuple (self): + return tuple( [ x for x in self.attr ] ) + + + + +################################################################################ +# Base class for bisque resources +################################################################################ + +class BQResource (BQNode): + '''Base class for Bisque resources''' + TAG = xmltag = 'resource' + xmlfields = ['name', 'values', 'type', 'uri', 'ts', 'resource_uniq'] + xmlkids = ['kids', 'tags', 'gobjects'] + + #def __repr__(self): + # return '(%s:%s)'%(self.xmltag, self.uri) + + + def get_tags(self): + return [ x for x in self.iter ('tag') ] + tags = property(get_tags) + def get_gobjects(self): + return [ x for x in self.iter ('gobject') ] + gobjects = property(get_gobjects) + def get_kids(self): + return [ x for x in self.iter () ] + kids = property(get_kids) + + #def __init__(self): + # self.tags = [] + # self.gobjects 
= [] + # self.kids = [] + + def toDict (self): + objs = {} + objs.update ( [ (f.name, f) for f in self.tags if f.name ] ) + objs.update ( [ (f.name, f) for f in self.gobjects if f.name ] ) + return objs + + def set_parent(self, parent): + parent.kids.append(self) + + def add_tag(self, name=None, value=None, type=None, tag=None): + if tag is None: + args = dict ( name=name, value=value, type=type) + tag = BQTag( **dict ((k, v) for k,v in args.items() if v is not None) ) + self.append(tag) + return tag + addTag = add_tag + + def add_gob(self, name=None, value=None, type=None, gob=None): + if gob is None: + args = dict ( name=name, value=value, type=type) + gob = BQGObject( **dict ((k, v) for k,v in args.items() if v is not None) ) + self.append(gob) + return gob + addGObject = add_gob + + + # def findall (pathexpress) + # Implemented by + # def tag(self, name): + # results = [] + # for tg in self.tags: + # if tg.name == name: + # results.append(tg) + # if len(results) == 0: + # return None + # elif len(results) == 1: + # return results[0] + # else: + # return results + + # def gob(self, name): + # results = [] + # for tg in self.gobjects: + # if tg.name == name: + # results.append(tg) + # if len(results) == 0: + # return None + # elif len(results) == 1: + # return results[0] + # else: + # return results + + + def set_parent(self, parent): + parent.tags.append(self) + + def get_values(self): + if 'value' in self.attrib: + return self.attrib['value'] + result = [str(v.text) for v in self.iter ('value')] + return result + + def set_values(self, values): + """Assign a list of values + Maybe single value list or a list of (value, type) tuples where type is object, integer, float, number, string + """ + if not isinstance(values, list): + self.set('value', str(values)) + else: + if 'value' in self.attrib: + del self.attrib['value'] + for child in self: + self.remove (child) + for v in values: + if isinstance (v, tuple): + val = etree.SubElement (self, 'value', type=v[1]) + v = v[0] + else: + val = etree.SubElement (self, 'value') + val.text = str(v) + + value = property(get_values, set_values) + + def toetree(self, parent, baseuri): + xmlkids = list(self.xmlkids) + if len(self.values)<=1: + n = create_element(self, parent, baseuri) + else: + n = create_element(self, parent, baseuri) + del n.attrib['value'] + xmlkids.append('values') + for kid_name in xmlkids: + for x in getattr(self, kid_name, None): + toxmlnode (x, n, baseuri) + return n + + + + +################################################################################ +# Image +################################################################################ + +class BQImage(BQResource): + TAG = xmltag = "image" + xmlfields = ['name', 'uri', 'ts' , "value", 'resource_uniq' ] # "x", "y","z", "t", "ch" ] + xmlkids = ['tags', 'gobjects'] + + #def __init__(self): + # super(BQImage, self).__init__() + # self._geometry = None + + def geometry(self): + 'return x,y,z,t,ch of image' + if self._geometry is None: + info = self.pixels().meta().fetch() + info = etree.XML(info) + geom = [] + for n in 'xyztc': + tn = info.xpath('//tag[@name="image_num_%s"]' % n) + geom.append(tn[0].get('value')) + self._geometry = tuple(map(int, geom)) + return self._geometry + + def pixels(self): + return BQImagePixels(self) + +class BQImagePixels(object): + """manage requests to the image pixels""" + def __init__(self, image): + self.image = image + self.ops = [] + + def _construct_url(self): + """build the final url based on the operation + """ + image_service = 
self.image.session.service('image_service') + return image_service.construct (path = '%s?%s'%(self.image.resource_uniq, + '&'.join ( "%s=%s" % tp for tp in self.ops ))) + + def fetch(self, path=None): + """resolve the current and fetch the pixel + """ + url = self._construct_url() + session = self.image.session + return session.c.fetch (url, path=path) + + def command(self, operation, arguments=''): + self.ops.append((operation, arguments)) + return self + + def slice(self, x='', y='',z='',t=''): + """Slice the current image""" + return self.command('slice', '%s,%s,%s,%s' % (x,y,z,t)) + + def format(self, fmt): + return self.command('format', fmt) + + def resize(self, w='',h='', interpolation='NN'): + """ interpoaltion may be,[ NN|,BL|,BC][,AR] + """ + return self.command('resize', '%s,%s,%s' % (w,h,interpolation)) + + def localpath(self): + return self.command('localpath') + + def meta(self): + return self.command('meta') + + def info(self): + return self.command('info') + + def asarray(self): + try: + import tifffile + except ImportError: + log.error ("Please install Tifffile (Optional)") + return None + # Force format to be tiff by removing any format and append format tiff + self.ops = [ tp for tp in self.ops if tp[0] != 'format' ] + self.format ('tiff') + url = self._construct_url() + image_service = self.image.session.service ('image_service') + with image_service.fetch (url, stream=True) as response: + #response.raw.decode_content = True + return tifffile.imread (io.BytesIO (response.content)) + + + +################################################################################ +# Tag +################################################################################ + +class BQValue (BQNode): + '''tag value''' + TAG = xmltag = "value" + xmlfields = ['value', 'type', 'index'] + + #def __init__(self, value=None, type=None, index=None): + # self.value = value + # self.type = type + # self.index = index + + def set_parent(self, parent): + if self.index is not None: + parent.values.extend([None for x in range((self.index+1)-len(parent.values))]) + parent.values[self.index] = self + else: + parent.values.append(self) + + def initializeXml(self, xmlnode): + super(BQValue, self).initializeXml(xmlnode) + try: + self.index = int(self.index) + except Exception: + self.index = None + self.value = xmlnode.text + + def toetree(self, parent, baseuri): + n = etree.SubElement(parent, 'value', ) + if self.type is not None: n.set('type', str(self.type)) + if self.index is not None: n.set('index', str(self.index)) + if self.value is not None: n.text = str(self.value) + return n + + #def __call__(self): + # if len(self.values<=0): return '' + # elif len(self.values==1): return str(self.values[0]) + # def str_join(x,y): return '%s,%s'%(x,y) + # return reduce(str_join, self.values) + +class BQTag (BQResource): + '''tag resource''' + TAG = xmltag = "tag" + xmlfields = ['name', 'type', 'uri', 'ts', 'value'] + xmlkids = ['tags', 'gobjects', ] # handle values specially + + #def __init__(self, name='', value=None, type=None): + # super(BQTag, self).__init__() + # self.name = name + # self.values = (value and [BQValue(value)]) or [] + # if type is not None: + # self.type=type + + + + +################################################################################ +# GObject +################################################################################ + +class BQVertex (BQNode): + '''gobject vertex''' + type = 'vertex' + TAG = xmltag = "vertex" + xmlfields = ['x', 'y', 'z', 't', 'c', 'index'] + + #def 
__init__(self, **kw): + # self.fromObj(**kw) + + def __repr__(self): + return 'vertex(x:%s,y:%s,z:%s,t:%s)'%(self.x, self.y, self.z, self.t) + + def set_parent(self, parent): + parent.vertices.append(self) + + def toTuple(self): + return (self.x, self.y, self.z, self.t) + + def fromTuple(self, v): + x,y,z,t = v + self.x=x; self.y=y; self.z=z; self.t=t + + def fromObj(self, **kw): + for k,v in kw.items(): + if k in self.xmlfields: + setattr(self,k,v) + +class BQGObject(BQResource): + '''Gobject resource: A grpahical annotation''' + type = 'gobject' + TAG = xmltag = "gobject" + xmlfields = ['name', 'type', 'uri'] + xmlkids = ['tags', 'gobjects', 'vertices'] + + #def __init__(self, name=None, type=None): + # super(BQGObject, self).__init__() + # self.vertices = [] + # self.name=name + # self.type= type or self.xmltag + + def __str__(self): + return '(type: %s, name: %s, %s)'%(self.type, self.name, self.vertices) + + def set_parent(self, parent): + parent.gobjects.append(self) + + def verticesAsTuples(self): + return [v.toTuple() for v in self.vertices ] + + def perimeter(self): + return -1 + + def area(self): + return -1 + + +class BQPoint (BQGObject): + '''point gobject resource''' + TAG = xmltag = "point" + +class BQLabel (BQGObject): + '''label gobject resource''' + TAG = xmltag = "label" + +class BQPolyline (BQGObject): + '''polyline gobject resource''' + TAG = xmltag = "polyline" + def perimeter(self): + vx = self.verticesAsTuples() + d = 0 + for i in range(0, len(vx)-1): + x1,y1,z1,t1 = vx[i] + x2,y2,z2,t2 = vx[i+1] + d += math.sqrt( math.pow(x2-x1,2.0) + math.pow(y2-y1,2.0) ) + return d + + +# only does 2D version right now, polygon area is flawed if the edges are intersecting +# implement better algorithm based on triangles +class BQPolygon (BQGObject): + '''Polygon gobject resource''' + TAG = xmltag = "polygon" + # only does 2D version right now + def perimeter(self): + vx = self.verticesAsTuples() + vx.append(vx[0]) + d = 0 + for i in range(0, len(vx)-1): + x1,y1,z1,t1 = vx[i] + x2,y2,z2,t2 = vx[i+1] + d += math.sqrt( math.pow(x2-x1,2.0) + math.pow(y2-y1,2.0) ) + return d + + # only does 2D version right now + # area is flawed if the edges are intersecting implement better algorithm based on triangles + def area(self): + vx = self.verticesAsTuples() + vx.append(vx[0]) + d = 0 + for i in range(0, len(vx)-1): + x1,y1,z1,t1 = vx[i] + x2,y2,z2,t2 = vx[i+1] + d += x1*y2 - y1*x2 + return 0.5 * math.fabs(d) + +class BQCircle (BQGObject): + '''circle gobject resource''' + TAG = xmltag = "circle" + def perimeter(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + return 2.0 * math.pi * max(math.fabs(x1-x2), math.fabs(y1-y2)) + + def area(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + return math.pi * pow( max(math.fabs(x1-x2), math.fabs(y1-y2)), 2.0) + +class BQEllipse (BQGObject): + '''ellipse gobject resource''' + TAG = xmltag = "ellipse" + type = 'ellipse' + + def perimeter(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + x3,y3,z3,t3 = vx[2] + a = max(math.fabs(x1-x2), math.fabs(y1-y2)) + b = max(math.fabs(x1-x3), math.fabs(y1-y3)) + return math.pi * ( 3.0*(a+b) - math.sqrt( 10.0*a*b + 3.0*(a*a + b*b)) ) + + def area(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + x3,y3,z3,t3 = vx[2] + a = max(math.fabs(x1-x2), math.fabs(y1-y2)) + b = max(math.fabs(x1-x3), math.fabs(y1-y3)) + return math.pi * a * b + +class BQRectangle (BQGObject): + '''rectangle 
gobject resource''' + TAG = xmltag = "rectangle" + def perimeter(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + return math.fabs(x1-x2)*2.0 + math.fabs(y1-y2)*2.0 + + def area(self): + vx = self.verticesAsTuples() + x1,y1,z1,t1 = vx[0] + x2,y2,z2,t2 = vx[1] + return math.fabs(x1-x2) * math.fabs(y1-y2) + +class BQSquare (BQRectangle): + '''square gobject resource''' + TAG = xmltag = "square" + + +################################################################################ +# Advanced Objects +################################################################################ +class BQDataset(BQResource): + TAG = xmltag = "dataset" + #xmlfields = ['name', 'uri', 'ts'] + #xmlkids = ['kids', 'tags', 'gobjects'] + + + + + + +class BQUser(BQResource): + TAG = xmltag = "user" + #xmlfields = ['name', 'uri', 'ts'] + #xmlkids = ['tags', 'gobjects'] + +class BQMex(BQResource): + TAG = xmltag = "mex" + #xmlfields = ['module', 'uri', 'ts', 'value'] + #xmlkids = ['tags', 'gobjects'] + + + +################################################################################ +# Factory +################################################################################ + +class BQFactory (etree.PythonElementClassLookup): + '''Factory for Bisque resources''' + resources = dict([ (x[1].xmltag, x[1]) for x in inspect.getmembers(sys.modules[__name__]) if inspect.isclass(x[1]) and hasattr(x[1], 'xmltag') ]) + + + def __init__(self, session): + self.session = session + self.parser = etree.XMLParser() + self.parser.set_element_class_lookup(self) + + def lookup (self, document, element): + return self.find(element.tag, element.get ('type', '')) + + @classmethod + def find(cls, xmltag, type_attr): + if xmltag == "gobject" and type_attr in gobject_primitives: + xmltag = type_attr + c = cls.resources.get(xmltag, BQResource) + return c + + @classmethod + def make(cls, xmltag, type_attr): + c = cls.find (xmltag, type_attr) + return c() + + index_map = dict(vertex=('vertices',BQVertex), tag=('tags', BQTag)) + @classmethod + def index(cls, xmltag, parent, indx): + array, ctor = cls.index_map.get (xmltag, (None,None)) + if array: + objarr = getattr(parent, array) + objarr.extend ([ ctor() for x in range(((indx+1)-len(objarr)))]) + v = objarr[indx] + v.indx = indx; + #log.debug ('fetching %s %s[%d]:%s' %(parent , array, indx, v)) + return v + + # Parsing + def from_etree(self, node): + """ Convert an etree to a python structure""" + return node + + def from_string(self, xmlstring): + return etree.XML(xmlstring, self.parser) + + # Generation + @classmethod + def to_string (self, nodes): + return etree.tostring (nodes) + + @classmethod + def to_etree(self, bqnode): + """Convert BQNode to elementTree""" + return bqnode + + def string2etree(self, xmlstring): + return etree.XML (xmlstring, self.parser) + + + +################################################################################ +# Generation +################################################################################ + +def toXml(dbo, parent=None, baseuri='', view=''): + """Convert a BQObject to an etree object suitable for XML + generation + """ + node = dbo + return node; + + +def create_element(dbo, parent, baseuri, **kw): + """Create an etree element from BQ object + """ + xtag = kw.pop('xtag', dbo.xmltag) + if not kw: + kw = model_fields (dbo, baseuri) + if parent is not None: + node = etree.SubElement (parent, xtag, **kw) + else: + node = etree.Element (xtag, **kw) + return node + +def toxmlnode (dbo, parent, baseuri, 
view=None): + if hasattr(dbo, 'toetree'): + node = dbo.toetree(parent, baseuri) + else: + node = create_element (dbo, parent, baseuri) + for kid_name in dbo.xmlkids: + for x in getattr(dbo, kid_name, None): + toxmlnode (x, node, baseuri, view) + return node + + +def make_owner (dbo, fn, baseuri): + return ('owner', baseuri + str(dbo.owner)) + +def make_uri(dbo, fn, baseuri): + return ('uri', "%s%s" % (baseuri , str (dbo.uri))) + +def get_email (dbo, fn, baseuri): + return ('email', dbo.user.email_address) + +mapping_fields = { + 'mex' : None, + 'acl' : None, + # Auth + 'user_id' : get_email, + 'taggable_id': None, + 'permission': 'action', + 'resource': None, + } + +def model_fields(dbo, baseuri=None): + """Extract known fields from a BQ object, while removing any known + from C{excluded_fields} + + @rtype: dict + @return fields to be rendered in XML + """ + attrs = {} + try: + dbo_fields = dbo.xmlfields + except AttributeError: + # This occurs when the object is a fake DB objects + # The dictionary is sufficient + dbo_fields= dbo.__dict__ + for fn in dbo_fields: + fn = mapping_fields.get(fn, fn) + # Skip when map is None + if fn is None: + continue + # Map is callable, then call + if callable(fn): + fn, attr_val = fn(dbo, fn, baseuri) + else: + attr_val = getattr(dbo, fn, None) + + # Put value in attribute dictionary + if attr_val is not None and attr_val!='': + if isinstance(attr_val, basestring): + attrs[fn] = attr_val + else: + attrs[fn] = str(attr_val) #unicode(attr_val,'utf-8') + return attrs diff --git a/nph_5class/bqapi/casauth.py b/nph_5class/bqapi/casauth.py new file mode 100644 index 0000000..be48e88 --- /dev/null +++ b/nph_5class/bqapi/casauth.py @@ -0,0 +1,31 @@ +import logging +import requests + +from bs4 import BeautifulSoup as soupy + +def login_elements(tag): + """A filter to find cas login form elements (tags carrying both 'name' and 'value' attributes)""" + return tag.has_attr('name') and tag.has_attr('value') + +def caslogin(session, caslogin, username, password, service=None): + if service: + params = {'service' : service} + else: + params = None + + cas_page = session.get(caslogin, params = params) + # Move past any redirects + caslogin = cas_page.url + cas_doc = soupy(cas_page.text) + form_inputs = cas_doc.find_all(login_elements) + login_data = dict() + for tag in form_inputs: + 
login_data[tag['name']] = tag['value'] + login_data['username'] = username + login_data['password'] = password + + signin_page = session.post(caslogin, login_data, cookies=cas_page.cookies, params = params) + if signin_page.status_code != requests.codes.ok: #pylint: disable=no-member + logging.warn ("ERROR on CAS signin headers %s cookies %s text %s", + signin_page.headers, signin_page.cookies, signin_page.text) + return signin_page.status_code == requests.codes.ok #pylint: disable=no-member diff --git a/nph_5class/bqapi/comm.py b/nph_5class/bqapi/comm.py new file mode 100644 index 0000000..31f91ce --- /dev/null +++ b/nph_5class/bqapi/comm.py @@ -0,0 +1,935 @@ +############################################################################### +## Bisquik ## +## Center for Bio-Image Informatics ## +## University of California at Santa Barbara ## +## ------------------------------------------------------------------------- ## +## ## +## Copyright (c) 2007,2008,2009,2010,2011 ## +## by the Regents of the University of California ## +## All rights reserved ## +## ## +## Redistribution and use in source and binary forms, with or without ## +## modification, are permitted provided that the following conditions are ## +## met: ## +## ## +## 1. Redistributions of source code must retain the above copyright ## +## notice, this list of conditions, and the following disclaimer. ## +## ## +## 2. Redistributions in binary form must reproduce the above copyright ## +## notice, this list of conditions, and the following disclaimer in ## +## the documentation and/or other materials provided with the ## +## distribution. ## +## ## +## 3. All advertising materials mentioning features or use of this ## +## software must display the following acknowledgement: This product ## +## includes software developed by the Center for Bio-Image Informatics## +## University of California at Santa Barbara, and its contributors. ## +## ## +## 4. Neither the name of the University nor the names of its ## +## contributors may be used to endorse or promote products derived ## +## from this software without specific prior written permission. ## +## ## +## THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS "AS IS" AND ANY ## +## EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ## +## WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ARE ## +## DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ## +## ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL ## +## DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS ## +## OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ## +## HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ## +## STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ## +## ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ## +## POSSIBILITY OF SUCH DAMAGE. 
## +## ## +############################################################################### +""" +SYNOPSIS +======== + +DESCRIPTION +=========== + +""" + + +import os +import sys +#import urlparse +#import urllib +import logging +import itertools +import tempfile +import mimetypes +import warnings +import posixpath + +from six.moves import urllib + + +import requests +from requests.auth import HTTPBasicAuth +from requests.auth import AuthBase +from requests import Session +#from requests_toolbelt import MultipartEncoder + +try: + from collections import OrderedDict +except ImportError: + from ordereddict import OrderedDict + + +try: + from lxml import etree +except ImportError: + import xml.etree.ElementTree as etree + +from .types import BQMex, BQNode, BQFactory +from .util import d2xml #parse_qs, make_qs, xml2d, d2xml, normalize_unicode +from .services import ServiceFactory +from .exception import BQCommError, BQApiError +from .RequestsMonkeyPatch import requests_patch#allows multipart form to accept unicode + +try: + from .casauth import caslogin + CAS_SUPPORT=True +except ImportError: + CAS_SUPPORT = False + + +log = logging.getLogger('bqapi.comm') + +#SERVICES = [''] + + +class MexAuth(AuthBase): + """ + Bisque's Mex Authentication + """ + def __init__(self, token, user=None): + """ + Sets a mex authentication for the requests + + @param token: Token for authenticating a mex. The token can contain the user name + and a user name does not have to be provided. + @param user: The user the mex is attached. (default: None) + """ + if user is None: + self.username = "Mex %s"%(token) + elif user in token.split(':')[0]: #check if token contains user + self.username = "Mex %s"%(token) + else: + self.username = "Mex %s:%s"%(user, token) + + def __call__(self, r): + """ + Sets the authorization on the headers of the requests. + @param r: the requests + """ + r.headers['Authorization'] = self.username + return r + + +class BQServer(Session): + """ A reference to Bisque server + Allow communucation with a bisque server + + A wrapper over requests.Session + """ + + def __init__(self): + super(BQServer, self).__init__() + # Disable https session authentication.. + #self.verify = False + self.root = None + + + def authenticate_mex(self, token, user=None): + """ + Sets mex authorization to the requests + + @param token: this can be a combination of both token and user or just the token + @param user: the user attached to the mex + + """ + self.auth = MexAuth(token, user=user) + + + def authenticate_basic(self, user, pwd): + """ + Sets basic authorization along with the request + + @param user: The user for the requests. + @param pwd: The password for the user. 
+ """ + self.auth = HTTPBasicAuth(user, pwd) + + + def prepare_headers(self, user_headers): + """ + + """ + headers = {} + headers.update(self.auth) + if user_headers: + headers.update(user_headers) + return headers + + + def prepare_url(self, url, **params): + """ + Prepares the url + + @param url: if the url is not provided with a root and a root has been provided to the session + the root will be added to the url + @param odict: ordered dictionary object, addes to the query in the order provided + @param params: adds params to query potion of the url + + @return prepared url + """ + + u = urllib.parse.urlsplit(url) + + #root + if u.scheme and u.netloc: + scheme = u.scheme + netloc = u.netloc + elif self.root and u.netloc=='': + #adds root request if no root is provided in the url + r = urllib.parse.urlsplit(self.root) + scheme = r.scheme + netloc = r.netloc + else: #no root provided + raise BQApiError("No root provided") + + #query + query = ['%s=%s'%(k,v) for k,v in urllib.parse.parse_qsl(u.query, True)] + unordered_query = [] + ordered_query = [] + + if 'odict' in params: + odict = params['odict'] + del params['odict'] + if odict and isinstance(odict,OrderedDict): + while len(odict)>0: + ordered_query.append('%s=%s'%odict.popitem(False)) + + if params: + unordered_query = ['%s=%s'%(k,v) for k,v in list(params.items())] + + query = query + unordered_query + ordered_query + query = '&'.join(query) + + return urllib.parse.urlunsplit([scheme,netloc,u.path,query,u.fragment]) + + + + + def webreq(self, method, url, headers = None, path=None, **params): + """ + Makes a http GET to the url given + + @param url: the url that is fetched + @param headers: headers provided for this specific fetch (default: None) + @param path: the location to where the contents will be stored on the file system (default:None) + if no path is provided the contents of the response will be returned + @param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a (connect timeout, read timeout) tuple + + @return returns either the contents of the rests or the file name if a path is provided + + @exception: BQCommError if the requests returns an error code and message + """ + log.debug("%s: %s req header=%s" , method, url, headers) + timeout = params.get('timeout', None) + r = self.request(method=method, url=url, headers=headers, stream = (path is not None), timeout=timeout) + + try: + r.raise_for_status() + except requests.exceptions.HTTPError: + log.exception ("issue with %s", r) + raise BQCommError(r) + + if path: + with open(path, 'wb') as f: + f.write(r.content) +# for line in r.iter_content(): #really slow +# f.write(line) + return f.name + else: + return r.content + + def fetch(self, url, headers = None, path=None): + return self.webreq(method='get', url=url, headers=headers, path=path) + + def push(self, url, content=None, files=None, headers=None, path=None, method="POST", boundary=None, timeout=None): + """ + Makes a http request + + @param url: the url the request is made with + @param content: an xml document that will be sent along with the url + @param files: a dictonary with the format {filename: file handle or string}, sends as a multipart form + @param headers: headers provided for this specific request (default: None) + @param path: the location to where the contents will be stored on the file system (default:None) + if no path is provided the contents of the response will be returned + @param method: the method of the http request 
(HEAD,GET,POST,PUT,DELETE,...) (default: POST) + + @return returns either the contents of the response or the file name if a path is provided + + @exception: BQCommError if the request returns an error code and message + """ + log.debug("POST %s req %s" % (url, headers)) + + try: #error checking + r = self.request(method, url, data=content, headers=headers, files=files, timeout=timeout) + r.raise_for_status() + except requests.exceptions.HTTPError: + log.exception("In push request: %s %s %s" % (method, url, r.content)) + raise BQCommError(r) + + if path: + with open(path, 'wb') as f: + f.write(r.content) + return f.name + else: + return r.content + + +class BQSession(object): + """ + Top level Bisque communication object + """ + def __init__(self): + self.c = BQServer() + self.mex = None + self.services = {} + self.new = set() + self.dirty = set() + self.deleted = set() + self.bisque_root = None + self.factory = BQFactory(self) + self.dryrun = False + + + ############################ + # Establish a bisque session + ############################ + def _create_mex (self, user, moduleuri): + mex = BQMex() + mex.name = moduleuri or 'script:%s' % " ".join (sys.argv) + mex.status = 'RUNNING' + self.mex = self.save(mex, url=self.service_url('module_service', 'mex')) + if self.mex: + mextoken = self.mex.resource_uniq + self.c.authenticate_mex(mextoken, user) + # May not be needed + for c in range (100): + try: + self.load(url = self.service_url('module_service', path = "/".join (['mex', mextoken]))) + return True + except BQCommError: + pass + return False + def _check_session(self): + """Used to check that the session is actually active""" + r = self.fetchxml (self.service_url("auth_service", 'session')) + users = r.findall('./tag[@name="user"]') + return len(users) > 0 + + def init(self, bisque_url, credentials=None, moduleuri = None, create_mex=False): + """Create a session by connecting to bisque_url + + @param bisque_url: The bisque root or MEX url + @param credentials : A tuple (user, pass) or (mex, token) + @param moduleuri : The module URI of the mex for this session + @param create_mex : Create a new Mex session for this run + """ + self.bisque_root = self.c.root = bisque_url + self._load_services() + if credentials: + if credentials[0].lower() == 'mex': + return self.init_mex(bisque_url, credentials[1]) + auth_service = self.service ('auth_service') + logins = auth_service.login_providers (render='xml') + login_type = None + if logins is not None and logins[0]: + login_type = logins[0].get ('type') + if login_type == 'cas': + return self.init_cas (credentials[0], credentials[1], bisque_url, moduleuri=moduleuri, create_mex=create_mex) + return self.init_local (user=credentials[0], pwd=credentials[1], bisque_root=bisque_url, moduleuri=moduleuri, create_mex=create_mex) + return self + + + def init_local(self, user, pwd, moduleuri=None, bisque_root=None, create_mex=True): + """ + Initializes a local session + + @param: user - a bisque user + @param: pwd - the bisque user's password + @param: moduleuri - module uri to be set to the mex (Only matters if create_mex is set to true) (moduleuri: None) + @param: bisque_root - the root of the bisque system the user is trying to access (bisque_root: None) + @param: create_mex - creates a mex session under the user (default: True) + + @return: self + """ + + if bisque_root != None: + self.bisque_root = bisque_root + self.c.root = bisque_root + + self.c.authenticate_basic(user, pwd) + self._load_services() + if not self._check_session(): + log.error("Session 
failed to be created.. please check credentials") + return None + + self.mex = None + + if create_mex: + self._create_mex(user, moduleuri) + + return self + + + def init_mex(self, mex_url, token, user=None, bisque_root=None): + """ + Initializing a local session from a mex + + @param: mex_url - the mex url to initialize the session from + @param: token - the mex token to access the mex + @param: user - the owner of the mex (Does not have to be provided if already + provided in the token) (default: None) + @param: bisque_root - the root of the bisque system the user is trying to access (bisque_root: None) + + @return self + """ + if bisque_root is None: + # This assumes that bisque_root is http://host.org:port/ + mex_tuple = list(urllib.parse.urlparse(mex_url)) + mex_tuple[2:5] = '','','' + bisque_root = urllib.parse.urlunparse(mex_tuple) + + self.bisque_root = bisque_root + self.c.root = bisque_root + self.c.authenticate_mex(token, user=user) + self._load_services() + self.mex = self.load(mex_url, view='deep') + return self + + + def init_cas(self, user, pwd, moduleuri=None, bisque_root=None, create_mex=False): + """Initializes a CAS session + + @param: user - a bisque user + @param: pwd - the bisque user's password + @param: moduleuri - module uri to be set to the mex (Only matters if create_mex is set to true) (moduleuri: None) + @param: bisque_root - the root of the bisque system the user is trying to access (bisque_root: None) + @param: create_mex - creates a mex session under the user (default: False) + @return: self + + Example + >>> from bqapi import BQSession + >>> s = BQSession() + >>> s.init_cas (CASNAME, CASPASS, bisque_root='http://bisque.iplantcollaborative.org', create_mex=False) + >>> s.fetchxml('/data_service/image', limit=10) + """ + if not CAS_SUPPORT: + raise BQApiError ("CAS not supported.. please check installation") + + if bisque_root == None: + raise BQApiError ("cas login requires bisque_root") + + self.bisque_root = bisque_root + self.c.root = bisque_root + + caslogin (self.c, bisque_root + "/auth_service/login", user, pwd) + self._load_services() + if not self._check_session(): + log.error("Session failed to be created.. 
please check credentials") + return None + self.mex = None + + if create_mex: + self._create_mex(user, moduleuri) + return self + + def init_session (self, user, pwd, moduleuri=None, bisque_root=None, create_mex = False): + providers = { 'cas' : self.init_cas, 'internal' : self.init_local } + if bisque_root == None: + raise BQApiError ("cas login requires bisque_root") + + + self.bisque_root = bisque_root + self.c.root = bisque_root + self._load_services() + queryurl = self.service_url('auth_service', 'login_providers') + login_providers = self.fetchxml (queryurl) + login_type = login_providers.find ("./*[@type]") + login_type = login_type.get ('type') if login_type is not None else None + provider = providers.get (login_type, None) + if provider: + return provider (user=user, pwd=pwd, moduleuri=moduleuri, bisque_root=bisque_root, create_mex=create_mex) + + + def close(self): + pass + + def parameter(self, name): + if self.mex is None: + return None + return self.mex.xmltree.find('tag[@name="inputs"]//tag[@name="%s"]'%name) + + def get_value_safe(self, v, t): + try: + if t == 'boolean': + return v.lower() == 'true' + elif t == 'number': + return float(v) + return v + except AttributeError: + return None + + def parameter_value(self, name=None, p=None): + if p is None: + p = self.parameter(name) + else: + name = p.get('name') + + if p is None: return None + values = p.xpath('value') + if len(values)<1: + v = p.get('value') + t = p.get('type', '').lower() + return self.get_value_safe(v, t) + + r = [] + for vv in values: + v = vv.text + t = vv.get('type', '').lower() + r.append(self.get_value_safe(v, t)) + return r + + def parameters(self): + p = {} + if self.mex is None: + return p + inputs = self.mex.xmltree.iterfind('tag[@name="inputs"]//tag') + for i in inputs: + p[i.get('name')] = self.parameter_value(p=i) + return p + + def get_mex_inputs(self): + """ + Get all input parameters in mex. + + @return: map parameter name -> {'type':..., 'value':..., ...} or [ map parameter name -> {'type':..., 'value':..., ...}, ... ] if blocked iter + """ + def _xml2dict(e): + kids = { key:e.attrib[key] for key in e.attrib if key in ['type', 'value'] } + if e.text: + kids['value'] = e.text + for k, g in itertools.groupby(e, lambda x: x.tag): + g = [ _xml2dict(x) for x in g ] + kids[k] = g + return kids + + def _get_mex_params(mextree): + p = {} + for inp in mextree.iterfind('tag[@name="inputs"]/tag'): + p[inp.get('name')] = _xml2dict(inp) + p['mex_url']['value'] = mextree.get('uri') + return p + + # assemble map param name -> param value + if self.mex is None: + return {} + # check if outside is a block mex + if self.mex.xmltree.get('type') == 'block': + res = [] + for inner_mex in self.mex.xmltree.iterfind('./mex'): + res.append(_get_mex_params(inner_mex)) + else: + res = _get_mex_params(self.mex.xmltree) + return res + + def get_mex_execute_options(self): + """ + Get execute options in mex. 
+ + @return: map option name -> value + """ + p = {} + if self.mex is None: + return p + for exop in self.mex.xmltree.iterfind('tag[@name="execute_options"]/tag'): + p[exop.get('name')] = exop.get('value') + return p + + def fetchxml(self, url, path=None, **params): + """ + Fetch an xml object from the url + + @param: url - A url to fetch from + @param: path - a location on the file system were one wishes the response to be stored (default: None) + @param: odict - ordered dictionary of params will be added to url for when the order matters + @param: params - params will be added to url + + @return xml etree + """ + url = self.c.prepare_url(url, **params) + log.debug('fetchxml %s ' % url) + if path: + return self.c.fetch(url, headers={'Content-Type':'text/xml', 'Accept':'text/xml'}, path=path) + else: + r = self.c.fetch(url, headers = {'Content-Type':'text/xml', 'Accept':'text/xml'}) + return self.factory.string2etree(r) + + + def postxml(self, url, xml, path=None, method="POST", **params): + """ + Post xml allowed with files to bisque + + @param: url - the url to make to the request + @param: xml - an xml document that is post at the url location (excepts either string or etree._Element) + @param: path - a location on the file system were one wishes the response to be stored (default: None) + @param method - the method of the http request (HEAD,GET,POST,PUT,DELETE,...) (default: POST) + @param: odict - ordered dictionary of params will be added to url for when the order matters + @param: params - params will be added to url + + @return: xml etree or path to the file were the response was stored + """ + + if not isinstance(xml, str): + xml = self.factory.to_string (xml) + + log.debug('postxml %s content %s ' % (url, xml)) + + url = self.c.prepare_url(url, **params) + + try: + r = None + if not self.dryrun: + r = self.c.push(url, content=xml, method=method, path=path, headers={'Content-Type':'text/xml', 'Accept': 'text/xml' }) + if path is not None: + return r + return r and self.factory.string2etree(r) + except etree.ParseError as e: + log.exception("Problem with post response %s", e) + return r + + def deletexml(self, url): + "Delete a resource" + url = self.c.prepare_url(url) + r = self.c.webreq (method='delete', url=url) + return r + + + def fetchblob(self, url, path=None, **params): + """ + Requests for a blob + + @param: url - filename of the blob + @param: path - a location on the file system were one wishes the response to be stored (default: None) + @param: params - params will be added to url query + + @return: contents or filename + """ + url = self.c.prepare_url(url, **params) + return self.c.fetch(url, path=path ) + + + def postblob(self, filename, xml=None, path=None, method="POST", **params): + """ + Create Multipart Post with blob to blob service + + @param filename: filename of the blob + @param xml: xml to be posted along with the file + @param params: params will be added to url query + @return: a uri="URI to BLOB" > + """ + + import_service = self.service ("import") + if xml!=None: + if not isinstance(xml, str): + xml = self.factory.to_string(xml) + response = import_service.transfer (filename=filename, xml=xml) + return response.content + + # import_service_url = self.service_url('import', path='transfer') + # if import_service_url is None: + # raise BQApiError('Could not find import service to post blob.') + # url = self.c.prepare_url(import_service_url, **params) + # if xml!=None: + # if not isinstance(xml, basestring): + # xml = self.factory.to_string(xml) + # fields 
= {} + # if filename is not None: + # filename = normalize_unicode(filename) + # fields['file'] = (filename, open(filename, 'rb'), 'application/octet-stream') + # if xml is not None: + # fields['file_resource'] = xml + # if fields: + # # https://github.com/requests/toolbelt/issues/75 + # m = MultipartEncoder(fields = fields ) + # m._read = m.read + # m.read = lambda size: m._read (8129*1024) # 8MB + # return self.c.push(url, + # content=m, + # headers={'Accept': 'text/xml', 'Content-Type':m.content_type}, + # path=path, method=method) + # raise BQApiError("improper parameters for postblob: must use paramater xml or filename or both ") + + + def service_url(self, service_type, path = "" , query=None): + """ + @param service_type: + @param path: + @param query: + + @return + """ + root = self.service_map.get(service_type, None) + if root is None: + raise BQApiError('Not a service type') + if query: + path = "%s?%s" % (path, urllib.parse.urlencode(query)) + return urllib.parse.urljoin(root, path) + + + def _load_services(self): + """ + @return + """ + services = self.load (posixpath.join(self.bisque_root , "services")) + smap = {} + for service in services.tags: + smap [service.type] = service.value + self.service_map = smap + + def service (self, service_name): + return ServiceFactory.make (self, service_name) + + + ############################# + # Classes and Type + ############################# + def element(self, ty, **attrib): + return etree.Element(ty, **attrib) + + + def append(self, mex, tags=[], gobjects=[], children=[]): + def append_mex (mex, type_tup): + type_, elems = type_tup + for tg in elems: + if isinstance(tg, dict): + tg = d2xml({ type_ : tg}) + elif isinstance(tg, BQNode): + tg = BQFactory.to_etree(tg) + elif isinstance(tg, etree._Element): + pass + else: + raise BQApiError('bad values in tag/gobject list %s' % tg) + mex.append(tg) + + append_mex(mex, ('tag', tags)) + append_mex(mex, ('gobject', gobjects)) + for elem in children: + append_mex(mex, elem) + + + ############################## + # Mex + ############################## + def update_mex(self, status, tags = [], gobjects = [], children=[], reload=False, merge=False): + """save an updated mex with the addition + + @param status: The current status of the mex + @param tags: list of etree.Element|BQTags|dict objects of form { 'name': 'x', 'value':'z' } + @param gobjects: same as etree.Element|BQGobject|dict objects of form { 'name': 'x', 'value':'z' } + @param children: list of tuple (type, obj array) i.e ('mex', dict.. 
) + @param reload: + @param merge: merge "outputs"/"inputs" section if needed + @return + """ + if merge: + mex = self.fetchxml(self.mex.uri, view='deep') # get old version of MEX, so it can be merged if needed + mex.set('value', status) + else: + mex = etree.Element('mex', value = status, uri = self.mex.uri) + #self.mex.value = status + def append_mex (mex, type_tup): + type_, elems = type_tup + for tg in elems: + if isinstance(tg, dict): + tg = d2xml({ type_ : tg}) + elif isinstance(tg, BQNode): + tg = self.factory.to_etree(tg) + elif isinstance(tg, etree._Element): #pylint: disable=protected-access + pass + else: + raise BQApiError('bad values in tag/gobject list %s' % tg) + was_merged = False + if merge and tg.tag == 'tag' and tg.get('name', '') in ['inputs', 'outputs']: + hits = mex.xpath('./tag[@name="%s"]' % tg.get('name', '')) + if hits: + assert len(hits) == 1 + hits[0].extend(list(tg)) + was_merged = True + log.debug("merged '%s' section in MEX", tg.get('name', '')) + if not was_merged: + mex.append(tg) + + append_mex(mex, ('tag', tags)) + append_mex(mex, ('gobject', gobjects)) + for elem in children: + append_mex(mex, elem) + + #mex = { 'mex' : { 'uri' : self.mex.uri, + # 'status' : status, + # 'tag' : tags, + # 'gobject': gobjects }} + content = self.postxml(self.mex.uri, mex, view='deep' if reload else 'short') + if reload and content is not None: + self.mex = self.factory.from_string(content) + return self.mex + return None + + + def finish_mex(self, status="FINISHED", tags=[], gobjects=[], children=[], msg=None ): + """ + @param status: + @param tags: + @param gobject: + @param children: + @param msg: + + @return + """ + if msg is not None: + tags.append( { 'name':'message', 'value': msg }) + try: + return self.update_mex(status, tags=tags, gobjects=gobjects, children=children, reload=False, merge=True) + except BQCommError as ce: + log.error ("Problem during finish mex %s" % ce.response.request.headers) + try: + return self.update_mex( status='FAILED',tags= [ { 'name':'error_message', 'value': "Error during saving (status %s)" % ce.response.status_code } ] ) + except: + log.exception ("Cannot finish/fail Mex ") + + def fail_mex(self, msg): + """ + @param msg: + """ + if msg is not None: + tags = [ { 'name':'error_message', 'value': msg } ] + self.finish_mex( status='FAILED', tags=tags) + + def _begin_mex(self, moduleuri): + """create a mex on the server for this run""" + pass + + + + ############################## + # Module control + ############################## + def run_modules(self, module_list, pre_run=None, post_run=None, callback_fct=None): + """Run one or more modules in parallel. + + :param module_list: List of modules to run + :type module_list: [ { moduleuri: ..., inputs: { param1:val1, param2:val2, ...}, parent_mex: ... }, {...}, ... 
] + :param pre_run: module entrypoint to call before run (or None if no prerun) + :type pre_run: str + :param post_run: module entrypoint to call after run (or None if no postrun) + :type post_run: str + :param callback_fct: function to call on completion (None: block until completion) + :type callback_fct: fct(mex_list=list(str)) + :returns: list of mex URIs, one for each module + :rtype: list(str) + """ + # TODO: create MEX according to params and POST it to module_service + pass + + ############################## + # Resources + ############################## + def query(self, resource_type, **kw): + """Query for a resource + tag_query=None, tag_order=None, offset=None, limit=None + """ + results = [] + queryurl = self.service_url ('data_service', path=resource_type, query=kw) + items = self.fetchxml (queryurl) + for item in items: + results.append (self.factory.from_etree(item)) + return results + + + def load(self, url, **params): + """Load a bisque object + + @param url: + @param params: + + @return + """ + #if view not in url: + # url = url + "?view=%s" % view + try: + xml = self.fetchxml(url, **params) + if xml.tag == "response": + xml = xml[0] + bqo = self.factory.from_etree(xml) + return bqo + except BQCommError as ce: + log.exception('communication issue while loading %s' % ce) + return None + + def delete(self, bqo, url=None, **kw): + "Delete an object and all children" + url = bqo.uri or url + if url is not None: + return self.deletexml(url) + + + def save(self, bqo, url=None, **kw): + """ + @param bqo: + @param url: + @param kw: + + @return + """ + try: + original = bqo + + # Find an object (or parent with a valild uri) + url = url or bqo.uri + if url is None: + while url is None and bqo.parent: + bqo = bqo.parent + url= bqo.parent.uri + if url is None: + url = self.service_url ('data_service') + + xml = self.factory.to_etree(bqo) + xml = self.postxml(url, xml, **kw) + return xml is not None and self.factory.from_etree(xml) + except BQCommError as ce: + log.exception('communication issue while saving %s' , ce) + return None + + def saveblob(self, bqo, filename): + """Save a blob to the server and return metadata structure + """ + + try: + xml = self.factory.to_etree(bqo) + xmlstr = self.postblob (filename=filename, xml= xml) + xmlet = self.factory.string2etree (xmlstr) + if xmlet.tag == 'resource' and xmlet.get ('type') == 'uploaded': + # return inside + bqo = self.factory.from_etree(xmlet[0]) + return bqo + return None + except BQCommError as ce: + log.exception('communication issue while saving %s' , filename) + return None diff --git a/nph_5class/bqapi/comm.py.bak b/nph_5class/bqapi/comm.py.bak new file mode 100644 index 0000000..1b6f60a --- /dev/null +++ b/nph_5class/bqapi/comm.py.bak @@ -0,0 +1,935 @@ +############################################################################### +## Bisquik ## +## Center for Bio-Image Informatics ## +## University of California at Santa Barbara ## +## ------------------------------------------------------------------------- ## +## ## +## Copyright (c) 2007,2008,2009,2010,2011 ## +## by the Regents of the University of California ## +## All rights reserved ## +## ## +## Redistribution and use in source and binary forms, with or without ## +## modification, are permitted provided that the following conditions are ## +## met: ## +## ## +## 1. Redistributions of source code must retain the above copyright ## +## notice, this list of conditions, and the following disclaimer. ## +## ## +## 2. 
Redistributions in binary form must reproduce the above copyright ## +## notice, this list of conditions, and the following disclaimer in ## +## the documentation and/or other materials provided with the ## +## distribution. ## +## ## +## 3. All advertising materials mentioning features or use of this ## +## software must display the following acknowledgement: This product ## +## includes software developed by the Center for Bio-Image Informatics## +## University of California at Santa Barbara, and its contributors. ## +## ## +## 4. Neither the name of the University nor the names of its ## +## contributors may be used to endorse or promote products derived ## +## from this software without specific prior written permission. ## +## ## +## THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS "AS IS" AND ANY ## +## EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ## +## WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ARE ## +## DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ## +## ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL ## +## DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS ## +## OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ## +## HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ## +## STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ## +## ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ## +## POSSIBILITY OF SUCH DAMAGE. ## +## ## +############################################################################### +""" +SYNOPSIS +======== + +DESCRIPTION +=========== + +""" + + +import os +import sys +#import urlparse +#import urllib +import logging +import itertools +import tempfile +import mimetypes +import warnings +import posixpath + +from six.moves import urllib + + +import requests +from requests.auth import HTTPBasicAuth +from requests.auth import AuthBase +from requests import Session +#from requests_toolbelt import MultipartEncoder + +try: + from collections import OrderedDict +except ImportError: + from ordereddict import OrderedDict + + +try: + from lxml import etree +except ImportError: + import xml.etree.ElementTree as etree + +from .types import BQMex, BQNode, BQFactory +from .util import d2xml #parse_qs, make_qs, xml2d, d2xml, normalize_unicode +from .services import ServiceFactory +from .exception import BQCommError, BQApiError +from .RequestsMonkeyPatch import requests_patch#allows multipart form to accept unicode + +try: + from .casauth import caslogin + CAS_SUPPORT=True +except ImportError: + CAS_SUPPORT = False + + +log = logging.getLogger('bqapi.comm') + +#SERVICES = [''] + + +class MexAuth(AuthBase): + """ + Bisque's Mex Authentication + """ + def __init__(self, token, user=None): + """ + Sets a mex authentication for the requests + + @param token: Token for authenticating a mex. The token can contain the user name + and a user name does not have to be provided. + @param user: The user the mex is attached. (default: None) + """ + if user is None: + self.username = "Mex %s"%(token) + elif user in token.split(':')[0]: #check if token contains user + self.username = "Mex %s"%(token) + else: + self.username = "Mex %s:%s"%(user, token) + + def __call__(self, r): + """ + Sets the authorization on the headers of the requests. 
+ @param r: the requests + """ + r.headers['Authorization'] = self.username + return r + + +class BQServer(Session): + """ A reference to Bisque server + Allow communucation with a bisque server + + A wrapper over requests.Session + """ + + def __init__(self): + super(BQServer, self).__init__() + # Disable https session authentication.. + #self.verify = False + self.root = None + + + def authenticate_mex(self, token, user=None): + """ + Sets mex authorization to the requests + + @param token: this can be a combination of both token and user or just the token + @param user: the user attached to the mex + + """ + self.auth = MexAuth(token, user=user) + + + def authenticate_basic(self, user, pwd): + """ + Sets basic authorization along with the request + + @param user: The user for the requests. + @param pwd: The password for the user. + """ + self.auth = HTTPBasicAuth(user, pwd) + + + def prepare_headers(self, user_headers): + """ + + """ + headers = {} + headers.update(self.auth) + if user_headers: + headers.update(user_headers) + return headers + + + def prepare_url(self, url, **params): + """ + Prepares the url + + @param url: if the url is not provided with a root and a root has been provided to the session + the root will be added to the url + @param odict: ordered dictionary object, addes to the query in the order provided + @param params: adds params to query potion of the url + + @return prepared url + """ + + u = urllib.parse.urlsplit(url) + + #root + if u.scheme and u.netloc: + scheme = u.scheme + netloc = u.netloc + elif self.root and u.netloc=='': + #adds root request if no root is provided in the url + r = urllib.parse.urlsplit(self.root) + scheme = r.scheme + netloc = r.netloc + else: #no root provided + raise BQApiError("No root provided") + + #query + query = ['%s=%s'%(k,v) for k,v in urllib.parse.parse_qsl(u.query, True)] + unordered_query = [] + ordered_query = [] + + if 'odict' in params: + odict = params['odict'] + del params['odict'] + if odict and isinstance(odict,OrderedDict): + while len(odict)>0: + ordered_query.append('%s=%s'%odict.popitem(False)) + + if params: + unordered_query = ['%s=%s'%(k,v) for k,v in params.items()] + + query = query + unordered_query + ordered_query + query = '&'.join(query) + + return urllib.parse.urlunsplit([scheme,netloc,u.path,query,u.fragment]) + + + + + def webreq(self, method, url, headers = None, path=None, **params): + """ + Makes a http GET to the url given + + @param url: the url that is fetched + @param headers: headers provided for this specific fetch (default: None) + @param path: the location to where the contents will be stored on the file system (default:None) + if no path is provided the contents of the response will be returned + @param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a (connect timeout, read timeout) tuple + + @return returns either the contents of the rests or the file name if a path is provided + + @exception: BQCommError if the requests returns an error code and message + """ + log.debug("%s: %s req header=%s" , method, url, headers) + timeout = params.get('timeout', None) + r = self.request(method=method, url=url, headers=headers, stream = (path is not None), timeout=timeout) + + try: + r.raise_for_status() + except requests.exceptions.HTTPError: + log.exception ("issue with %s", r) + raise BQCommError(r) + + if path: + with open(path, 'wb') as f: + f.write(r.content) +# for line in r.iter_content(): #really slow +# f.write(line) + return f.name + else: 
+ return r.content + + def fetch(self, url, headers = None, path=None): + return self.webreq(method='get', url=url, headers=headers, path=path) + + def push(self, url, content=None, files=None, headers=None, path=None, method="POST", boundary=None, timeout=None): + """ + Makes a http request + + @param url: the url the request is made with + @param content: an xml document that will be sent along with the url + @param files: a dictonary with the format {filename: file handle or string}, sends as a multipart form + @param headers: headers provided for this specific request (default: None) + @param path: the location to where the contents will be stored on the file system (default:None) + if no path is provided the contents of the response will be returned + @param method: the method of the http request (HEAD,GET,POST,PUT,DELETE,...) (default: POST) + + @return returns either the contents of the rests or the file name if a path is provided + + @exception: BQCommError if the requests returns an error code and message + """ + log.debug("POST %s req %s" % (url, headers)) + + try: #error checking + r = self.request(method, url, data=content, headers=headers, files=files, timeout=timeout) + r.raise_for_status() + except requests.exceptions.HTTPError: + log.exception("In push request: %s %s %s" % (method, url, r.content)) + raise BQCommError(r) + + if path: + with open(path, 'wb') as f: + f.write(r.content) + return f.name + else: + return r.content + + +class BQSession(object): + """ + Top level Bisque communication object + """ + def __init__(self): + self.c = BQServer() + self.mex = None + self.services = {} + self.new = set() + self.dirty = set() + self.deleted = set() + self.bisque_root = None + self.factory = BQFactory(self) + self.dryrun = False + + + ############################ + # Establish a bisque session + ############################ + def _create_mex (self, user, moduleuri): + mex = BQMex() + mex.name = moduleuri or 'script:%s' % " ".join (sys.argv) + mex.status = 'RUNNING' + self.mex = self.save(mex, url=self.service_url('module_service', 'mex')) + if self.mex: + mextoken = self.mex.resource_uniq + self.c.authenticate_mex(mextoken, user) + # May not be needed + for c in range (100): + try: + self.load(url = self.service_url('module_service', path = "/".join (['mex', mextoken]))) + return True + except BQCommError: + pass + return False + def _check_session(self): + """Used to check that session is actuall active""" + r = self.fetchxml (self.service_url("auth_service", 'session')) + users = r.findall('./tag[@name="user"]') + return len(users) > 0 + + def init(self, bisque_url, credentials=None, moduleuri = None, create_mex=False): + """Create session by connect to with bisque_url + + @param bisque_url: The bisque root or MEX url + @param credetials : A tuple (user, pass) or (mex, token) + @param moduleuri : The module URI of the mex for this session + @param create_mex : Create a new Mex session for this run + """ + self.bisque_root = self.c.root = bisque_url + self._load_services() + if credentials: + if credentials[0].lower() == 'mex': + return self.init_mex(bisque_url, credentials[1]) + auth_service = self.service ('auth_service') + logins = auth_service.login_providers (render='xml') + login_type = None + if logins is not None and logins[0]: + login_type = logins[0].get ('type') + if login_type == 'cas': + return self.init_cas (credentials[0], credentials[1], bisque_url, moduleuri=moduleuri, create_mex=create_mex) + return self.init_local (user=credentials[0], 
pwd=credentials[1], bisque_root=bisque_url, moduleuri=moduleuri, create_mex=create_mex) + return self + + + def init_local(self, user, pwd, moduleuri=None, bisque_root=None, create_mex=True): + """ + Initalizes a local session + + @param: user - a bisque user + @param: pwd - the bisque user's password + @param: moduleuri - module uri to be set to the mex (Only matter if create mex is set to true) (moduleuri: None) + @param: bisque_root - the root of the bisque system the user is trying to access (bisque_root: None) + @param: create_mex - creates a mex session under the user (default: True) + + @return: self + """ + + if bisque_root != None: + self.bisque_root = bisque_root + self.c.root = bisque_root + + self.c.authenticate_basic(user, pwd) + self._load_services() + if not self._check_session(): + log.error("Session failed to be created.. please check credentials") + return None + + self.mex = None + + if create_mex: + self._create_mex(user, moduleuri) + + return self + + + def init_mex(self, mex_url, token, user=None, bisque_root=None): + """ + Initalizing a local session from a mex + + @param: mex_url - the mex url to initalize the session from + @param: token - the mex token to access the mex + @param: user - the owner of the mex (Does not have to be provided if already + provided in the token) (default: None) + @param: bisque_root - the root of the bisque system the user is trying to access (bisque_root: None) + + @return self + """ + if bisque_root is None: + # This assumes that bisque_root is http://host.org:port/ + mex_tuple = list(urllib.parse.urlparse(mex_url)) + mex_tuple[2:5] = '','','' + bisque_root = urllib.parse.urlunparse(mex_tuple) + + self.bisque_root = bisque_root + self.c.root = bisque_root + self.c.authenticate_mex(token, user=user) + self._load_services() + self.mex = self.load(mex_url, view='deep') + return self + + + def init_cas(self, user, pwd, moduleuri=None, bisque_root=None, create_mex=False): + """Initalizes a cas session + + @param: user - a bisque user + @param: pwd - the bisque user's password + @param: moduleuri - module uri to be set to the mex (Only matter if create mex is set to true) (moduleuri: None) + @param: bisque_root - the root of the bisque system the user is trying to access (bisque_root: None) + @param: create_mex - creates a mex session under the user (default: True) + @return: self + + Example + >>>from bqapi import BQSession + >>>s = BQSession() + >>>s.init_cas (CASNAME, CASPASS, bisque_root='http://bisque.iplantcollaborative.org', create_mex=False) + >>>s.fetchxml('/data_serice/image', limit=10) + """ + if not CAS_SUPPORT: + raise BQApiError ("CAS not support.. please check installation") + + if bisque_root == None: + raise BQApiError ("cas login requires bisque_root") + + self.bisque_root = bisque_root + self.c.root = bisque_root + + caslogin (self.c, bisque_root + "/auth_service/login", user, pwd) + self._load_services() + if not self._check_session(): + log.error("Session failed to be created.. 
please check credentials") + return None + self.mex = None + + if create_mex: + self._create_mex(user, moduleuri) + return self + + def init_session (self, user, pwd, moduleuri=None, bisque_root=None, create_mex = False): + providers = { 'cas' : self.init_cas, 'internal' : self.init_local } + if bisque_root == None: + raise BQApiError ("cas login requires bisque_root") + + + self.bisque_root = bisque_root + self.c.root = bisque_root + self._load_services() + queryurl = self.service_url('auth_service', 'login_providers') + login_providers = self.fetchxml (queryurl) + login_type = login_providers.find ("./*[@type]") + login_type = login_type.get ('type') if login_type is not None else None + provider = providers.get (login_type, None) + if provider: + return provider (user=user, pwd=pwd, moduleuri=moduleuri, bisque_root=bisque_root, create_mex=create_mex) + + + def close(self): + pass + + def parameter(self, name): + if self.mex is None: + return None + return self.mex.xmltree.find('tag[@name="inputs"]//tag[@name="%s"]'%name) + + def get_value_safe(self, v, t): + try: + if t == 'boolean': + return v.lower() == 'true' + elif t == 'number': + return float(v) + return v + except AttributeError: + return None + + def parameter_value(self, name=None, p=None): + if p is None: + p = self.parameter(name) + else: + name = p.get('name') + + if p is None: return None + values = p.xpath('value') + if len(values)<1: + v = p.get('value') + t = p.get('type', '').lower() + return self.get_value_safe(v, t) + + r = [] + for vv in values: + v = vv.text + t = vv.get('type', '').lower() + r.append(self.get_value_safe(v, t)) + return r + + def parameters(self): + p = {} + if self.mex is None: + return p + inputs = self.mex.xmltree.iterfind('tag[@name="inputs"]//tag') + for i in inputs: + p[i.get('name')] = self.parameter_value(p=i) + return p + + def get_mex_inputs(self): + """ + Get all input parameters in mex. + + @return: map parameter name -> {'type':..., 'value':..., ...} or [ map parameter name -> {'type':..., 'value':..., ...}, ... ] if blocked iter + """ + def _xml2dict(e): + kids = { key:e.attrib[key] for key in e.attrib if key in ['type', 'value'] } + if e.text: + kids['value'] = e.text + for k, g in itertools.groupby(e, lambda x: x.tag): + g = [ _xml2dict(x) for x in g ] + kids[k] = g + return kids + + def _get_mex_params(mextree): + p = {} + for inp in mextree.iterfind('tag[@name="inputs"]/tag'): + p[inp.get('name')] = _xml2dict(inp) + p['mex_url']['value'] = mextree.get('uri') + return p + + # assemble map param name -> param value + if self.mex is None: + return {} + # check if outside is a block mex + if self.mex.xmltree.get('type') == 'block': + res = [] + for inner_mex in self.mex.xmltree.iterfind('./mex'): + res.append(_get_mex_params(inner_mex)) + else: + res = _get_mex_params(self.mex.xmltree) + return res + + def get_mex_execute_options(self): + """ + Get execute options in mex. 
+ + @return: map option name -> value + """ + p = {} + if self.mex is None: + return p + for exop in self.mex.xmltree.iterfind('tag[@name="execute_options"]/tag'): + p[exop.get('name')] = exop.get('value') + return p + + def fetchxml(self, url, path=None, **params): + """ + Fetch an xml object from the url + + @param: url - A url to fetch from + @param: path - a location on the file system were one wishes the response to be stored (default: None) + @param: odict - ordered dictionary of params will be added to url for when the order matters + @param: params - params will be added to url + + @return xml etree + """ + url = self.c.prepare_url(url, **params) + log.debug('fetchxml %s ' % url) + if path: + return self.c.fetch(url, headers={'Content-Type':'text/xml', 'Accept':'text/xml'}, path=path) + else: + r = self.c.fetch(url, headers = {'Content-Type':'text/xml', 'Accept':'text/xml'}) + return self.factory.string2etree(r) + + + def postxml(self, url, xml, path=None, method="POST", **params): + """ + Post xml allowed with files to bisque + + @param: url - the url to make to the request + @param: xml - an xml document that is post at the url location (excepts either string or etree._Element) + @param: path - a location on the file system were one wishes the response to be stored (default: None) + @param method - the method of the http request (HEAD,GET,POST,PUT,DELETE,...) (default: POST) + @param: odict - ordered dictionary of params will be added to url for when the order matters + @param: params - params will be added to url + + @return: xml etree or path to the file were the response was stored + """ + + if not isinstance(xml, basestring): + xml = self.factory.to_string (xml) + + log.debug('postxml %s content %s ' % (url, xml)) + + url = self.c.prepare_url(url, **params) + + try: + r = None + if not self.dryrun: + r = self.c.push(url, content=xml, method=method, path=path, headers={'Content-Type':'text/xml', 'Accept': 'text/xml' }) + if path is not None: + return r + return r and self.factory.string2etree(r) + except etree.ParseError as e: + log.exception("Problem with post response %s", e) + return r + + def deletexml(self, url): + "Delete a resource" + url = self.c.prepare_url(url) + r = self.c.webreq (method='delete', url=url) + return r + + + def fetchblob(self, url, path=None, **params): + """ + Requests for a blob + + @param: url - filename of the blob + @param: path - a location on the file system were one wishes the response to be stored (default: None) + @param: params - params will be added to url query + + @return: contents or filename + """ + url = self.c.prepare_url(url, **params) + return self.c.fetch(url, path=path ) + + + def postblob(self, filename, xml=None, path=None, method="POST", **params): + """ + Create Multipart Post with blob to blob service + + @param filename: filename of the blob + @param xml: xml to be posted along with the file + @param params: params will be added to url query + @return: a uri="URI to BLOB" > + """ + + import_service = self.service ("import") + if xml!=None: + if not isinstance(xml, basestring): + xml = self.factory.to_string(xml) + response = import_service.transfer (filename=filename, xml=xml) + return response.content + + # import_service_url = self.service_url('import', path='transfer') + # if import_service_url is None: + # raise BQApiError('Could not find import service to post blob.') + # url = self.c.prepare_url(import_service_url, **params) + # if xml!=None: + # if not isinstance(xml, basestring): + # xml = 
self.factory.to_string(xml) + # fields = {} + # if filename is not None: + # filename = normalize_unicode(filename) + # fields['file'] = (filename, open(filename, 'rb'), 'application/octet-stream') + # if xml is not None: + # fields['file_resource'] = xml + # if fields: + # # https://github.com/requests/toolbelt/issues/75 + # m = MultipartEncoder(fields = fields ) + # m._read = m.read + # m.read = lambda size: m._read (8129*1024) # 8MB + # return self.c.push(url, + # content=m, + # headers={'Accept': 'text/xml', 'Content-Type':m.content_type}, + # path=path, method=method) + # raise BQApiError("improper parameters for postblob: must use paramater xml or filename or both ") + + + def service_url(self, service_type, path = "" , query=None): + """ + @param service_type: + @param path: + @param query: + + @return + """ + root = self.service_map.get(service_type, None) + if root is None: + raise BQApiError('Not a service type') + if query: + path = "%s?%s" % (path, urllib.parse.urlencode(query)) + return urllib.parse.urljoin(root, path) + + + def _load_services(self): + """ + @return + """ + services = self.load (posixpath.join(self.bisque_root , "services")) + smap = {} + for service in services.tags: + smap [service.type] = service.value + self.service_map = smap + + def service (self, service_name): + return ServiceFactory.make (self, service_name) + + + ############################# + # Classes and Type + ############################# + def element(self, ty, **attrib): + elem = etree.Element(ty, **attrib) + + + def append(self, mex, tags=[], gobjects=[], children=[]): + def append_mex (mex, type_tup): + type_, elems = type_tup + for tg in elems: + if isinstance(tg, dict): + tg = d2xml({ type_ : tg}) + elif isinstance(tg, BQNode): + tg = BQFactory.to_etree(tg) + elif isinstance(tg, etree._Element): + pass + else: + raise BQApiError('bad values in tag/gobject list %s' % tg) + mex.append(tg) + + append_mex(mex, ('tag', tags)) + append_mex(mex, ('gobject', gobjects)) + for elem in children: + append_mex(mex, elem) + + + ############################## + # Mex + ############################## + def update_mex(self, status, tags = [], gobjects = [], children=[], reload=False, merge=False): + """save an updated mex with the addition + + @param status: The current status of the mex + @param tags: list of etree.Element|BQTags|dict objects of form { 'name': 'x', 'value':'z' } + @param gobjects: same as etree.Element|BQGobject|dict objects of form { 'name': 'x', 'value':'z' } + @param children: list of tuple (type, obj array) i.e ('mex', dict.. 
) + @param reload: + @param merge: merge "outputs"/"inputs" section if needed + @return + """ + if merge: + mex = self.fetchxml(self.mex.uri, view='deep') # get old version of MEX, so it can be merged if needed + mex.set('value', status) + else: + mex = etree.Element('mex', value = status, uri = self.mex.uri) + #self.mex.value = status + def append_mex (mex, type_tup): + type_, elems = type_tup + for tg in elems: + if isinstance(tg, dict): + tg = d2xml({ type_ : tg}) + elif isinstance(tg, BQNode): + tg = self.factory.to_etree(tg) + elif isinstance(tg, etree._Element): #pylint: disable=protected-access + pass + else: + raise BQApiError('bad values in tag/gobject list %s' % tg) + was_merged = False + if merge and tg.tag == 'tag' and tg.get('name', '') in ['inputs', 'outputs']: + hits = mex.xpath('./tag[@name="%s"]' % tg.get('name', '')) + if hits: + assert len(hits) == 1 + hits[0].extend(list(tg)) + was_merged = True + log.debug("merged '%s' section in MEX", tg.get('name', '')) + if not was_merged: + mex.append(tg) + + append_mex(mex, ('tag', tags)) + append_mex(mex, ('gobject', gobjects)) + for elem in children: + append_mex(mex, elem) + + #mex = { 'mex' : { 'uri' : self.mex.uri, + # 'status' : status, + # 'tag' : tags, + # 'gobject': gobjects }} + content = self.postxml(self.mex.uri, mex, view='deep' if reload else 'short') + if reload and content is not None: + self.mex = self.factory.from_string(content) + return self.mex + return None + + + def finish_mex(self, status="FINISHED", tags=[], gobjects=[], children=[], msg=None ): + """ + @param status: + @param tags: + @param gobject: + @param children: + @param msg: + + @return + """ + if msg is not None: + tags.append( { 'name':'message', 'value': msg }) + try: + return self.update_mex(status, tags=tags, gobjects=gobjects, children=children, reload=False, merge=True) + except BQCommError as ce: + log.error ("Problem during finish mex %s" % ce.response.request.headers) + try: + return self.update_mex( status='FAILED',tags= [ { 'name':'error_message', 'value': "Error during saving (status %s)" % ce.response.status_code } ] ) + except: + log.exception ("Cannot finish/fail Mex ") + + def fail_mex(self, msg): + """ + @param msg: + """ + if msg is not None: + tags = [ { 'name':'error_message', 'value': msg } ] + self.finish_mex( status='FAILED', tags=tags) + + def _begin_mex(self, moduleuri): + """create a mex on the server for this run""" + pass + + + + ############################## + # Module control + ############################## + def run_modules(self, module_list, pre_run=None, post_run=None, callback_fct=None): + """Run one or more modules in parallel. + + :param module_list: List of modules to run + :type module_list: [ { moduleuri: ..., inputs: { param1:val1, param2:val2, ...}, parent_mex: ... }, {...}, ... 
] + :param pre_run: module entrypoint to call before run (or None if no prerun) + :type pre_run: str + :param post_run: module entrypoint to call after run (or None if no postrun) + :type post_run: str + :param callback_fct: function to call on completion (None: block until completion) + :type callback_fct: fct(mex_list=list(str)) + :returns: list of mex URIs, one for each module + :rtype: list(str) + """ + # TODO: create MEX according to params and POST it to module_service + pass + + ############################## + # Resources + ############################## + def query(self, resource_type, **kw): + """Query for a resource + tag_query=None, tag_order=None, offset=None, limit=None + """ + results = [] + queryurl = self.service_url ('data_service', path=resource_type, query=kw) + items = self.fetchxml (queryurl) + for item in items: + results.append (self.factory.from_etree(item)) + return results + + + def load(self, url, **params): + """Load a bisque object + + @param url: + @param params: + + @return + """ + #if view not in url: + # url = url + "?view=%s" % view + try: + xml = self.fetchxml(url, **params) + if xml.tag == "response": + xml = xml[0] + bqo = self.factory.from_etree(xml) + return bqo + except BQCommError as ce: + log.exception('communication issue while loading %s' % ce) + return None + + def delete(self, bqo, url=None, **kw): + "Delete an object and all children" + url = bqo.uri or url + if url is not None: + return self.deletexml(url) + + + def save(self, bqo, url=None, **kw): + """ + @param bqo: + @param url: + @param kw: + + @return + """ + try: + original = bqo + + # Find an object (or parent with a valild uri) + url = url or bqo.uri + if url is None: + while url is None and bqo.parent: + bqo = bqo.parent + url= bqo.parent.uri + if url is None: + url = self.service_url ('data_service') + + xml = self.factory.to_etree(bqo) + xml = self.postxml(url, xml, **kw) + return xml is not None and self.factory.from_etree(xml) + except BQCommError as ce: + log.exception('communication issue while saving %s' , ce) + return None + + def saveblob(self, bqo, filename): + """Save a blob to the server and return metadata structure + """ + + try: + xml = self.factory.to_etree(bqo) + xmlstr = self.postblob (filename=filename, xml= xml) + xmlet = self.factory.string2etree (xmlstr) + if xmlet.tag == 'resource' and xmlet.get ('type') == 'uploaded': + # return inside + bqo = self.factory.from_etree(xmlet[0]) + return bqo + return None + except BQCommError as ce: + log.exception('communication issue while saving %s' , filename) + return None diff --git a/nph_5class/bqapi/exception.py b/nph_5class/bqapi/exception.py new file mode 100644 index 0000000..1826ccc --- /dev/null +++ b/nph_5class/bqapi/exception.py @@ -0,0 +1,30 @@ +class BQException(Exception): + """ + BQException + """ + +class BQApiError(BQException): + """Exception in API usage""" + + + +class BQCommError(BQException): + + def __init__(self, response): + """ + @param: status - error code + @param: headers - dictionary of response headers + @param: content - body of the response (default: None) + + """ + #print 'Status: %s'%status + #print 'Headers: %s'%headers + self.response = response + + + def __str__(self): + content = "%s...%s" % (self.response.content[:64], self.response.content[-64:]) if len (self.response.content) > 64 else self.response.content + return "BQCommError(%s, status=%s, req headers=%s)%s" % (self.response.url, + self.response.status_code, + self.response.request.headers, + content ) diff --git 
a/nph_5class/bqapi/services.py b/nph_5class/bqapi/services.py new file mode 100644 index 0000000..c4a80d9 --- /dev/null +++ b/nph_5class/bqapi/services.py @@ -0,0 +1,347 @@ +import os +#import urllib +#import urlparse + +import random +import string +import logging +import tempfile +import json +import shutil + +from six.moves import urllib + +try: + from lxml import etree +except ImportError: + import xml.etree.ElementTree as etree + +try: + import tables +except ImportError: + logging.warn ("pytables services not available") + +from requests_toolbelt import MultipartEncoder +from .util import normalize_unicode +from .exception import BQCommError + + + +#DEFAULT_TIMEOUT=None +DEFAULT_TIMEOUT=60*60 # 1 hour + +#### +#### KGK +#### Still working on filling this out +#### would be cool to have service definition language to make these. +#### TODO more service, renders etc. + +class BaseServiceProxy(object): + + def __init__(self, session, service_name, timeout=DEFAULT_TIMEOUT): + self.session = session + self.service_url = session.service_map [service_name] + self.service_name = service_name + self.timeout = timeout + + def construct(self, path, params=None): + url = self.service_url + if params: + path = "%s?%s" % (path, urllib.parse.urlencode(params)) + if path: + url = urllib.parse.urljoin (url, path) + return url + + def request (self, path=None, params=None, method='get', render=None, **kw): + """ + @param path: a path on the service + @param params: a diction of value to encode as params + @return a reuqest.response + """ + if path and path[0] == "/": + path = path[1:] + if path: + path = urllib.parse.urljoin (self.service_url, path) + else: + path = self.service_url + + # no longer in session https://github.com/requests/requests/issues/3341 + timeout = kw.pop('timeout', self.timeout) + headers = kw.pop('headers', self.session.c.headers) + if render in ("xml", 'etree'): + headers.update ({'Content-Type':'text/xml', 'Accept': 'text/xml'}) + + try: + response = self.session.c.request (url=path, params=params, method=method, timeout=timeout, headers=headers, **kw) + if render in ("xml", 'etree'): + return etree.fromstring (response.content) + return response + except etree.ParseError: + #self.session.log.error ("xml parse error in %s", response.content) + raise BQCommError(response) + + def fetch(self, path=None, params=None, render=None, **kw): + return self.request(path=path, params=params, render=render, **kw) + def get(self, path=None, params=None, render=None, **kw): + return self.request(path=path, params=params, render=render, **kw) + def post(self, path=None, params=None, render=None, **kw): + return self.request(path=path, params=params, render=render, method='post', **kw) + def put(self, path=None, params=None, render=None, **kw): + return self.request(path=path, params=params, render=render, method='put', **kw) + def delete(self, path=None, params=None, render=None, **kw): + return self.request(path=path, params=params, render=render, method='delete', **kw) + + +class AdminProxy (BaseServiceProxy): + def login_as (self, user_name): + data = self.session.service ('data_service') + userxml = data.fetch ("user", params = { 'wpublic' :'1', 'resource_name': user_name}, render="xml") + user_uniq = userxml.find ("user").get ('resource_uniq') + self.fetch ('/user/{}/login'.format(user_uniq)) + + +class AuthProxy (BaseServiceProxy): + def login_providers (self, **kw): + return self.request ('login_providers', **kw) + + def credentials (self, **kw): + return self.request ('credentials', 
**kw) + + def get_session (self, **kw): # hides session + return self.request ('session', **kw) + +class BlobProxy (BaseServiceProxy): + def _resource_element (self, args_tag_file=None, args_resource_type=None, args_srcpath=None, **kw): + """Check the args and create a compatible resource element for posting or linking + """ + if args_tag_file: + # Load file into resource + try: + resource = etree.parse (args_tag_file).getroot() + except etree.ParseError as pe: + raise BQCommError('Parse failure: aborting: ') + else: + resource = etree.Element (args_resource_type or 'resource') + + for fld in ('permission', 'hidden'): + if fld in kw: + resource.set (fld, kw.get(fld)) + if args_srcpath: + resource.set('value', args_srcpath) + resource.set('name', os.path.basename (args_srcpath)) + return resource + + def path_link(self, srcpath, alias=None, resource_type=None, tag_file=None): + url = urllib.parse.urljoin( self.session.service_map['blob_service'], 'paths/insert' ) + params = {} + resource = self._resource_element(args_srcpath=srcpath, args_resource_type=resource_type, args_tag_file=tag_file) + payload = etree.tostring (resource) + if alias: + params['user'] = alias + r = self.post(url, data=payload, params=params, headers={'content-type': 'application/xml'}) + return r + + def path_delete(self, srcpath, alias=None): + url = urllib.parse.urljoin( self.session.service_map['blob_service'], 'paths/remove' ) + params = {'path': srcpath} + if alias: + params['user'] = alias + r = self.get(url, params=params) + return r + + def path_rename(self, srcpath, dstpath, alias=None): + url = urllib.parse.urljoin( self.session.service_map['blob_service'], 'paths/move' ) + params = {'path': srcpath, 'destination': dstpath} + if alias: + params['user'] = alias + r = self.get(url, params=params) + return r + + def path_list(self, srcpath, alias=None): + url = urllib.parse.urljoin( self.session.service_map['blob_service'], 'paths/list' ) + params = { 'path' : srcpath } + if alias: + params['user'] = alias + r = self.get(url, params=params) + return r + +def id_generator(size=6, chars=string.ascii_uppercase + string.digits): + return ''.join(random.choice(chars) for _ in range(size)) + +class ImportProxy(BaseServiceProxy): + def transfer (self, filename, fileobj=None, xml=None): + fields = {} + if fileobj is None and filename is None: + raise BQCommError('Filename or fileobj are required for transfer') + if fileobj is None and os.path.exists (filename): + fileobj = open (filename, 'rb') + if fileobj is not None and filename is None: + filename = fileobj.name + + if fileobj is not None: + filename = normalize_unicode(filename) + fields['file'] = (os.path.basename(filename), fileobj, 'application/octet-stream') + if xml is not None: + fields['file_resource'] = xml + if fields: + # https://github.com/requests/toolbelt/issues/75 + m = MultipartEncoder(fields = fields ) + m._read = m.read #pylint: disable=protected-access + m.read = lambda size: m._read (8129*1024) # 8MB + # ID generator is used to force load balancing operations + response = self.post("transfer_"+id_generator(), + data=m, + headers={'Accept': 'text/xml', 'Content-Type':m.content_type}) + return response + +class DatasetProxy (BaseServiceProxy): + + def delete (self, dataset_uniq, members=False, **kw): + if members: + params = kw.pop('params', {}) + params['duri'] = dataset_uniq + return self.fetch("delete", params=params, **kw) + data = self.session.service('data_service') + return data.delete (dataset_uniq) + + def append_member (self, 
dataset_uniq, resource_uniq, **kw):
+        """Append an element
+        """
+        data = self.session.service('data_service')
+        member = etree.Element('value', type='object')
+        member.text = data.construct (resource_uniq)
+        self.post (dataset_uniq, data=etree.tostring(member), render='etree')
+
+    def delete_member (self, dataset_uniq, resource_uniq, **kw):
+        """Delete a member..
+        @return new dataset if success or None
+        """
+        data = self.session.service('data_service')
+        dataset = data.fetch ( dataset_uniq, params = {'view':'full'}, render='etree')
+        members = dataset.xpath ('value[text()="%s"]' % data.construct (resource_uniq))
+        for member in members:
+            dataset.remove (member)
+        if len (members):
+            for val in dataset.iter ('value'):
+                _ = val.attrib.pop ('index', 0)
+            return data.put (dataset_uniq, data = etree.tostring (dataset), render='etree')
+        return None
+
+
+class ModuleProxy (BaseServiceProxy):
+    def execute (self, module_name, **module_parms):
+        pass
+    def register(self, engine_url):
+        return self.request (path='register_engine', params = { 'engine_url':engine_url })
+    def unregister (self, engine_url):
+        return self.request (path='unregister_engine', params = { 'engine_url':engine_url })
+
+
+
+
+class TableProxy (BaseServiceProxy):
+    def load_array(self, table_uniq, path, slices=[]):
+        """
+        Load array from BisQue.
+        """
+        if table_uniq.startswith('http'):
+            table_uniq = table_uniq.split('/')[-1]
+        slice_list = []
+        for single_slice in slices:
+            if isinstance(single_slice, slice):
+                slice_list.append("%s;%s" % (single_slice.start or '', '' if single_slice.stop is None else single_slice.stop-1))
+            elif isinstance(single_slice, int):
+                slice_list.append("%s;%s" % (single_slice, single_slice))
+            else:
+                raise BQCommError("malformed slice parameter")
+        path = '/'.join([table_uniq.strip('/'), path.strip('/')])
+        info_url = '/'.join([path, 'info', 'format:json'])
+        response = self.get(info_url)
+        try:
+            num_dims = len(json.loads(response.content).get('sizes'))
+        except ValueError:
+            raise BQCommError('array could not be read')
+        # fill slices with missing dims
+        for _ in range(num_dims-len(slice_list)):
+            slice_list.append(';')
+        data_url = '/'.join([path, ','.join(slice_list), 'format:hdf'])
+        response = self.get(data_url)
+        # convert HDF5 to Numpy array (preserve indices??)
+        with tables.open_file('array.h5', driver="H5FD_CORE", driver_core_image=response.content, driver_core_backing_store=0) as h5file:
+            return h5file.root.array.read()
+
+    def store_array(self, array, name):
+        """
+        Store numpy array in BisQue and return resource doc.
+ """ + try: + dirpath = tempfile.mkdtemp() + # (1) store array as HDF5 file + out_file = os.path.join(dirpath, "%s.h5" % name) # importer needs extension .h5 + with tables.open_file(out_file, "w", filters = tables.Filters(complevel=5)) as h5file: # compression level 5 + h5file.create_array(h5file.root, name, array) + # (2) call bisque importer with file + importer = self.session.service('import') + response = importer.transfer(out_file) + # (3) return resource xml + res = etree.fromstring (response.content) + if res.tag != 'resource' or res.get('type') != 'uploaded': + raise BQCommError('array could not be stored') + else: + return res[0] + finally: + if os.path.isfile(out_file): + os.remove(out_file) + os.rmdir(dirpath) + + +class ImageProxy(BaseServiceProxy): + def get_thumbnail (self, image_uniq, **kw): + url = urllib.parse.urljoin( self.session.service_map['image_service'], image_uniq, 'thumbnail' ) + r = self.get(url) + return r + +class ExportProxy(BaseServiceProxy): + valid_param = set (['files', 'datasets', 'dirs', 'urls', 'users']) + def fetch_export(self, **kw): + params = { key:val for key,val in list(kw.items()) if key in self.valid_param and val is not None } + response = self.fetch ('stream', params = params, stream=kw.pop ('stream', True) ) + return response + def fetch_export_local(self, localpath, stream=True, **kw): + response = self.fetch_export (stream=stream, **kw ) + with open(localpath, 'wb') as f: + shutil.copyfileobj(response.raw, f) + return localpath + +SERVICE_PROXIES = { + 'admin' : AdminProxy, + 'auth_service' : AuthProxy, + 'import' : ImportProxy, + 'blob_service': BlobProxy, + 'dataset_service': DatasetProxy, + 'table': TableProxy, + 'image_service' : ImageProxy, + 'export' : ExportProxy, +} + +class ServiceFactory (object): + @classmethod + def make (cls, session, service_name): + svc = SERVICE_PROXIES.get (service_name, BaseServiceProxy) + if service_name in session.service_map: + return svc (session, service_name ) + return None + + +def test_module(): + from bqapi import BQSession + session = BQSession ().init_local ('admin', 'admin', 'http://localhost:8080') + admin = session.service('admin') + data = session.service('data_service') + #admin.user(uniq).login().fetch () + xml = data.get ("user", params = {'wpublic':'1', 'resource_name' : 'admin'}, render='xml') + user_uniq = xml.find ("user").get ('resource_uniq') + admin.fetch ('/user/{}/login'.format( user_uniq)) + +if __name__ == "__main__": + test_module() diff --git a/nph_5class/bqapi/services.py.bak b/nph_5class/bqapi/services.py.bak new file mode 100644 index 0000000..3ed2d1e --- /dev/null +++ b/nph_5class/bqapi/services.py.bak @@ -0,0 +1,347 @@ +import os +#import urllib +#import urlparse + +import random +import string +import logging +import tempfile +import json +import shutil + +from six.moves import urllib + +try: + from lxml import etree +except ImportError: + import xml.etree.ElementTree as etree + +try: + import tables +except ImportError: + logging.warn ("pytables services not available") + +from requests_toolbelt import MultipartEncoder +from .util import normalize_unicode +from .exception import BQCommError + + + +#DEFAULT_TIMEOUT=None +DEFAULT_TIMEOUT=60*60 # 1 hour + +#### +#### KGK +#### Still working on filling this out +#### would be cool to have service definition language to make these. +#### TODO more service, renders etc. 
+ +class BaseServiceProxy(object): + + def __init__(self, session, service_name, timeout=DEFAULT_TIMEOUT): + self.session = session + self.service_url = session.service_map [service_name] + self.service_name = service_name + self.timeout = timeout + + def construct(self, path, params=None): + url = self.service_url + if params: + path = "%s?%s" % (path, urllib.parse.urlencode(params)) + if path: + url = urllib.parse.urljoin (url, path) + return url + + def request (self, path=None, params=None, method='get', render=None, **kw): + """ + @param path: a path on the service + @param params: a diction of value to encode as params + @return a reuqest.response + """ + if path and path[0] == "/": + path = path[1:] + if path: + path = urllib.parse.urljoin (self.service_url, path) + else: + path = self.service_url + + # no longer in session https://github.com/requests/requests/issues/3341 + timeout = kw.pop('timeout', self.timeout) + headers = kw.pop('headers', self.session.c.headers) + if render in ("xml", 'etree'): + headers.update ({'Content-Type':'text/xml', 'Accept': 'text/xml'}) + + try: + response = self.session.c.request (url=path, params=params, method=method, timeout=timeout, headers=headers, **kw) + if render in ("xml", 'etree'): + return etree.fromstring (response.content) + return response + except etree.ParseError: + #self.session.log.error ("xml parse error in %s", response.content) + raise BQCommError(response) + + def fetch(self, path=None, params=None, render=None, **kw): + return self.request(path=path, params=params, render=render, **kw) + def get(self, path=None, params=None, render=None, **kw): + return self.request(path=path, params=params, render=render, **kw) + def post(self, path=None, params=None, render=None, **kw): + return self.request(path=path, params=params, render=render, method='post', **kw) + def put(self, path=None, params=None, render=None, **kw): + return self.request(path=path, params=params, render=render, method='put', **kw) + def delete(self, path=None, params=None, render=None, **kw): + return self.request(path=path, params=params, render=render, method='delete', **kw) + + +class AdminProxy (BaseServiceProxy): + def login_as (self, user_name): + data = self.session.service ('data_service') + userxml = data.fetch ("user", params = { 'wpublic' :'1', 'resource_name': user_name}, render="xml") + user_uniq = userxml.find ("user").get ('resource_uniq') + self.fetch ('/user/{}/login'.format(user_uniq)) + + +class AuthProxy (BaseServiceProxy): + def login_providers (self, **kw): + return self.request ('login_providers', **kw) + + def credentials (self, **kw): + return self.request ('credentials', **kw) + + def get_session (self, **kw): # hides session + return self.request ('session', **kw) + +class BlobProxy (BaseServiceProxy): + def _resource_element (self, args_tag_file=None, args_resource_type=None, args_srcpath=None, **kw): + """Check the args and create a compatible resource element for posting or linking + """ + if args_tag_file: + # Load file into resource + try: + resource = etree.parse (args_tag_file).getroot() + except etree.ParseError as pe: + raise BQCommError('Parse failure: aborting: ') + else: + resource = etree.Element (args_resource_type or 'resource') + + for fld in ('permission', 'hidden'): + if fld in kw: + resource.set (fld, kw.get(fld)) + if args_srcpath: + resource.set('value', args_srcpath) + resource.set('name', os.path.basename (args_srcpath)) + return resource + + def path_link(self, srcpath, alias=None, resource_type=None, 
tag_file=None): + url = urllib.parse.urljoin( self.session.service_map['blob_service'], 'paths/insert' ) + params = {} + resource = self._resource_element(args_srcpath=srcpath, args_resource_type=resource_type, args_tag_file=tag_file) + payload = etree.tostring (resource) + if alias: + params['user'] = alias + r = self.post(url, data=payload, params=params, headers={'content-type': 'application/xml'}) + return r + + def path_delete(self, srcpath, alias=None): + url = urllib.parse.urljoin( self.session.service_map['blob_service'], 'paths/remove' ) + params = {'path': srcpath} + if alias: + params['user'] = alias + r = self.get(url, params=params) + return r + + def path_rename(self, srcpath, dstpath, alias=None): + url = urllib.parse.urljoin( self.session.service_map['blob_service'], 'paths/move' ) + params = {'path': srcpath, 'destination': dstpath} + if alias: + params['user'] = alias + r = self.get(url, params=params) + return r + + def path_list(self, srcpath, alias=None): + url = urllib.parse.urljoin( self.session.service_map['blob_service'], 'paths/list' ) + params = { 'path' : srcpath } + if alias: + params['user'] = alias + r = self.get(url, params=params) + return r + +def id_generator(size=6, chars=string.ascii_uppercase + string.digits): + return ''.join(random.choice(chars) for _ in range(size)) + +class ImportProxy(BaseServiceProxy): + def transfer (self, filename, fileobj=None, xml=None): + fields = {} + if fileobj is None and filename is None: + raise BQCommError('Filename or fileobj are required for transfer') + if fileobj is None and os.path.exists (filename): + fileobj = open (filename, 'rb') + if fileobj is not None and filename is None: + filename = fileobj.name + + if fileobj is not None: + filename = normalize_unicode(filename) + fields['file'] = (os.path.basename(filename), fileobj, 'application/octet-stream') + if xml is not None: + fields['file_resource'] = xml + if fields: + # https://github.com/requests/toolbelt/issues/75 + m = MultipartEncoder(fields = fields ) + m._read = m.read #pylint: disable=protected-access + m.read = lambda size: m._read (8129*1024) # 8MB + # ID generator is used to force load balancing operations + response = self.post("transfer_"+id_generator(), + data=m, + headers={'Accept': 'text/xml', 'Content-Type':m.content_type}) + return response + +class DatasetProxy (BaseServiceProxy): + + def delete (self, dataset_uniq, members=False, **kw): + if members: + params = kw.pop('params', {}) + params['duri'] = dataset_uniq + return self.fetch("delete", params=params, **kw) + data = self.session.service('data_service') + return data.delete (dataset_uniq) + + def append_member (self, dataset_uniq, resource_uniq, **kw): + """Append an element + """ + data = self.session.service('data_service') + member = etree.Element('value', type='object') + member.text = data.contruct (resource_uniq) + self.post (dataset_uniq, data=etree.tostring(member), render='etree') + + def delete_member (self, dataset_uniq, resource_uniq, **kw): + """Delete a member.. 
+ @return new dataset if success or None + """ + data = self.session.service('data_service') + dataset = data.fetch ( dataset_uniq, params = {'view':'full'}, render='etree') + members = dataset.xpath ('value[text()="%s"]' % data.construct (resource_uniq)) + for member in members: + dataset.remove (member) + if len (members): + for val in dataset.iter ('value'): + _ = val.attrib.pop ('index', 0) + return data.put (dataset_uniq, data = etree.tostring (dataset), render='etree') + return None + + +class ModuleProxy (BaseServiceProxy): + def execute (self, module_name, **module_parms): + pass + def register(self, engine_url): + return self.request (path='register_engine', params = { 'engine_url':engine_url }) + def unregister (self, engine_url): + return self.request (path='unregister_engine', params = { 'engine_url':engine_url }) + + + + +class TableProxy (BaseServiceProxy): + def load_array(self, table_uniq, path, slices=[]): + """ + Load array from BisQue. + """ + if table_uniq.startswith('http'): + table_uniq = table_uniq.split('/')[-1] + slice_list = [] + for single_slice in slices: + if isinstance(single_slice, slice): + slice_list.append("%s;%s" % (single_slice.start or '', '' if single_slice.stop is None else single_slice.stop-1)) + elif isinstance(single_slice, int): + slice_list.append("%s;%s" % (single_slice, single_slice)) + else: + raise BQCommError("malformed slice parameter") + path = '/'.join([table_uniq.strip('/'), path.strip('/')]) + info_url = '/'.join([path, 'info', 'format:json']) + response = self.get(info_url) + try: + num_dims = len(json.loads(response.content).get('sizes')) + except ValueError: + raise BQCommError('array could not be read') + # fill slices with missing dims + for _ in range(num_dims-len(slice_list)): + slice_list.append(';') + data_url = '/'.join([path, ','.join(slice_list), 'format:hdf']) + response = self.get(data_url) + # convert HDF5 to Numpy array (preserve indices??) + with tables.open_file('array.h5', driver="H5FD_CORE", driver_core_image=response.content, driver_core_backing_store=0) as h5file: + return h5file.root.array.read() + + def store_array(self, array, name): + """ + Store numpy array in BisQue and return resource doc. 
+ """ + try: + dirpath = tempfile.mkdtemp() + # (1) store array as HDF5 file + out_file = os.path.join(dirpath, "%s.h5" % name) # importer needs extension .h5 + with tables.open_file(out_file, "w", filters = tables.Filters(complevel=5)) as h5file: # compression level 5 + h5file.create_array(h5file.root, name, array) + # (2) call bisque importer with file + importer = self.session.service('import') + response = importer.transfer(out_file) + # (3) return resource xml + res = etree.fromstring (response.content) + if res.tag != 'resource' or res.get('type') != 'uploaded': + raise BQCommError('array could not be stored') + else: + return res[0] + finally: + if os.path.isfile(out_file): + os.remove(out_file) + os.rmdir(dirpath) + + +class ImageProxy(BaseServiceProxy): + def get_thumbnail (self, image_uniq, **kw): + url = urllib.parse.urljoin( self.session.service_map['image_service'], image_uniq, 'thumbnail' ) + r = self.get(url) + return r + +class ExportProxy(BaseServiceProxy): + valid_param = set (['files', 'datasets', 'dirs', 'urls', 'users']) + def fetch_export(self, **kw): + params = { key:val for key,val in kw.items() if key in self.valid_param and val is not None } + response = self.fetch ('stream', params = params, stream=kw.pop ('stream', True) ) + return response + def fetch_export_local(self, localpath, stream=True, **kw): + response = self.fetch_export (stream=stream, **kw ) + with open(localpath, 'wb') as f: + shutil.copyfileobj(response.raw, f) + return localpath + +SERVICE_PROXIES = { + 'admin' : AdminProxy, + 'auth_service' : AuthProxy, + 'import' : ImportProxy, + 'blob_service': BlobProxy, + 'dataset_service': DatasetProxy, + 'table': TableProxy, + 'image_service' : ImageProxy, + 'export' : ExportProxy, +} + +class ServiceFactory (object): + @classmethod + def make (cls, session, service_name): + svc = SERVICE_PROXIES.get (service_name, BaseServiceProxy) + if service_name in session.service_map: + return svc (session, service_name ) + return None + + +def test_module(): + from bqapi import BQSession + session = BQSession ().init_local ('admin', 'admin', 'http://localhost:8080') + admin = session.service('admin') + data = session.service('data_service') + #admin.user(uniq).login().fetch () + xml = data.get ("user", params = {'wpublic':'1', 'resource_name' : 'admin'}, render='xml') + user_uniq = xml.find ("user").get ('resource_uniq') + admin.fetch ('/user/{}/login'.format( user_uniq)) + +if __name__ == "__main__": + test_module() diff --git a/nph_5class/bqapi/tests/__init__.py b/nph_5class/bqapi/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/nph_5class/bqapi/tests/conftest.py b/nph_5class/bqapi/tests/conftest.py new file mode 100644 index 0000000..e2ee066 --- /dev/null +++ b/nph_5class/bqapi/tests/conftest.py @@ -0,0 +1,30 @@ +## +## Add local fixtures here +import pytest +from collections import OrderedDict, namedtuple + +from bq.util.bunch import Bunch +from bq.util.mkdir import _mkdir +from .util import fetch_file +from bqapi import BQServer + +@pytest.fixture(scope="module") +def server(): + return BQServer() + + +LocalFile = namedtuple('LocalFile', ['name', 'location']) + +@pytest.fixture(scope="module") +def stores(config): + samples = config.store.samples_url + inputs = config.store.input_dir + results = config.store.results_dir + _mkdir(results) + + files = [] + for name in [ x.strip() for x in config.store.files.split() ]: + print "Fetching", name + files.append (LocalFile (name, fetch_file(name, samples, inputs))) + + return 
Bunch(samples=samples, inputs=inputs, results=results, files=files) diff --git a/nph_5class/bqapi/tests/setup.cfg.sample b/nph_5class/bqapi/tests/setup.cfg.sample new file mode 100644 index 0000000..5fb4ed5 --- /dev/null +++ b/nph_5class/bqapi/tests/setup.cfg.sample @@ -0,0 +1,14 @@ +[Host] +root: http://localhost:8080 +user: test +password: test + +[Store] +location: http://hammer.ece.ucsb.edu/~bisque/test_data/images/ +local_location: SampleData +results_location: Results + +#simple rgb image +filename1: flowers_24bit_nointr.png + +[nosetests] diff --git a/nph_5class/bqapi/tests/test_bqapi.py b/nph_5class/bqapi/tests/test_bqapi.py new file mode 100644 index 0000000..bdc494d --- /dev/null +++ b/nph_5class/bqapi/tests/test_bqapi.py @@ -0,0 +1,33 @@ +import pytest + +from lxml import etree +from bqapi import BQSession +from bqapi.bqclass import BQFactory +from tg import config + + +pytestmark = pytest.mark.functional + + + +def test_load (session): + 'Check that loading works' + + #host = config.get ('host.root') + #user = config.get ('host.user') + #passwd = config.get ('host.password') + #bq = BQSession() + #bq.init_local (user, passwd, bisque_root = host, create_mex = False) + x = session.load ('/data_service/image/?limit=10') + print "loading /data_service/images->", BQFactory.to_string((x)) + + +def test_load_pixels(session): + 'check that you can load pixels from an image' + #bq = BQSession() + x = session.load ('/data_service/image/?limit=10') + + if len(x.kids): + i0 = x.kids[0] + pixels = i0.pixels().slice(z=1,t=1).fetch() + print len(pixels) diff --git a/nph_5class/bqapi/tests/test_bqclass.py b/nph_5class/bqapi/tests/test_bqclass.py new file mode 100644 index 0000000..893077d --- /dev/null +++ b/nph_5class/bqapi/tests/test_bqclass.py @@ -0,0 +1,34 @@ +import pytest + +from lxml import etree +from bqapi.bqclass import BQFactory + +pytestmark = pytest.mark.unit + + +X=""" + + + + + + +""" + + + +def test_conversion(): + 'test simple xml conversions' + print "ORIGINAL" + print X + + factory = BQFactory(None) + + r = factory.from_string(X) + print "PARSED" + + x = factory.to_string (r) + + print "XML" + print r + assert x == X.translate(None, '\r\n') diff --git a/nph_5class/bqapi/tests/test_bqfeature.py b/nph_5class/bqapi/tests/test_bqfeature.py new file mode 100644 index 0000000..4a03d8f --- /dev/null +++ b/nph_5class/bqapi/tests/test_bqfeature.py @@ -0,0 +1,198 @@ +import os +import numpy as np +import urllib +from util import fetch_file +from lxml import etree +import ConfigParser +from datetime import datetime + + +from collections import OrderedDict, namedtuple +import pytest +import nose +from nose import with_setup + +from bq.util.mkdir import _mkdir +from bqapi import BQSession, BQServer +from bqapi.util import fetch_dataset +from bqapi.comm import BQCommError +from bqapi.util import * +from bqapi.bqfeature import * + + + +TEST_PATH = 'tests_%s'%urllib.quote(datetime.now().strftime('%Y%m%d%H%M%S%f')) #set a test dir on the system so not too many repeats occur + +pytestmark = pytest.mark.skip("Unported tests") +#pytestmark = pytest.mark.functional + +#setup comm test +def setUp(): + global results_location + global store_local_location + global file1_location + global filename1 + global bqsession + global FeatureResource + + config = ConfigParser.ConfigParser() + config.read('setup.cfg') + root = config.get('Host', 'root') or 'localhost:8080' + user = config.get('Host', 'user') or 'test' + pwd = config.get('Host', 'password') or 'test' + results_location = config.get('Store', 
'results_location') or 'Results' + _mkdir(results_location) + + store_location = config.get('Store', 'location') or None + if store_location is None: raise NameError('Requre a store location to run test properly') + + store_local_location = config.get('Store', 'local_location') or 'SampleData' + filename1 = config.get('Store','filename1') or None + if filename1 is None: raise NameError('Requre an image to run test properly') + file1_location = fetch_file(filename1, store_location, store_local_location) + + FeatureResource = namedtuple('FeatureResource',['image','mask','gobject']) + FeatureResource.__new__.__defaults__ = (None, None, None) + #start session + bqsession = BQSession().init_local(user, pwd, bisque_root=root, create_mex=False) + +def setup_bqfeature_fetch(): + """ + uploads an image + """ + global resource_list + resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, filename1)) + content = bqsession.postblob(file1_location, xml=resource) + uniq = etree.XML(content)[0].attrib['resource_uniq'] + image_uri = '%s/image_service/image/%s'%(bqsession.bisque_root,uniq) + resource_list = [FeatureResource(image=image_uri)] + + +def teardown_bqfeature_fetch(): + pass + + +@with_setup(setup_bqfeature_fetch, teardown_bqfeature_fetch) +def test_bqfeature_fetch_1(): + """ + Test feature fetch and returning hdf5 file + """ + filename = 'bqfeature_fetch_1.h5' + path = os.path.join(results_location, filename) + filename = Feature().fetch(bqsession, 'SimpleTestFeature', resource_list, path=path) + + +@with_setup(setup_bqfeature_fetch, teardown_bqfeature_fetch) +def test_bqfeature_fetch_2(): + """ + Test feature fetch and returning pytables object + """ + hdf5 = Feature().fetch(bqsession, 'SimpleTestFeature', resource_list) + hdf5.close() + os.remove(hdf5.filename) + +def setup_bqfeature_fetchvector(): + """ + uploads an image + """ + global resource_list + resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, filename1)) + content = bqsession.postblob(file1_location, xml=resource) + uniq = etree.XML(content)[0].attrib['resource_uniq'] + image_uri = '%s/image_service/image/%s'%(bqsession.bisque_root,uniq) + resource_list = [FeatureResource(image=image_uri)] + + +def teardown_bqfeature_fetchvector(): + pass + + +def test_bqfeature_fetchvector_1(): + """ + Test fetch vector + """ + feature_vector = Feature().fetch_vector(bqsession, 'SimpleTestFeature', resource_list) + +def test_bqfeature_fetchvector_error(): + """ + Test fetch vector on a resource that doesnt exist + """ + try: + resource_list = [FeatureResource(image='%s/image_service/image/notaresource' % bqsession.bisque_root)] + feature_vector = Feature().fetch_vector(bqsession, 'SimpleTestFeature', resource_list) + except FeatureError: + assert True + else: + assert False + + +def setup_bqparallelfeature_fetch(): + """ + uploads a list of images + """ + global resource_list + resource_list = [] + for _ in xrange(10): + resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, filename1)) + content = bqsession.postblob(file1_location, xml=resource) + uniq = etree.XML(content)[0].attrib['resource_uniq'] + resource_list.append(FeatureResource(image='%s/image_service/image/%s'%(bqsession.bisque_root,uniq))) + + +def teardown_bqparallelfeature_fetch(): + """ + """ + pass + + +@with_setup(setup_bqparallelfeature_fetch, teardown_bqparallelfeature_fetch) +def test_bqparallelfeature_fetch_1(): + """ + Test parallel feature fetch vector and returning pytables object + """ + PF=ParallelFeature() + hdf5 = PF.fetch(bqsession, 
'SimpleTestFeature', resource_list) + hdf5.close() + os.remove(hdf5.filename) + +@with_setup(setup_bqparallelfeature_fetch, teardown_bqparallelfeature_fetch) +def test_bqparallelfeature_fetch_2(): + """ + Test parallel feature fetch vector and return a file + """ + filename = 'bqparallelfeature_fetch_2.h5' + path = os.path.join(results_location, filename) + PF=ParallelFeature() + PF.set_thread_num(2) + PF.set_chunk_size(5) + filename = PF.fetch(bqsession, 'SimpleTestFeature', resource_list, path=path) + + +def setup_bqparallelfeature_fetchvector(): + """ + Uploads a list of images + """ + global resource_list + resource_list = [] + for _ in xrange(10): + resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, filename1)) + content = bqsession.postblob(file1_location, xml=resource) + uniq = etree.XML(content)[0].attrib['resource_uniq'] + resource_list.append(FeatureResource(image='%s/image_service/image/%s'%(bqsession.bisque_root,uniq))) + + +def teardown_bqparallelfeature_fetchvector(): + """ + """ + pass + + +@with_setup(setup_bqparallelfeature_fetchvector, teardown_bqparallelfeature_fetchvector) +def test_bqparallelfeature_fetchvector_1(): + """ + Test parallel feature fetch vector + """ + PF=ParallelFeature() + PF.set_thread_num(2) + PF.set_chunk_size(5) + feature_vectors = PF.fetch_vector(bqsession, 'SimpleTestFeature', resource_list) diff --git a/nph_5class/bqapi/tests/test_comm.py b/nph_5class/bqapi/tests/test_comm.py new file mode 100644 index 0000000..03f4460 --- /dev/null +++ b/nph_5class/bqapi/tests/test_comm.py @@ -0,0 +1,232 @@ +import pytest + +from collections import OrderedDict, namedtuple +import os +from lxml import etree +import urllib +from datetime import datetime +import time + +from bqapi import BQSession + +TEST_PATH = 'tests_%s'%urllib.quote(datetime.now().strftime('%Y%m%d%H%M%S%f')) #set a test dir on the system so not too many repeats occur + +# default mark is function.. 
may be overridden +pytestmark = pytest.mark.functional + +############################# +### BQServer +############################# +@pytest.mark.unit +def test_prepare_url_1(server): + """ + """ + check_url = 'http://bisque.ece.ucsb.edu/image/00-123456789?remap=gray&format=tiff' + url = 'http://bisque.ece.ucsb.edu/image/00-123456789' + odict = OrderedDict([('remap','gray'),('format','tiff')]) + url = server.prepare_url(url, odict=odict) + assert url == check_url + +@pytest.mark.unit +def test_prepare_url_2(server): + """ + """ + check_url = 'http://bisque.ece.ucsb.edu/image/00-123456789?remap=gray&format=tiff' + url = 'http://bisque.ece.ucsb.edu/image/00-123456789' + url = server.prepare_url(url, remap='gray', format='tiff') + assert url == check_url + +@pytest.mark.unit +def test_prepare_url_3(server): + """ + """ + check_url = 'http://bisque.ece.ucsb.edu/image/00-123456789?format=tiff&remap=gray' + url = 'http://bisque.ece.ucsb.edu/image/00-123456789' + odict = OrderedDict([('remap','gray')]) + url = server.prepare_url(url, odict=odict, format='tiff') + assert url == check_url + + + +#Test BQSession +def test_open_session(config): + """ + Test Initalizing a BQSession locally + """ + host = config.get ('host.root') + user = config.get ('host.user') + pwd = config.get ('host.password') + + bqsession = BQSession().init_local(user, pwd, bisque_root=host, create_mex=False) + bqsession.close() + + +def test_initalize_mex_locally(config): + """ + Test initalizing a mex locally + """ + host = config.get ('host.root') + user = config.get ('host.user') + pwd = config.get ('host.password') + bqsession = BQSession().init_local(user, pwd, bisque_root=host, create_mex=True) + assert bqsession.mex.uri + bqsession.close() + + +def test_initalize_session_From_mex(config): + """ + Test initalizing a session from a mex + """ + host = config.get ('host.root') + user = config.get ('host.user') + pwd = config.get ('host.password') + bqsession = BQSession().init_local(user, pwd, bisque_root=host) + mex_url = bqsession.mex.uri + token = bqsession.mex.resource_uniq + bqmex = BQSession().init_mex(mex_url, token, user, bisque_root=host) + bqmex.close() + bqsession.close() + + +def test_fetchxml_1(session): + """ + Test fetch xml + """ + user = session.config.get ('host.user') + #bqsession = BQSession().init_local(user, pwd, bisque_root=root) + response_xml = session.fetchxml('/data_service/'+user) #fetches the user + session.close() + if not isinstance(response_xml, etree._Element): + assert False , 'Did not return XML!' + +def test_fetchxml_2(session, stores): + """ + Test fetch xml and save the document to disk + """ + user = session.config.get ('host.user') + filename = 'fetchxml_test_2.xml' + path = os.path.join(stores.results,filename) + path = session.fetchxml('/data_service/'+user, path=path) #fetches the user + + try: + with open(path,'r') as f: + etree.XML(f.read()) #check if xml was returned + + except etree.Error: + assert False , 'Did not return XML!' + + +def test_postxml_1(session): + """ + Test post xml + """ + + test_document =""" + + + + """ + response_xml = session.postxml('/data_service/file', xml=test_document) + if not isinstance(response_xml, etree._Element): + assert False ,'Did not return XML!' 
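# The functional tests in this file also use `config`, `session` (and `mexsession`)
# fixtures that are not defined in the conftest.py included in this diff; a minimal
# sketch of the first two, inferred from how the tests use them (fixture bodies and
# config keys are assumptions, mirroring setup.cfg.sample):
#
#     import pytest
#     from bqapi import BQSession
#
#     @pytest.fixture(scope='module')
#     def config():
#         # e.g. read from setup.cfg: [Host] root / user / password
#         return {'host.root': 'http://localhost:8080',
#                 'host.user': 'test',
#                 'host.password': 'test'}
#
#     @pytest.fixture(scope='module')
#     def session(config):
#         bq = BQSession().init_local(config.get('host.user'), config.get('host.password'),
#                                     bisque_root=config.get('host.root'), create_mex=False)
#         bq.config = config            # tests read credentials via session.config.get(...)
#         yield bq
#         bq.close()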
+ + +def test_postxml_2(session, stores): + """ + Test post xml and save the document to disk + """ + + test_document =""" + + + + """ + filename = 'postxml_test_2.xml' + path = os.path.join(stores.results,filename) + + path = session.postxml('/data_service/file', test_document, path=path) + + try: + with open(path,'r') as f: + etree.XML(f.read()) #check if xml was returned + + except etree.Error: + assert False ,'Did not return XML!' + + +def test_postxml_3(session): + """ + Test post xml and read immediately + """ + + test_document =""" + + + + """ + response0_xml = session.postxml('/data_service/file', xml=test_document) + uri0 = response0_xml.get ('uri') + response1_xml = session.fetchxml(uri0) + uri1 = response0_xml.get ('uri') + session.deletexml (url = uri0) + if not isinstance(response0_xml, etree._Element): + assert False , 'Did not return XML!' + + assert uri0 == uri1, "Posted and Fetched uri do not match" + + + + +def test_fetchblob_1(): + """ + + """ + pass + + +def test_postblob_1(session, stores): + """ Test post blob """ + resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, stores.files[0].name)) + content = session.postblob(stores.files[0].location, xml=resource) + assert len(content), "No content returned" + + +def test_postblob_2(session, stores): + """ Test post blob and save the returned document to disk """ + filename = 'postblob_test_2.xml' + path = os.path.join(stores.results,filename) + resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, stores.files[0].name)) + path = session.postblob(stores.files[0].location, xml=resource, path=path) + + try: + with open(path,'r') as f: + etree.XML(f.read()) #check if xml was returned + + except etree.Error: + assert False , 'Did not return XML!' + +def test_postblob_3(session, stores): + """ + Test post blob with xml attached + """ + + test_document = """ + + + + """%u'%s/%s'%(TEST_PATH, stores.files[0].name) + content = session.postblob(stores.files[0].location, xml=test_document) + + +def test_run_mex(mexsession): + """ + Test run mex + """ + session = mexsession + mex_uri = session.mex.uri + session.update_mex(status="IN PROGRESS", tags = [], gobjects = [], children=[], reload=False) + response_xml = session.fetchxml(mex_uri) #check xml + session.finish_mex() + + response_xml = session.fetchxml(mex_uri) #check xml + assert mex_uri == response_xml.get ('uri') diff --git a/nph_5class/bqapi/tests/test_util.py b/nph_5class/bqapi/tests/test_util.py new file mode 100644 index 0000000..b798a25 --- /dev/null +++ b/nph_5class/bqapi/tests/test_util.py @@ -0,0 +1,294 @@ +import pytest +import os +import numpy as np +#import urllib +from six.moves import urllib +from datetime import datetime + + + +from bqapi import BQSession, BQServer +from bqapi.util import fetch_dataset +from bq.util.mkdir import _mkdir +from .util import fetch_file +from bqapi.comm import BQCommError +from bqapi.util import * +try: + from lxml import etree +except ImportError: + import xml.etree.ElementTree as etree + +TEST_PATH = 'tests_%s'%urllib.parse.quote(datetime.now().strftime('%Y%m%d%H%M%S%f')) #set a test dir on the system so not too many repeats occur + +pytestmark = pytest.mark.skip("Unported tests") + + +@pytest.fixture(scope='module') +def image_uri(session, stores): + """ + uploads an image + """ + resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, stores.files[0].name)) + content = bqsession.postblob(store.files[0].location, xml=resource) + return etree.XML(content)[0].attrib['uri'] + + +def setup_fetchimageplanes(): 
+ """ + uploads an image + """ + global image_uri + resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, filename1)) + content = bqsession.postblob(stores.files[0].location, xml=resource) + image_uri = etree.XML(content)[0].attrib['uri'] + + +def teardown_fetchimageplanes(): + pass + + + +def setup_fetchimagepixels(): + """ + uploads an image + """ + global image_uri + resource = etree.Element('resource', name=u'%s/%s'%(TEST_PATH, filename1)) + content = bqsession.postblob(stores.files[0].location, xml=resource) + image_uri = etree.XML(content)[0].attrib['uri'] + +def teardown_fetchimagepixels(): + pass + + +def setup_fetchdataset(): + """ + uploads an dataset + """ + global dataset_uri + dataset = etree.Element('dataset', name='test') + for _ in xrange(4): + resource = etree.Element('resource', name=u'%s/%s'%(TEST_PATH, filename1)) + content = bqsession.postblob(stores.files[0].location, xml=resource) + value=etree.SubElement(dataset,'value', type="object") + value.text = etree.XML(content)[0].attrib['uri'] + content = bqsession.postxml('/data_service/dataset', dataset) + dataset_uri = content.attrib['uri'] + +def teardown_fetchdataset(): + pass + + + +def setup_fetchDataset(): + """ + uploads an dataset + """ + global dataset_uri + dataset = etree.Element('dataset', name='test') + for _ in xrange(4): + resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, filename1)) + content = bqsession.postblob(stores.files[0].location, xml=resource) + value=etree.SubElement(dataset,'value', type="object") + value.text = etree.XML(content)[0].attrib['uri'] + content = bqsession.postxml('/data_service/dataset', dataset) + dataset_uri = content.attrib['uri'] + + +def teardown_fetchDataset(): + pass + + + +def setup_saveimagepixels(): + """ + uploads an image + """ + global image_uri + resource = etree.Element('resource', name=u'%s/%s'%(TEST_PATH, filename1)) + content = bqsession.postblob(stores.files[0].location, xml=resource) + image_uri = etree.XML(content)[0].attrib['uri'] + + +def teardown_saveimagepixels(): + pass + + +def setup_fetchImage(): + """ + uploads an image + """ + global image_uri + resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, filename1)) + content = bqsession.postblob(stores.files[0].location, xml=resource) + image_uri = etree.XML(content)[0].attrib['uri'] + + +def teardown_fetchImage(): + pass + + + +################################################### + + + + +def test_saveblob_1(session,stores): + """ + Saves an image to the blob service + """ + try: + result = save_blob(bqsession, localfile=stores.files[0].location) + except BQCommError, e: + assert False, 'BQCommError: Status: %s'%e.status + if result is None: + assert False, 'XML Parsing error' + + +def test_saveblob_2(session,stores): + """ + Save an image to the blob service with xml tags + """ + + try: + result = save_blob(bqsession, localfile=stores.files[0].location) + except BQCommError, e: + assert False, 'BQCommError: Status: %s'%e.status + if result is None: + assert False, 'XML Parsing error' + + + + + +def test_fetchblob_1(session, stores, image_uri): + """ + fetch blob and return path + """ + try: + result = fetch_blob(bqsession, image_uri, dest=stores.results) + except BQCommError, e: + assert False, 'BQCommError: Status: %s'%e.status + + + +def test_fetchblob_2(session, image_uri): + """ + fetch blob and return local path + """ + try: + result = fetch_blob(bqsession, image_uri, uselocalpath=True) + except BQCommError, e: + assert False, 'BQCommError: Status: %s'%e.status + + + 
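# Illustrative sketch (not part of the test suite): fetch_blob, as exercised by
# test_fetchblob_1/2 above, returns a {uri: local_path} mapping. With
# uselocalpath=True it resolves the server-side file path instead of downloading,
# so that form is only meaningful when run on the same host as the server.
# The session, image_uri and dest_dir arguments are placeholders.
def fetch_blob_example(session, image_uri, dest_dir):
    from bqapi.util import fetch_blob  # also available via the wildcard import above
    downloaded = fetch_blob(session, image_uri, dest=dest_dir)  # writes the blob into dest_dir
    local = fetch_blob(session, image_uri, uselocalpath=True)   # server-side path, no download
    return downloaded[image_uri], local[image_uri]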
+ +#@with_setup(setup_fetchimageplanes, teardown_fetchimageplanes) +def test_fetchimageplanes_1(): + """ + fetch image planes and return path + """ + try: + result = fetch_image_planes(bqsession, image_uri, results_location, uselocalpath=False) + except BQCommError, e: + assert False, 'BQCommError: Status: %s'%e.status + + +#@with_setup(setup_fetchimageplanes, teardown_fetchimageplanes) +def test_fetchimageplanes_2(): + """ + Fetch image planes and return path. Routine is run on same host as server. + """ + try: + result = fetch_image_planes(bqsession, image_uri, results_location,uselocalpath=True) + except BQCommError, e: + assert False, 'BQCommError: Status: %s'%e.status + + + +#@with_setup(setup_fetchimagepixels, teardown_fetchimagepixels) +def test_fetchimagepixels_1(): + """ + fetch image planes and return path + """ + try: + result = fetch_image_pixels(bqsession, image_uri, results_location,uselocalpath=True) + except BQCommError, e: + assert False, 'BQCommError: Status: %s'%e.status + +#@with_setup(setup_fetchimagepixels, teardown_fetchimagepixels) +def test_fetchimagepixels_2(): + """ + fetch image planes and return path. Routine is run on same host as server. + """ + try: + result = fetch_image_pixels(bqsession, image_uri, results_location,uselocalpath=True) + except BQCommError, e: + assert False, 'BQCommError: Status: %s'%e.status + + +#@with_setup(setup_fetchdataset, teardown_fetchdataset) +def test_fetchdataset(): + """ + fetch dataset images + """ + try: + result = fetch_dataset(bqsession, dataset_uri, results_location) + except BQCommError, e: + assert False, 'BQCommError: Status: %s'%e.status + + +#@with_setup(setup_fetchImage, teardown_fetchImage) +def test_fetchImage_1(): + """ + fetch Image + """ + try: + result = fetchImage(bqsession, image_uri, results_location) + except BQCommError, e: + assert False, 'BQCommError: Status: %s'%e.status + + +#@with_setup(setup_fetchImage, teardown_fetchImage) +def test_fetchImage_2(): + """ + fetch Image with localpath + """ + try: + result = fetchImage(bqsession, image_uri, results_location, uselocalpath=True) + except BQCommError, e: + assert False, 'BQCommError: Status: %s'%e.status + + + +#@with_setup(setup_fetchDataset, teardown_fetchDataset) +def test_fetchDataset(): + """ + fetch Dataset images + """ + try: + result = fetchDataset(bqsession, dataset_uri, results_location) + except BQCommError, e: + assert False, 'BQCommError: Status: %s'%e.status + + + +#@with_setup(setup_saveimagepixels, teardown_saveimagepixels) +def test_saveimagepixels(): + """ + Test save image pixels + """ + #doesnt work without name on image + xmldoc = """ + + + + """%u'%s/%s'%(TEST_PATH, filename1) + #bqimage = fromXml(etree.XML(xmldoc)) + bqimage = bqsession.factory.from_string (xmldoc) + try: + result = save_image_pixels(bqsession, stores.files[0].location, image_tags=bqimage) + except BQCommError, e: + assert False, 'BQCommError: Status: %s'%e.status diff --git a/nph_5class/bqapi/tests/util.py b/nph_5class/bqapi/tests/util.py new file mode 100644 index 0000000..e06b987 --- /dev/null +++ b/nph_5class/bqapi/tests/util.py @@ -0,0 +1,20 @@ +from bq.util.mkdir import _mkdir +import posixpath +import urllib +import os + +def fetch_file(filename, url, dir): + """ + @param filename: name of the file fetching from the store + @param url: url of the store + @param dir: the directory the file will be placed in + + @return the local path to the file + """ + _mkdir(url) + _mkdir(dir) + url = posixpath.join(url, filename) + path = os.path.join(dir, filename) + if 
not os.path.exists(path): + urllib.urlretrieve(url, path) + return path \ No newline at end of file diff --git a/nph_5class/bqapi/types.py b/nph_5class/bqapi/types.py new file mode 100644 index 0000000..12c318b --- /dev/null +++ b/nph_5class/bqapi/types.py @@ -0,0 +1,7 @@ + + +USENODE = False +if USENODE: + from .bqnode import * +else: + from .bqclass import * diff --git a/nph_5class/bqapi/util.py b/nph_5class/bqapi/util.py new file mode 100644 index 0000000..162d6d9 --- /dev/null +++ b/nph_5class/bqapi/util.py @@ -0,0 +1,435 @@ + + +import os +import shutil +#import urllib +#import urlparse +#import time +import logging +from six.moves import urllib + +#from lxml import etree as ET +#from lxml import etree +from .xmldict import xml2d, d2xml + +log = logging.getLogger('bqapi.util') + +##################################################### +# misc: unicode +##################################################### + +def normalize_unicode(s): + if isinstance(s, str): + return s + try: + s = s.decode('utf8') + except UnicodeEncodeError: + s = s.encode('ascii', 'replace') + return s + +##################################################### +# misc: path manipulation +##################################################### + +if os.name == 'nt': + def url2localpath(url): + path = urllib.parse.urlparse(url).path + if len(path)>0 and path[0] == '/': + path = path[1:] + try: + return urllib.parse.unquote(path).decode('utf-8') + except UnicodeEncodeError: + # dima: safeguard measure for old non-encoded unicode paths + return urllib.parse.unquote(path) + + def localpath2url(path): + path = path.replace('\\', '/') + url = urllib.parse.quote(path.encode('utf-8')) + if len(path)>3 and path[0] != '/' and path[1] == ':': + # path starts with a drive letter: c:/ + url = 'file:///%s'%url + else: + # path is a relative path + url = 'file://%s'%url + return url + +else: + def url2localpath(url): + url = url.encode('utf-8') # safegurd against un-encoded values in the DB + path = urllib.parse.urlparse(url).path + return urllib.parse.unquote(path) + + def localpath2url(path): + url = urllib.parse.quote(path.encode('utf-8')) + url = 'file://%s'%url + return url + +##################################################### + + +class AttrDict(dict): + def __init__(self, *args, **kwargs): + dict.__init__(self, *args, **kwargs) + + def __getattr__(self, name): + try: + return self[name] + except KeyError: + raise AttributeError + + def __setattr__(self, name, value): + self[name] = value + return value + + def __getstate__(self): + return list(self.items()) + + def __setstate__(self, items): + for key, val in items: + self[key] = val + + +def safecopy (*largs): + largs = list (largs) + d = largs.pop() + + for f in largs: + try: + dest = d + if os.path.isdir (d): + dest = os.path.join (d, os.path.basename(f)) + print ("linking %s to %s"%(f,dest)) + if os.path.exists(dest): + print ("Found existing file %s: removing .." % dest) + os.unlink (dest) + os.link(f, dest) + except (OSError, AttributeError) as e: + print ("Problem in link %s .. 
trying copy" % e) + shutil.copy2(f, dest) + +def parse_qs(query): + """ + parse a uri query string into a dict + """ + pd = {} + if '&' in query: + for el in query.split('&'): + nm, junk, vl = el.partition('=') + pd.setdefault(nm, []).append(vl) + return pd + +def make_qs(pd): + """ + convert back from dict to qs + """ + query = [] + for k,vl in list(pd.items()): + for v in vl: + pair = v and "%s=%s" % (k,v) or k + query.append(pair) + return "&".join(query) + + +def save_blob(session, localfile=None, resource=None): + """ + put a local image on the server and return the URL + to the METADATA XML record + + @param session: the local session + @param image: an BQImage object + @param localfile: a file-like object or name of a localfile + @return XML content when upload ok + + @exceptions comm.BQCommError - if blob is failed to be posted + """ + content = session.postblob(localfile, xml=resource) + + #content = ET.XML(content) + content = session.factory.string2etree(content) + if len(content)<1: #when would this happen + return None + return content[0] + + +def fetch_blob(session, uri, dest=None, uselocalpath=False): + """ + fetch original image locally as tif + @param session: the bqsession + @param uri: resource image uri + @param dest: a destination directory + @param uselocalpath: true when routine is run on same host as server + """ + image = session.load(uri) + name = image.name or next_name("blob") + + query = None + if uselocalpath: + # Skip 'file:' + path = image.value + if path.startswith('file:'): + path = path[5:] + return {uri: path} + + url = session.service_url('blob_service', path = image.resource_uniq) + blobdata = session.c.fetch(url) + if os.path.isdir(dest): + outdest = os.path.join (dest, os.path.basename(name)) + else: + outdest = os.path.join ('.', os.path.basename(name)) + f = open(outdest, 'wb') + f.write(blobdata) + f.close() + return {uri: outdest} + + +def fetch_image_planes(session, uri, dest=None, uselocalpath=False): + """ + fetch all the image planes of an image locally + @param session: the bqsession + @param uri: resource image uri + @param dest: a destination directory + @param uselocalpath: true when routine is run on same host as server + + """ + image = session.load (uri, view='full') + #x,y,z,t,ch = image.geometry() + meta = image.pixels().meta().fetch() + #meta = ET.XML(meta) + meta = session.factory.string2etree(meta) + t = meta.findall('.//tag[@name="image_num_t"]') + t = len(t) and t[0].get('value') + z = meta.findall('.//tag[@name="image_num_z"]') + z = len(z) and z[0].get('value') + tplanes = int(t) + zplanes = int(z) + + planes=[] + for t in range(tplanes): + for z in range(zplanes): + ip = image.pixels().slice(z=z+1,t=t+1).format('tiff') + if uselocalpath: + ip = ip.localpath() + planes.append (ip) + + files = [] + for i, p in enumerate(planes): + slize = p.fetch() + fname = os.path.join (dest, "%.5d.TIF" % i) + if uselocalpath: + #path = ET.XML(slize).xpath('/resource/@src')[0] + resource = session.factory.string2etree(slize) + path = resource.get ('value') + # Strip file:/ from path + if path.startswith ('file:/'): + path = path[5:] + if os.path.exists(path): + safecopy (path, fname) + else: + log.error ("localpath did not return valid path: %s", path) + else: + f = open(fname, 'wb') + f.write(slize) + f.close() + files.append(fname) + + return files + + +def next_name(name): + count = 0 + while os.path.exists("%s-%.5d.TIF" % (name, count)): + count = count + 1 + return "%s-%.5d.TIF" % (name, count) + + + +def fetch_image_pixels(session, uri, 
dest, uselocalpath=False): + """ + fetch original image locally as tif + @param session: the bqsession + @param uri: resource image uri + @param dest: a destination directory + @param uselocalpath: true when routine is run on same host as server + """ + image = session.load(uri) + name = image.name or next_name("image") + ip = image.pixels().format('tiff') + if uselocalpath: + ip = ip.localpath() + pixels = ip.fetch() + if os.path.isdir(dest): + dest = os.path.join(dest, os.path.basename(name)) + else: + dest = os.path.join('.', os.path.basename(name)) + if not dest.lower().endswith ('.tif'): + dest = "%s.tif" % dest + + + if uselocalpath: + #path = ET.XML(pixels).xpath('/resource/@src')[0] + resource = session.factory.string2etree(pixels) + path = resource.get ('value') + #path = urllib.url2pathname(path[5:]) + if path.startswith('file:/'): + path = path[5:] + # Skip 'file:' + if os.path.exists(path): + safecopy(path, dest) + return { uri : dest } + else: + log.error ("localpath did not return valid path: %s", path) + + f = open(dest, 'wb') + f.write(pixels) + f.close() + return { uri : dest } + + +def fetch_dataset(session, uri, dest, uselocalpath=False): + """ + fetch elemens of dataset locally as tif + + @param session: the bqsession + @param uri: resource image uri + @param dest: a destination directory + @param uselocalpath: true when routine is run on same host as server + + @return: + """ + dataset = session.fetchxml(uri, view='deep') + members = dataset.findall('.//value[@type="object"]') + + results = {} + for i, imgxml in enumerate(members): + uri = imgxml.text #imgxml.get('uri') + print ("FETCHING", uri) + #fname = os.path.join (dest, "%.5d.tif" % i) + x = fetch_image_pixels(session, uri, dest, uselocalpath=uselocalpath) + results.update (x) + return results + + +def fetchImage(session, uri, dest, uselocalpath=False): + """ + @param: session - + @param: url - + @param: dest - + @param: uselocalpath- (default: False) + + @return + """ + image = session.load(uri).pixels().info() + #fileName = ET.XML(image.fetch()).xpath('//tag[@name="filename"]/@value')[0] + fileName = session.factory.string2etree(image.fetch()).findall('.//tag[@name="filename"]')[0] + fileName = fileName.get ('value') + + ip = session.load(uri).pixels().format('tiff') + + if uselocalpath: + ip = ip.localpath() + + pixels = ip.fetch() + + if os.path.isdir(dest): + dest = os.path.join(dest, fileName) + + if uselocalpath: + #path = ET.XML(pixels).xpath('/resource/@src')[0] + resource = session.factory.string2etree(pixels) + path = resource.get ('value') + #path = urllib.url2pathname(path[5:]) + if path.startswith ('file:/'): + # Skip 'file:' + path = path[5:] + if os.path.exists(path): + safecopy(path, dest) + return {uri: dest } + else: + log.error ("localpath did not return valid path: %s", path) + + f = open(dest, 'wb') + f.write(pixels) + f.close() + return {uri :dest } + + +def fetchDataset(session, uri, dest, uselocalpath=False): + dataset = session.fetchxml(uri, view='deep') + members = dataset.findall('.//value[@type="object"]') + results = {} + + for i, imgxml in enumerate(members): + uri = imgxml.text + print ("FETCHING: ", uri) + #fname = os.path.join (dest, "%.5d.tif" % i) + result = fetchImage(session, uri, dest, uselocalpath=uselocalpath) + results[uri] = result[uri] + return results + + +# Post fields and files to an http host as multipart/form-data. +# fields is a sequence of (name, value) elements for regular form +# fields. 
files is a sequence of (name, filename, value) elements +# for data to be uploaded as files +# Return the tuple (rsponse headers, server's response page) + +# example: +# post_files ('http://..', +# fields = {'file1': open('file.jpg','rb'), 'name':'file' }) +# post_files ('http://..', fields = [('file1', 'file.jpg', buffer), ('f1', 'v1' )] ) + +def save_image_pixels(session, localfile, image_tags=None): + """ + put a local image on the server and return the URL + to the METADATA XML record + + @param: session - the local session + @param: image - an BQImage object + @param: localfile - a file-like object or name of a localfile + + @return: XML content when upload ok + """ + xml = None + if image_tags: + #xml = ET.tostring(toXml(image_tags)) + xml = session.factory.to_string(image_tags) + return session.postblob(localfile, xml=xml) + + + +def as_flat_dict_tag_value(xmltree): + def _xml2d(e, d, path=''): + for child in e: + name = '%s%s'%(path, child.get('name', '')) + value = child.get('value', None) + if value is not None: + if not name in d: + d[name] = value + else: + if isinstance(d[name], list): + d[name].append(value) + else: + d[name] = [d[name], value] + d = _xml2d(child, d, path='%s%s/'%(path, child.get('name', ''))) + return d + + return _xml2d(xmltree, {}) + +def as_flat_dicts_node(xmltree): + def _xml2d(e, d, path=''): + for child in e: + name = '%s%s'%(path, child.get('name', '')) + #value = child.get('value', None) + value = child + #if value is not None: + if not name in d: + d[name] = value + else: + if isinstance(d[name], list): + d[name].append(value) + else: + d[name] = [d[name], value] + d = _xml2d(child, d, path='%s%s/'%(path, child.get('name', ''))) + return d + + return _xml2d(xmltree, {}) diff --git a/nph_5class/bqapi/util.py.bak b/nph_5class/bqapi/util.py.bak new file mode 100644 index 0000000..2e253f7 --- /dev/null +++ b/nph_5class/bqapi/util.py.bak @@ -0,0 +1,435 @@ +from __future__ import print_function + +import os +import shutil +#import urllib +#import urlparse +#import time +import logging +from six.moves import urllib + +#from lxml import etree as ET +#from lxml import etree +from .xmldict import xml2d, d2xml + +log = logging.getLogger('bqapi.util') + +##################################################### +# misc: unicode +##################################################### + +def normalize_unicode(s): + if isinstance(s, unicode): + return s + try: + s = s.decode('utf8') + except UnicodeEncodeError: + s = s.encode('ascii', 'replace') + return s + +##################################################### +# misc: path manipulation +##################################################### + +if os.name == 'nt': + def url2localpath(url): + path = urllib.parse.urlparse(url).path + if len(path)>0 and path[0] == '/': + path = path[1:] + try: + return urllib.parse.unquote(path).decode('utf-8') + except UnicodeEncodeError: + # dima: safeguard measure for old non-encoded unicode paths + return urllib.parse.unquote(path) + + def localpath2url(path): + path = path.replace('\\', '/') + url = urllib.parse.quote(path.encode('utf-8')) + if len(path)>3 and path[0] != '/' and path[1] == ':': + # path starts with a drive letter: c:/ + url = 'file:///%s'%url + else: + # path is a relative path + url = 'file://%s'%url + return url + +else: + def url2localpath(url): + url = url.encode('utf-8') # safegurd against un-encoded values in the DB + path = urllib.parse.urlparse(url).path + return urllib.parse.unquote(path) + + def localpath2url(path): + url = 
urllib.parse.quote(path.encode('utf-8')) + url = 'file://%s'%url + return url + +##################################################### + + +class AttrDict(dict): + def __init__(self, *args, **kwargs): + dict.__init__(self, *args, **kwargs) + + def __getattr__(self, name): + try: + return self[name] + except KeyError: + raise AttributeError + + def __setattr__(self, name, value): + self[name] = value + return value + + def __getstate__(self): + return self.items() + + def __setstate__(self, items): + for key, val in items: + self[key] = val + + +def safecopy (*largs): + largs = list (largs) + d = largs.pop() + + for f in largs: + try: + dest = d + if os.path.isdir (d): + dest = os.path.join (d, os.path.basename(f)) + print ("linking %s to %s"%(f,dest)) + if os.path.exists(dest): + print ("Found existing file %s: removing .." % dest) + os.unlink (dest) + os.link(f, dest) + except (OSError, AttributeError) as e: + print ("Problem in link %s .. trying copy" % e) + shutil.copy2(f, dest) + +def parse_qs(query): + """ + parse a uri query string into a dict + """ + pd = {} + if '&' in query: + for el in query.split('&'): + nm, junk, vl = el.partition('=') + pd.setdefault(nm, []).append(vl) + return pd + +def make_qs(pd): + """ + convert back from dict to qs + """ + query = [] + for k,vl in pd.items(): + for v in vl: + pair = v and "%s=%s" % (k,v) or k + query.append(pair) + return "&".join(query) + + +def save_blob(session, localfile=None, resource=None): + """ + put a local image on the server and return the URL + to the METADATA XML record + + @param session: the local session + @param image: an BQImage object + @param localfile: a file-like object or name of a localfile + @return XML content when upload ok + + @exceptions comm.BQCommError - if blob is failed to be posted + """ + content = session.postblob(localfile, xml=resource) + + #content = ET.XML(content) + content = session.factory.string2etree(content) + if len(content)<1: #when would this happen + return None + return content[0] + + +def fetch_blob(session, uri, dest=None, uselocalpath=False): + """ + fetch original image locally as tif + @param session: the bqsession + @param uri: resource image uri + @param dest: a destination directory + @param uselocalpath: true when routine is run on same host as server + """ + image = session.load(uri) + name = image.name or next_name("blob") + + query = None + if uselocalpath: + # Skip 'file:' + path = image.value + if path.startswith('file:'): + path = path[5:] + return {uri: path} + + url = session.service_url('blob_service', path = image.resource_uniq) + blobdata = session.c.fetch(url) + if os.path.isdir(dest): + outdest = os.path.join (dest, os.path.basename(name)) + else: + outdest = os.path.join ('.', os.path.basename(name)) + f = open(outdest, 'wb') + f.write(blobdata) + f.close() + return {uri: outdest} + + +def fetch_image_planes(session, uri, dest=None, uselocalpath=False): + """ + fetch all the image planes of an image locally + @param session: the bqsession + @param uri: resource image uri + @param dest: a destination directory + @param uselocalpath: true when routine is run on same host as server + + """ + image = session.load (uri, view='full') + #x,y,z,t,ch = image.geometry() + meta = image.pixels().meta().fetch() + #meta = ET.XML(meta) + meta = session.factory.string2etree(meta) + t = meta.findall('.//tag[@name="image_num_t"]') + t = len(t) and t[0].get('value') + z = meta.findall('.//tag[@name="image_num_z"]') + z = len(z) and z[0].get('value') + tplanes = int(t) + zplanes = 
int(z) + + planes=[] + for t in range(tplanes): + for z in range(zplanes): + ip = image.pixels().slice(z=z+1,t=t+1).format('tiff') + if uselocalpath: + ip = ip.localpath() + planes.append (ip) + + files = [] + for i, p in enumerate(planes): + slize = p.fetch() + fname = os.path.join (dest, "%.5d.TIF" % i) + if uselocalpath: + #path = ET.XML(slize).xpath('/resource/@src')[0] + resource = session.factory.string2etree(slize) + path = resource.get ('value') + # Strip file:/ from path + if path.startswith ('file:/'): + path = path[5:] + if os.path.exists(path): + safecopy (path, fname) + else: + log.error ("localpath did not return valid path: %s", path) + else: + f = open(fname, 'wb') + f.write(slize) + f.close() + files.append(fname) + + return files + + +def next_name(name): + count = 0 + while os.path.exists("%s-%.5d.TIF" % (name, count)): + count = count + 1 + return "%s-%.5d.TIF" % (name, count) + + + +def fetch_image_pixels(session, uri, dest, uselocalpath=False): + """ + fetch original image locally as tif + @param session: the bqsession + @param uri: resource image uri + @param dest: a destination directory + @param uselocalpath: true when routine is run on same host as server + """ + image = session.load(uri) + name = image.name or next_name("image") + ip = image.pixels().format('tiff') + if uselocalpath: + ip = ip.localpath() + pixels = ip.fetch() + if os.path.isdir(dest): + dest = os.path.join(dest, os.path.basename(name)) + else: + dest = os.path.join('.', os.path.basename(name)) + if not dest.lower().endswith ('.tif'): + dest = "%s.tif" % dest + + + if uselocalpath: + #path = ET.XML(pixels).xpath('/resource/@src')[0] + resource = session.factory.string2etree(pixels) + path = resource.get ('value') + #path = urllib.url2pathname(path[5:]) + if path.startswith('file:/'): + path = path[5:] + # Skip 'file:' + if os.path.exists(path): + safecopy(path, dest) + return { uri : dest } + else: + log.error ("localpath did not return valid path: %s", path) + + f = open(dest, 'wb') + f.write(pixels) + f.close() + return { uri : dest } + + +def fetch_dataset(session, uri, dest, uselocalpath=False): + """ + fetch elemens of dataset locally as tif + + @param session: the bqsession + @param uri: resource image uri + @param dest: a destination directory + @param uselocalpath: true when routine is run on same host as server + + @return: + """ + dataset = session.fetchxml(uri, view='deep') + members = dataset.findall('.//value[@type="object"]') + + results = {} + for i, imgxml in enumerate(members): + uri = imgxml.text #imgxml.get('uri') + print ("FETCHING", uri) + #fname = os.path.join (dest, "%.5d.tif" % i) + x = fetch_image_pixels(session, uri, dest, uselocalpath=uselocalpath) + results.update (x) + return results + + +def fetchImage(session, uri, dest, uselocalpath=False): + """ + @param: session - + @param: url - + @param: dest - + @param: uselocalpath- (default: False) + + @return + """ + image = session.load(uri).pixels().info() + #fileName = ET.XML(image.fetch()).xpath('//tag[@name="filename"]/@value')[0] + fileName = session.factory.string2etree(image.fetch()).findall('.//tag[@name="filename"]')[0] + fileName = fileName.get ('value') + + ip = session.load(uri).pixels().format('tiff') + + if uselocalpath: + ip = ip.localpath() + + pixels = ip.fetch() + + if os.path.isdir(dest): + dest = os.path.join(dest, fileName) + + if uselocalpath: + #path = ET.XML(pixels).xpath('/resource/@src')[0] + resource = session.factory.string2etree(pixels) + path = resource.get ('value') + #path = 
urllib.url2pathname(path[5:]) + if path.startswith ('file:/'): + # Skip 'file:' + path = path[5:] + if os.path.exists(path): + safecopy(path, dest) + return {uri: dest } + else: + log.error ("localpath did not return valid path: %s", path) + + f = open(dest, 'wb') + f.write(pixels) + f.close() + return {uri :dest } + + +def fetchDataset(session, uri, dest, uselocalpath=False): + dataset = session.fetchxml(uri, view='deep') + members = dataset.findall('.//value[@type="object"]') + results = {} + + for i, imgxml in enumerate(members): + uri = imgxml.text + print ("FETCHING: ", uri) + #fname = os.path.join (dest, "%.5d.tif" % i) + result = fetchImage(session, uri, dest, uselocalpath=uselocalpath) + results[uri] = result[uri] + return results + + +# Post fields and files to an http host as multipart/form-data. +# fields is a sequence of (name, value) elements for regular form +# fields. files is a sequence of (name, filename, value) elements +# for data to be uploaded as files +# Return the tuple (rsponse headers, server's response page) + +# example: +# post_files ('http://..', +# fields = {'file1': open('file.jpg','rb'), 'name':'file' }) +# post_files ('http://..', fields = [('file1', 'file.jpg', buffer), ('f1', 'v1' )] ) + +def save_image_pixels(session, localfile, image_tags=None): + """ + put a local image on the server and return the URL + to the METADATA XML record + + @param: session - the local session + @param: image - an BQImage object + @param: localfile - a file-like object or name of a localfile + + @return: XML content when upload ok + """ + xml = None + if image_tags: + #xml = ET.tostring(toXml(image_tags)) + xml = session.factory.to_string(image_tags) + return session.postblob(localfile, xml=xml) + + + +def as_flat_dict_tag_value(xmltree): + def _xml2d(e, d, path=''): + for child in e: + name = '%s%s'%(path, child.get('name', '')) + value = child.get('value', None) + if value is not None: + if not name in d: + d[name] = value + else: + if isinstance(d[name], list): + d[name].append(value) + else: + d[name] = [d[name], value] + d = _xml2d(child, d, path='%s%s/'%(path, child.get('name', ''))) + return d + + return _xml2d(xmltree, {}) + +def as_flat_dicts_node(xmltree): + def _xml2d(e, d, path=''): + for child in e: + name = '%s%s'%(path, child.get('name', '')) + #value = child.get('value', None) + value = child + #if value is not None: + if not name in d: + d[name] = value + else: + if isinstance(d[name], list): + d[name].append(value) + else: + d[name] = [d[name], value] + d = _xml2d(child, d, path='%s%s/'%(path, child.get('name', ''))) + return d + + return _xml2d(xmltree, {}) diff --git a/nph_5class/bqapi/xmldict.py b/nph_5class/bqapi/xmldict.py new file mode 100644 index 0000000..559794f --- /dev/null +++ b/nph_5class/bqapi/xmldict.py @@ -0,0 +1,110 @@ +# Create python xml structures compatible with +# http://search.cpan.org/~grantm/XML-Simple-2.18/lib/XML/Simple.pm + + +try: + from lxml import etree +except ImportError: + import xml.etree.ElementTree as etree +from itertools import groupby + +def xml2d(e): + """Convert an etree into a dict structure + + @type e: etree.Element + @param e: the root of the tree + @return: The dictionary representation of the XML tree + """ + def _xml2d(e): + kids = dict(e.attrib) + #if e.text: + # kids['__text__'] = e.text + #if e.tail: + # kids['__tail__'] = e.tail + for k, g in groupby(e, lambda x: x.tag): + g = [ _xml2d(x) for x in g ] + kids[k]= g + return kids + return { e.tag : _xml2d(e) } + + +def d2xml(d): + """convert dict to xml + + 
1. The top level d must contain a single entry i.e. the root element + 2. Keys of the dictionary become sublements or attributes + 3. If a value is a simple string, then the key is an attribute + 4. if a value is dict then, then key is a subelement + 5. if a value is list, then key is a set of sublements + + a = { 'module' : {'tag' : [ { 'name': 'a', 'value': 'b'}, + { 'name': 'c', 'value': 'd'}, + ], + 'gobject' : { 'name': 'g', 'type':'xx' }, + 'uri' : 'test', + } + } + >>> d2xml(a) + + + + + + + @type d: dict + @param d: A dictionary formatted as an XML document + @return: A etree Root element + """ + def _d2xml(d, p): + for k,v in list(d.items()): + if v is None: continue + if isinstance(v,dict): + node = etree.SubElement(p, k) + _d2xml(v, node) + elif isinstance(v,list): + for item in v: + if item is None: continue + node = etree.SubElement(p, k) + _d2xml(item, node) + #elif k == "__text__": + # p.text = v + #elif k == "__tail__": + # p.tail = v + else: + p.set(k, str(v)) + + k,v = list(d.items())[0] + node = etree.Element(k) + _d2xml(v, node) + return node + +# simple dictionary output of name-value pairs, useful for image metadata +def xml2nv(e): + """Convert an etree into a dict structure + + @type e: etree.Element + @param e: the root of the tree + @return: The dictionary representation of the XML tree + """ + def _xml2nv(e, a, path): + for g in e: + n = g.get('name') or g.get('type') + if n is None: + continue + a['%s%s'%(path, n)] = g.get('value') + for child in g: + _xml2nv(child, a, '%s%s/'%(path, n)) + return + a = {} + _xml2nv(e, a, '') + return a + +if __name__=="__main__": + + X = """""" + print (X) + Y = xml2d(etree.XML(X)) + print (Y) + Z = etree.tostring (d2xml(Y) ) + print (Z) + assert X == Z diff --git a/nph_5class/bqapi/xmldict.py.bak b/nph_5class/bqapi/xmldict.py.bak new file mode 100644 index 0000000..6a43118 --- /dev/null +++ b/nph_5class/bqapi/xmldict.py.bak @@ -0,0 +1,110 @@ +# Create python xml structures compatible with +# http://search.cpan.org/~grantm/XML-Simple-2.18/lib/XML/Simple.pm +from __future__ import print_function + +try: + from lxml import etree +except ImportError: + import xml.etree.ElementTree as etree +from itertools import groupby + +def xml2d(e): + """Convert an etree into a dict structure + + @type e: etree.Element + @param e: the root of the tree + @return: The dictionary representation of the XML tree + """ + def _xml2d(e): + kids = dict(e.attrib) + #if e.text: + # kids['__text__'] = e.text + #if e.tail: + # kids['__tail__'] = e.tail + for k, g in groupby(e, lambda x: x.tag): + g = [ _xml2d(x) for x in g ] + kids[k]= g + return kids + return { e.tag : _xml2d(e) } + + +def d2xml(d): + """convert dict to xml + + 1. The top level d must contain a single entry i.e. the root element + 2. Keys of the dictionary become sublements or attributes + 3. If a value is a simple string, then the key is an attribute + 4. if a value is dict then, then key is a subelement + 5. 
if a value is list, then key is a set of sublements + + a = { 'module' : {'tag' : [ { 'name': 'a', 'value': 'b'}, + { 'name': 'c', 'value': 'd'}, + ], + 'gobject' : { 'name': 'g', 'type':'xx' }, + 'uri' : 'test', + } + } + >>> d2xml(a) + + + + + + + @type d: dict + @param d: A dictionary formatted as an XML document + @return: A etree Root element + """ + def _d2xml(d, p): + for k,v in d.items(): + if v is None: continue + if isinstance(v,dict): + node = etree.SubElement(p, k) + _d2xml(v, node) + elif isinstance(v,list): + for item in v: + if item is None: continue + node = etree.SubElement(p, k) + _d2xml(item, node) + #elif k == "__text__": + # p.text = v + #elif k == "__tail__": + # p.tail = v + else: + p.set(k, unicode(v)) + + k,v = d.items()[0] + node = etree.Element(k) + _d2xml(v, node) + return node + +# simple dictionary output of name-value pairs, useful for image metadata +def xml2nv(e): + """Convert an etree into a dict structure + + @type e: etree.Element + @param e: the root of the tree + @return: The dictionary representation of the XML tree + """ + def _xml2nv(e, a, path): + for g in e: + n = g.get('name') or g.get('type') + if n is None: + continue + a['%s%s'%(path, n)] = g.get('value') + for child in g: + _xml2nv(child, a, '%s%s/'%(path, n)) + return + a = {} + _xml2nv(e, a, '') + return a + +if __name__=="__main__": + + X = """""" + print (X) + Y = xml2d(etree.XML(X)) + print (Y) + Z = etree.tostring (d2xml(Y) ) + print (Z) + assert X == Z diff --git a/nph_5class/bqconfig.json b/nph_5class/bqconfig.json new file mode 100644 index 0000000..2e090ff --- /dev/null +++ b/nph_5class/bqconfig.json @@ -0,0 +1 @@ +{"Name": "NPHSegmentation", "Author": "VB", "Description": "New NPH segmentation module", "Inputs": {"Input Image": "image"}, "Outputs": {"Segmented Image": "image"}} diff --git a/nph_5class/public/help.html b/nph_5class/public/help.html new file mode 100644 index 0000000..ef562f4 --- /dev/null +++ b/nph_5class/public/help.html @@ -0,0 +1,9 @@ +

NPH Segmentation Module – 5 Class

Performs 5 class segmentation of scans corresponding to provided *.nii.gz file. The classes are as follows:

  0. Background
  1. Ventricle
  2. White Matter
  3. Subarachnoid
  4. Shunt
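As a rough illustration of how these class labels can be read back from the module's NIfTI output, a minimal sketch using nibabel and numpy (both already used by this module); the output file path is a placeholder and the 0-4 voxel values are assumed to match the class indices listed above.

# Minimal sketch: count voxels per class in a segmentation produced by this module.
# 'segmented.nii.gz' is a placeholder path; labels 0-4 are assumed to follow the
# class indices listed above.
import nibabel as nib
import numpy as np

CLASS_NAMES = {0: 'Background', 1: 'Ventricle', 2: 'White Matter',
               3: 'Subarachnoid', 4: 'Shunt'}

seg = nib.load('segmented.nii.gz').get_fdata()
labels, counts = np.unique(seg.astype(int), return_counts=True)
for label, count in zip(labels, counts):
    print(CLASS_NAMES.get(int(label), 'Unknown'), int(count))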
diff --git a/nph_5class/public/help.md b/nph_5class/public/help.md new file mode 100644 index 0000000..4ffc0d3 --- /dev/null +++ b/nph_5class/public/help.md @@ -0,0 +1,10 @@ +# NPH Segmentation Module -- 5 Class + +Performs 5 class segmentation of scans corresponding to provided ``*.nii.gz`` file. +The classes are as follows: + +0. Background +1. Ventricle +2. White Matter +3. Subarachnoid +4. Shunt diff --git a/nph_5class/public/thumbnail.jpg b/nph_5class/public/thumbnail.jpg new file mode 100644 index 0000000..edc2e4f Binary files /dev/null and b/nph_5class/public/thumbnail.jpg differ diff --git a/nph_5class/runtime-module.cfg b/nph_5class/runtime-module.cfg new file mode 100644 index 0000000..6c85a56 --- /dev/null +++ b/nph_5class/runtime-module.cfg @@ -0,0 +1,12 @@ +# Module configuration file for local execution of modules + +module_enabled = True +runtime.platforms = command + +[command] +docker.image = nphsegmentation:v1.0.8 +environments = Staged,Docker +executable = python PythonScriptWrapper.py +files = pydist, PythonScriptWrapper.py + + diff --git a/nph_5class/src/BQ_run_module.py b/nph_5class/src/BQ_run_module.py new file mode 100644 index 0000000..ba90bdd --- /dev/null +++ b/nph_5class/src/BQ_run_module.py @@ -0,0 +1,22 @@ +import pathlib +import logging as log +# import nibabel as nib + +from torch import mode +import nphsegmentation as nsg + +def run_module(input_path_dict, output_folder_path): + output_paths_dict = dict() + + log.info(f"{input_path_dict['Input Image']=}") + input_path = input_path_dict['Input Image'] + # n = nib.load(input_path) + + output_paths_dict["Segmented Image"] = nsg.main(pathlib.Path(input_path), + pathlib.Path(output_folder_path), + # modelPath = pathlib.Path.cwd() / 'src' / 'model_backup/epoch49_ResNet2D3Class_2Layer2x2_mixed2_300.pt', + modelPath = pathlib.Path.cwd() / 'src' / 'model_backup' / 'epoch50_2Dresnet_skullstrip5Class.pt', + rdir = pathlib.Path("/module/src")) + log.info("Finished computing result!") + + return output_paths_dict diff --git a/nph_5class/src/CSFseg.py b/nph_5class/src/CSFseg.py new file mode 100644 index 0000000..f9d7819 --- /dev/null +++ b/nph_5class/src/CSFseg.py @@ -0,0 +1,359 @@ +import numpy as np +import nibabel as nib +import matplotlib.pyplot as plt +import os +import copy +import heapq + +# def connectToBoundary(label, classIdx, tolerance): +# neighbors=[] +# for i in range(-1, 2): +# for j in range(-1, 2): +# k=0 +# neighbors.append((i,j,k)) + +# seen=set() + +# position=[] +# heapq.heapify(position) + +# island=0 +# newLabel=np.zeros(label.shape) +# i, j, k=label.shape +# for z in range(k): +# for x in range(i): +# for y in range(j): + +# if (label[x,y,z]==classIdx) and (x,y,z) not in seen: +# island+=1 +# area=0 +# curIsland=set() +# seen2=set() +# seen.add((x,y,z)) +# curIsland.add((x,y,z)) +# heapq.heappush(position, (x,y,z)) + +# connected=False +# while position: +# cur=heapq.heappop(position) + +# for neighbor in neighbors: + +# if cur[0]-neighbor[0]<0 or cur[0]-neighbor[0]>=i: continue +# if cur[1]-neighbor[1]<0 or cur[1]-neighbor[1]>=j: continue +# if cur[2]-neighbor[2]<0 or cur[2]-neighbor[2]>=k: continue + +# if (label[cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2]]==classIdx) and (cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2]) not in seen: +# seen.add((cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2])) +# curIsland.add((cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2])) +# heapq.heappush(position, (cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2], 0)) + 
+# position2=[] +# heapq.heapify(position2) + +# for cur in curIsland: +# heapq.heappush(position2,(cur[0],cur[1],cur[2],0)) +# seen2.add(cur) +# while position2: +# cur=heapq.heappop(position2) +# for neighbor in neighbors: + +# if cur[0]-neighbor[0]<0 or cur[0]-neighbor[0]>=i: continue +# if cur[1]-neighbor[1]<0 or cur[1]-neighbor[1]>=j: continue +# if cur[2]-neighbor[2]<0 or cur[2]-neighbor[2]>=k: continue +# if (label[cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2]]!=0) and (cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2]) not in seen2 and cur[3]=i: continue + if cur[1]-neighbor[1]<0 or cur[1]-neighbor[1]>=j: continue + if cur[2]-neighbor[2]<0 or cur[2]-neighbor[2]>=k: continue + + if label[cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2]]==label[x,y,z] and (cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2]) not in seen: + seen.add((cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2])) + curIsland.add((cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2])) + heapq.heappush(position, (cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2])) + + islandDict[(x,y,z)]=frozenset(curIsland) +# print(island, area) + + if findMax: + if area>maxArea: + maxArea=area + maxPos=(x,y,z) + + return islandDict[maxPos], maxArea, maxPos + +def Connectivity(label, classIdx, targetIdx, refClass=1,connectivity=8): + neighbors=[] + if connectivity==8: + for i in range(-1, 2): + for j in range(-1, 2): + neighbors.append((i,j)) + elif connectivity==4: + neighbors=[(1,0),(-1,0),(0,1),(0,-1)] + + else: + + return + + seen=set() + + island=0 + position=[] + heapq.heapify(position) + + i, j=label.shape + + for x in range(i): + for y in range(j): + + if (label[x,y]==refClass) and (x,y) not in seen: + island+=1 + seen.add((x,y)) + heapq.heappush(position, (x,y)) + + while position: + cur=heapq.heappop(position) + + for neighbor in neighbors: + + if cur[0]-neighbor[0]<0 or cur[0]-neighbor[0]>=i: continue + if cur[1]-neighbor[1]<0 or cur[1]-neighbor[1]>=j: continue + + if label[cur[0]-neighbor[0],cur[1]-neighbor[1]]==classIdx and (cur[0]-neighbor[0],cur[1]-neighbor[1]) not in seen: + seen.add((cur[0]-neighbor[0],cur[1]-neighbor[1])) + label[cur[0]-neighbor[0],cur[1]-neighbor[1]]=targetIdx + heapq.heappush(position, (cur[0]-neighbor[0],cur[1]-neighbor[1])) + + + +def numIsland(label,connectivity=8): + neighbors=[] + if connectivity==8: + for i in range(-1, 2): + for j in range(-1, 2): + neighbors.append((i,j)) + elif connectivity==4: + neighbors=[(1,0),(-1,0),(0,1),(0,-1)] + + else: + + return + + seen=set() + + island=0 + position=[] + heapq.heapify(position) + + i, j=label.shape + + + for y in range(j): + for x in range(i-1,-1,-1): + + if (label[x,y]!=0) and (x,y) not in seen: + + if island==1: + if area>100: + island+=1 + break + + else: island=0 + + if island==0: + island+=1 + area=0 + seen.add((x,y)) + heapq.heappush(position, (x,y)) + curIsland=set() + while position: + cur=heapq.heappop(position) + area+=1 + curIsland.add(cur) + for neighbor in neighbors: + + if cur[0]-neighbor[0]<0 or cur[0]-neighbor[0]>=i: continue + if cur[1]-neighbor[1]<0 or cur[1]-neighbor[1]>=j: continue + + if label[cur[0]-neighbor[0],cur[1]-neighbor[1]]!=0 and (cur[0]-neighbor[0],cur[1]-neighbor[1]) not in seen: + seen.add((cur[0]-neighbor[0],cur[1]-neighbor[1])) + heapq.heappush(position, (cur[0]-neighbor[0],cur[1]-neighbor[1])) + + maxArea=area + maxPos=curIsland + + return island, maxArea, maxPos + +def changeClassResult(segmentation): + for x in range(segmentation.shape[0]): + for y in 
range(segmentation.shape[1]): + for z in range(segmentation.shape[2]): + if segmentation[x,y,z]==3: + segmentation[x,y,z]=4 + elif segmentation[x,y,z]==4: + segmentation[x,y,z]=5 + #CSF into class10 + elif segmentation[x,y,z]==1: + segmentation[x,y,z]=10 + +def saveImage(array, name): + img = nib.Nifti1Image(array, np.eye(4)) + nib.save(img, name) + +def cutoff(label,max): + + neighbors=[(1,1,0),(0,1,0),(-1,1,0),(-1,0,0),(-1,-1,0),(0,-1,0),(1,-1,0),(1,0,0),(0,0,0)] + surpos = [[2,10,2,2,2,2,2,2,10],[2,2,2,10,2,2,2,2,10],[2,2,2,2,2,10,2,2,10],[2,2,2,2,2,2,2,10,10]] + i, j, k=label.shape + + for z in range(max[2]-3,max[2]+4): + if z == max[2]: continue + for x in range(i): + for y in range(j): + if label[x,y,z] ==0 or label[x,y,z]==4: continue + nei = [] + for neighbor in neighbors: + if x-neighbor[0]<0 or x-neighbor[0]>=i: continue + if y-neighbor[1]<0 or y-neighbor[1]>=j: continue + nei.append(label[x-neighbor[0], y-neighbor[1],z-neighbor[2]]) + if nei in surpos: + label[x,y,z] = 2 + if nei == [10,10,10,10,10,10,10,10,2]: + label[x,y,z] = 10 + if nei[3]==2 and nei[7]==2 and nei[8]==10 and label[x-2,y,z]==2 and label[x+2,y,z]==2: + label[x,y,z] = 2 + if nei[1]==2 and nei[5]==2 and nei[8]==10 and label[x,y-2,z]==2 and label[x,y+2,z]==2: + label[x,y,z] = 2 + + + +def segVent(imgName, outputPath, resultName): + result=nib.load(os.path.join(outputPath, resultName)).get_fdata() + + x,y,z=result.shape + + changeClassResult(result) + + #step 1: get subarachnoid connected to skull + # connectToBoundary(result, 10, tolerance=5) + + + #step 3: get max area of remaining CSF + + island, Area, maxPos=maxArea(result, 10) + for pos in island: + result[pos]=1 + + + cutoff(result,maxPos) + + # check 7 slices + for k in range(maxPos[2]-1,-1,-1): + + for i in range(x): + for j in range(y): + if result[i,j,k]==10 and result[i,j,k+1]==1: + result[i,j,k]=1 + + Connectivity(result[:,:,k], 10, 1, refClass=1) + + for k in range(maxPos[2]+1,z): + for i in range(x): + for j in range(y): + if result[i,j,k] ==10 and result[i,j,k-1]==1 : + result[i,j,k]=1 + Connectivity(result[:,:,k], 10, 1, refClass = 1) + + for k in range(z): + for i in range(x): + for j in range(y): + if result[i,j,k]==10: + result[i,j,k]=3 + + #check max pos of ventricle + # ventmaxArea = 0 + # ventmaxPos = 0 + # for k in range(maxPos[2]-3,maxPos[2]+4): + # ventvoxel = 0 + # for i in range(x): + # for j in range(y): + # if result[i,j,k]==1: + # ventvoxel +=1 + # if ventvoxel > ventmaxArea : + # ventmaxArea = ventvoxel + # ventmaxPos = k + print('------------',imgName,'-------------') + print('middle of 7 slices :', maxPos[2]) + + + saveImage(result, os.path.join(outputPath, outputName:='vent'+resultName)) + + return Area, maxPos, result, outputName + + + + + + + + + diff --git a/nph_5class/src/CTtools.py b/nph_5class/src/CTtools.py new file mode 100644 index 0000000..de129bf --- /dev/null +++ b/nph_5class/src/CTtools.py @@ -0,0 +1,261 @@ +#!/usr/bin/env python2 +# -*- coding: utf-8 -*- +""" +Created on Wed Nov 29 15:49:08 2017 + +@author: pkao +From: https://github.com/pykao/CT2MNI152/blob/master/CTtools.py +""" + +import SimpleITK as sitk +import numpy as np +from skimage.filters import threshold_otsu +from skimage import measure +from scipy import ndimage +from skimage import exposure + +def bone_extracted(ct_img_path, outName=""): + """Extract the bone of the CT scan based on the hard thresholding on pixel value""" + + print('The CT scan you want to implement bone extraction: ', ct_img_path) + + + ct_img = sitk.ReadImage(ct_img_path) + + 
bone_mask_img = sitk.Image(ct_img.GetWidth(), ct_img.GetHeight(), ct_img.GetDepth(), sitk.sitkFloat32) + + output_ct_img = sitk.Image(ct_img.GetWidth(), ct_img.GetHeight(), ct_img.GetDepth(), sitk.sitkFloat32) + + print('The size of CT scan:', ct_img.GetSize()) + + ct_nda = sitk.GetArrayFromImage(ct_img) + + bone_mask_nda = sitk.GetArrayFromImage(bone_mask_img) + + output_ct_nda = sitk.GetArrayFromImage(output_ct_img) + + #print 'The minimum value of CT scan: ', np.amin(ct_nda) + + #print 'The maximum value of CT scan: ', np.amax(ct_nda) + + #print 'The pixel ID type of CT scan: ', ct_img.GetPixelIDTypeAsString() + + #m = 1.0 + + #b = -1024.0 + + #bone_HU = 500.0 + + #bone_pixel = (bone_HU-b)/m + + bone_pixel = 500 + + for z in range(ct_nda.shape[0]): + for x in range(ct_nda.shape[1]): + for y in range(ct_nda.shape[2]): + if ct_nda[z, x, y] >= bone_pixel: + output_ct_nda[z, x, y] = ct_nda[z, x, y] + bone_mask_nda[z, x, y] = 1.0; + + output_ct_image = sitk.GetImageFromArray(output_ct_nda) + + + + if not outName: # no outName supplied, my addition + output_ct_image_name = ct_img_path[:ct_img_path.find('.nii.gz')]+'_skull.nii.gz' + else: + output_ct_image_name = outName + + print('The name of the output skull image: ', output_ct_image_name) + + output_ct_image.CopyInformation(ct_img) + + sitk.WriteImage(output_ct_image, output_ct_image_name) + + return output_ct_image_name + + # bone_mask + #bone_mask_image = sitk.GetImageFromArray(bone_mask_nda) + + #bone_mask_image_name = ct_img_path[:ct_img_path.find('.nii.gz')]+'_skullMask.nii.gz' + + # bone_mask_image.CopyInformation(ct_img) + + #print 'The name of the output skull mask image: ', bone_mask_image_name + + #sitk.WriteImage(bone_mask_image, bone_mask_image_name) + + #return output_ct_image_name, bone_mask_image_name + +def getMaximum3DRegion(binary): + """ Get the Maximum 3D region from 3D multiple bindary Regions""" + + all_labels = measure.label(binary, background = 0) + + props = measure.regionprops(all_labels) + + areas = [prop.area for prop in props] + + maxArea_label = 1+np.argmax(areas) + + max_binary = np.float32(all_labels == maxArea_label) + + return max_binary + + + + +def normalizeCTscan(ct_nda): + """Normalize the CT scan to range 0 to 1""" + if np.amin(ct_nda) < 0: + ct_normalized_nda = ct_nda - np.amin(ct_nda) + + ct_normalized_nda = ct_normalized_nda/np.amax(ct_normalized_nda) + + return ct_normalized_nda + + +def otsuThreshoulding(ct_normalized_nda): + """Apply Otsu thresholding on the normalized ranging from 0 to 1 scan""" + + thresh = threshold_otsu(ct_normalized_nda) + + binary = (ct_normalized_nda > thresh)*1 + + return binary.astype(np.float32) + +def get2Maximum2DRegions(max_binary): + """Get two largestest 2D region from multiple 2D regions""" + + xy_two_largest_binary = np.zeros(max_binary.shape, dtype = np.float32 ) + + largest_area = np.zeros(max_binary.shape[0]) + + second_largest_area = np.zeros(max_binary.shape[0]) + + for i in range(max_binary.shape[0]): + xy_binary = max_binary[i, :, :] + xy_labels = measure.label(xy_binary, background = 0) + xy_props = measure.regionprops(xy_labels) + xy_areas = [prop.area for prop in xy_props] + #print xy_areas + + if xy_areas == []: + continue + + elif len(xy_areas) == 1: + largest_area[i] = xy_areas[0] + second_largest_area[i] = 0.0 + largest_label = xy_areas.index(largest_area[i]) + 1 + xy_two_largest_binary[i, :, :] = xy_labels == largest_label + + else: + xy_areas_sorted = sorted(xy_areas) + largest_area[i] = xy_areas_sorted[-1] + second_largest_area[i] = 
xy_areas_sorted[-2] + largest_label = xy_areas.index(largest_area[i]) + 1 + second_largest_label = xy_areas.index(second_largest_area[i])+1 + xy_largest_binary = xy_labels == largest_label + xy_second_largest_binary = xy_labels == second_largest_label + xy_two_largest_binary[i, :, :] = np.float32(np.logical_or(xy_largest_binary, xy_second_largest_binary)) + + return xy_two_largest_binary + +def get1Maximum2DRegion(max_second_binary): + """Get the largest 2D region from multiple 2D regions""" + + new_binary = np.zeros(max_second_binary.shape, dtype = np.float32) + for i in range(max_second_binary.shape[0]): + xy_binary = max_second_binary[i,:,:] + xy_labels = measure.label(xy_binary) + xy_props = measure.regionprops(xy_labels) + xy_areas = [prop.area for prop in xy_props] + #print i, xy_areas_1 + if xy_areas == []: + continue + else: + max_area_label = 1 + np.argmax(xy_areas) + new_binary[i,:,:] = np.float32(xy_labels == max_area_label) + + return new_binary + + +def imageOpening2D(max_second_binary, structure=np.ones((15, 15))): + """Applying the image opening operation on the binary mask""" + new_max_second_binary = np.zeros(max_second_binary.shape, dtype = np.float32) + + for i in range(max_second_binary.shape[0]): + + new_max_second_binary[i,:,:] = ndimage.binary_opening(max_second_binary[i,:,:].astype(int), structure=structure).astype(np.float32) + + return new_max_second_binary + +def removeCTscandevice(ct_img_path): + """remove the ct scan device""" + + ct_img = sitk.ReadImage(ct_img_path) + + ct_nda = sitk.GetArrayFromImage(ct_img) + + print('The CT scan you want to implement CT scan device removal:', ct_img_path) + + #print 'The minimum value of CT scan: ', np.amin(ct_nda) + + #print 'The maximum value of CT scan: ', np.amax(ct_nda) + + #print 'The pixel ID type of CT scan: ', ct_img.GetPixelIDTypeAsString() + + ct_normalized_nda = normalizeCTscan(ct_nda) + + binary = otsuThreshoulding(ct_normalized_nda) + + max_binary = getMaximum3DRegion(binary) + + xy_two_largest_binary = get2Maximum2DRegions(max_binary) + + max_second_binary = getMaximum3DRegion(xy_two_largest_binary) + + new_binary = get1Maximum2DRegion(max_second_binary) + + new_max_second_bindary = imageOpening2D(new_binary) + + new_max_binary = getMaximum3DRegion(new_max_second_bindary) + + output_ct_image = sitk.GetImageFromArray(ct_nda * new_max_binary) + + output_ct_image.CopyInformation(ct_img) + + output_ct_image_name = ct_img_path[:ct_img_path.find('.nii.gz')]+'_woCTdevice.nii.gz' + + sitk.WriteImage(output_ct_image, output_ct_image_name) + + + return output_ct_image_name + + # The mask for CT device + + #woCTdevice_mask_image = sitk.GetImageFromArray(new_max_binary) + + #woCTdevice_mask_image.CopyInformation(ct_img) + + #woCTdevice_mask_image_name = ct_img_path[:ct_img_path.find('.nii.gz')]+'_woCTdeviceMask.nii.gz' + + #sitk.WriteImage(woCTdevice_mask_image, woCTdevice_mask_image_name) + + #return output_ct_image_name, woCTdevice_mask_image_name + + + +def contrastStretch(ct_img_path, percent = (10,90)): + """Apply the contrast stretching on 2D or 3D image""" + ct_img = sitk.ReadImage(ct_img_path) + ct_nda = sitk.GetArrayFromImage(ct_img) + p1, p2 = np.percentile(ct_nda, percent, interpolation='nearest') + nda_rescale = exposure.rescale_intensity(ct_nda, in_range = (p1, p2)) + ct_img_cs = sitk.GetImageFromArray(nda_rescale) + ct_img_cs.CopyInformation(ct_img) + output_ct_name = ct_img_path[:ct_img_path.find('.nii.gz')]+'_contrastStretching.nii.gz' + sitk.WriteImage(ct_img_cs, output_ct_name) + return output_ct_name + 
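# Illustrative sketch (not part of this file): one possible way to chain the
# CTtools helpers above on a single scan. 'scan.nii.gz' is a placeholder path,
# the ordering shown is an assumption, and the import assumes CTtools is
# importable (e.g. from the module's src directory).
from CTtools import removeCTscandevice, contrastStretch, bone_extracted

def preprocess_ct_example(ct_path='scan.nii.gz'):
    no_device = removeCTscandevice(ct_path)  # removes the scanner device, writes *_woCTdevice.nii.gz
    stretched = contrastStretch(no_device)   # (10, 90) percentile contrast stretch, writes *_contrastStretching.nii.gz
    skull = bone_extracted(stretched)        # keeps voxels >= 500 (bone), writes *_skull.nii.gz
    return skull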
diff --git a/nph_5class/src/TestFunc.py b/nph_5class/src/TestFunc.py
new file mode 100644
index 0000000..b6dd292
--- /dev/null
+++ b/nph_5class/src/TestFunc.py
@@ -0,0 +1,291 @@
+import time
+import numpy as np
+import torch
+from torch.utils.data import DataLoader
+from torch.utils.data import Dataset
+import os
+
+import nibabel as nib
+import torchvision
+import torchvision.transforms as transforms
+import torch.nn as nn
+import torch.nn.functional as F
+import torch.optim as optim
+from scipy import ndimage
+
+
+# device = 'cuda:3' if torch.cuda.is_available() else 'cpu'
+# device = 'cuda:1' if torch.cuda.is_available() else 'cpu'
+def getCenter(image, segmentation, i, j, k):
+    # 33x33x3 intensity patch around (i, j, k) and the 2x2 mask patch at its centre
+    sample = image[i-16:i+16+1, j-16:j+16+1, k-1:k+1+1]
+    center = segmentation[i:i+1+1, j:j+1+1, k]
+
+    return sample, center
+
+def fillHoles(imgName):
+    image = nib.load('{}_Mask.nii.gz'.format(str(imgName))).get_fdata()
+    for z in range(image.shape[2]):
+        image[:, :, z] = ndimage.binary_fill_holes(image[:, :, z]).astype(int)
+
+    saveImage(image, '{}_Mask.nii.gz'.format(imgName))
+
+def readAll(imgPath, betPath):
+
+    positions = []
+
+    image = nib.load(imgPath).get_fdata()
+    brainMask = nib.load(betPath).get_fdata()
+
+    x, y, z = image.shape
+
+    # Clip intensities to the [-100, 200] window and rescale to [0, 1]
+    image = np.clip(image, -100, 200)
+    image += 100
+    image = image / 300
+
+    # Collect patch centres whose 2x2 centre overlaps the brain mask
+    for k in range(1, z-1, 1):
+        for i in range(17, x-17, 2):
+            for j in range(17, y-17, 2):
+                sample, center = getCenter(image, brainMask, i, j, k)
+                if center.any():
+                    positions.append((i, j, k))
+
+    return image, brainMask, positions, image.shape
+
+
+def getPatch(image_full, brainMask, i, j, k):
+
+    image, center = getCenter(image_full, brainMask, i, j, k)
+
+    return image, torch.tensor([i, j, k])
+
+class NPHDataset(Dataset):
+    def __init__(self, dataPath, betPath, name, Train=False):
+
+        self.name = name
+        self.image, self.brainMask, self.imgList, self.imageShape = readAll(dataPath, betPath)
+        self.transform = transforms.ToTensor()
+
+    def __len__(self):
+        return len(self.imgList)
+
+    def __getitem__(self, idx):
+
+        if torch.is_tensor(idx):
+            idx = idx.tolist()
+
+        i, j, k = self.imgList[idx]
+        data, pos = getPatch(self.image, self.brainMask, i, j, k)
+
+        image = self.transform(data)
+        sample = {'img': image,
+                  'pos': pos
+                  }
+        return sample
+
+class MyModel(nn.Module):
+    def __init__(self, ResNet, num_classes=4, num_outputs=9):
+        super(MyModel, self).__init__()
+
+        self.layer0 = nn.Sequential(
+            nn.Conv2d(3, 64, kernel_size=(3, 3), stride=(2, 2), padding=(3, 3), bias=False),
+            nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True),
+            nn.ReLU(inplace=True),
+            nn.MaxPool2d(kernel_size=2, stride=2, padding=1, dilation=1, ceil_mode=False),
+        )
+
+        self.layer1 = ResNet.layer1
+        self.layer2 = ResNet.layer2
+        self.avgpool = nn.AdaptiveAvgPool2d(output_size=(1, 1))
+
+        self.fc = nn.Linear(in_features=128, out_features=num_classes*num_outputs, bias=True)
+
+    def forward(self, x):
+
+        x = self.layer0(x)
+        x = self.layer1(x)
+        x = self.layer2(x)
+        x = self.avgpool(x)
+        x = torch.flatten(x, 1)
+        x = self.fc(x)
+        return x
+
+def test(model, test_loader, shape, device):
+    """
+    5-class test function: run patch-wise inference and reassemble the predicted label volume.
+ """ + + model.eval() + + result=[] + + # Don't update model + with torch.no_grad(): + predList=[] + targetList=[] + + # Predict + reconstructed=np.zeros(shape) + for batch_index, batch_samples in enumerate(test_loader): + data = batch_samples['img'].to(device, dtype=torch.float) + # pos, shape=batch_samples['pos'].to(device, dtype=torch.float), batch_samples['shape'].to(device) + pos = batch_samples['pos'].to(device, dtype=torch.float) + + output = model(data) + softmax=nn.Softmax(dim=1) + output=torch.reshape(output,(output.shape[0], 5, 2,2)) + output=softmax(output) + + pred=output.argmax(dim=1, keepdim=True).cpu() + + N=output.shape[0] + + for k in range(N): + + x, y, z=map(int, (pos[k][0].item(), pos[k][1].item(), pos[k][2].item())) + + # reconstructed[x:x+1+1,y:y+1+1,z]=pred[k,0,:,:].cpu() + # breakpoint() + reconstructed[x:x+1+1,y:y+1+1,z]=pred[k,0,:,:] + # classes.add(pred[k,0,:,:]) + + return reconstructed + +def loadModel(modelPath, device): + ResNet=torch.hub.load('pytorch/vision:v0.10.0', 'resnet18', pretrained=False) + + model = MyModel(ResNet, num_classes=5, num_outputs=4).to(device) + model.load_state_dict(torch.load(modelPath,map_location=device)) + + return model + +def checkDevice(device): + device=device if torch.cuda.is_available() else 'cpu' + return device + +def runTest(imgName, outputPath, dataPath, betPath, device, BS, model): + + # BS=200 + + # dataPath=os.path.join(dataPath,'{}.nii.gz'.format(imgName)) + + # betPath=os.path.join(betPath,'{}_Mask.nii.gz'.format(imgName)) + betPath = betPath / f"{imgName}_Mask.nii.gz" + + testDataset=NPHDataset(dataPath, betPath, imgName,Train=False) + # testDataset=NPHDataset("/module/src/Norm_old_003_96yo.nii.gz", betPath, imgName,Train=False) + # test_loader = DataLoader(testDataset, batch_size=BS, num_workers=16, drop_last=False, shuffle=False) + test_loader = DataLoader(testDataset, batch_size=BS, num_workers=1, drop_last=False, shuffle=False) + shape=testDataset.imageShape + + print('Start Running:', imgName) + + start = time.time() + + reconstructed=test(model, test_loader, shape, device) + # changeClass(reconstructed) + + print(imgName, end=' ') +# print(' Dice score for class{}: {}'.format(i, 2*TP[i]/(2*TP[i]+FP[i]+FN[i]))) + + # img = nib.Nifti1Image(reconstructed, np.eye(4)) + # nib.save(img, 'reconstructed/reconstructed_{}_{}.nii.gz'.format(modelname, imgName)) + # print('Save to: reconstructed_{}_{}.nii.gz'.format(modelname, imgName)) + + # result_noNoise=eliminateNoise(reconstructed, minArea=64) + result_noNoise=eliminateNoise(reconstructed, minArea=32) + # result_noNoise=eliminateNoise(reconstructed, minArea=80) + # result_noNoise = reconstructed + + saveImage(result_noNoise, os.path.join(outputPath, 'reconstructed_{}.nii.gz'.format(imgName))) + + end = time.time() + print('Elapsed time:', end - start) + + return 'reconstructed_{}.nii.gz'.format(imgName) + +def saveImage(image, name): + img = nib.Nifti1Image(image, np.eye(4)) + nib.save(img, name ) + +def eliminateNoise(label, minArea=16): + neighbors=[(-1,0),(1,0),(0,-1),(0,1)] + + seen=set() + import heapq + position=[] + heapq.heapify(position) + + island=0 + newLabel=np.zeros(label.shape) + i, j, k=label.shape + for z in range(k): + for x in range(i): + for y in range(j): + + if (label[x,y,z]!=0) and (x,y,z) not in seen: + island+=1 + area=0 + curIsland=set() + seen.add((x,y,z)) + curIsland.add((x,y,z)) + heapq.heappush(position, (x,y,z)) + + + while position: + cur=heapq.heappop(position) + area+=1 + + + for neighbor in neighbors: + + if cur[0]-neighbor[0]<0 or 
cur[0]-neighbor[0]>=i: continue + if cur[1]-neighbor[1]<0 or cur[1]-neighbor[1]>=j: continue +# if cur[2]-neighbor[2]<0 or cur[2]-neighbor[2]>=k: continue + + if label[cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]]==label[x,y,z] and (cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]) not in seen: + seen.add((cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2])) + curIsland.add((cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2])) + heapq.heappush(position, (cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2])) + + + + for (posX, posY, posZ) in curIsland: + if area 250)] = 0 + + # Apply the mask + scan[np.where(mask > 0)] = 0 + + #to ignore background + scan = flood_fill(scan, (1,1,1), 250, tolerance=0) + + #connectivity - find bright blotches where area is small + #first find all points where low intensity + poi = np.where(scan > 150) + for i in range(len(poi[0])): + if(not (scan[poi[0][i], poi[1][i], poi[2][i]] == 0)): + print("checking pt ", poi[0][i], poi[1][i], poi[2][i]) + regMask = flood(scan, (poi[0][i], poi[1][i], poi[2][i]), tolerance=100) + #evaluate each point to see if in a small region + if(len(np.nonzero(regMask[0])) < 10): + print((np.nonzero(regMask)) ) + scan = flood_fill(scan, (poi[0][i], poi[1][i], poi[2][i]), 0, tolerance=100) + + #restore background + scan = flood_fill(scan, (1,1,1), 0, tolerance=0) + + return scan + +# scans = os.listdir("Scans") # raw scans +# for scan in scans: +# if(scan.endswith("nii.gz")): +# name = scan.split('.')[0] +# print(name) +# mask = "skull_stripped_files/" + name + "_skull.nii.gz" # these files are from Poyu's code +# print(mask) + +# proc = postSkullStrip("Scans/" + scan, mask) + +# nii_image = nib.Nifti1Image(proc.astype(np.float32), affine=np.eye(4)) +# nib.save(nii_image, "stripped/" + name + "_masked.nii.gz") # the corrected raw scans, should have a good number of slices bounded to just the brain + maybe some thin shape of the skull diff --git a/nph_5class/xml_template b/nph_5class/xml_template new file mode 100644 index 0000000..a3a520e --- /dev/null +++ b/nph_5class/xml_template @@ -0,0 +1,42 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
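To tie the pieces of TestFunc.py together, here is a minimal, hypothetical driver. It is not part of the diff: the checkpoint filename, scan identifier, and directories below are placeholders, and it assumes src/ is on the Python import path. The functions checkDevice, loadModel, and runTest come from the code above; betPath must be a pathlib.Path because runTest joins it with the / operator.

# Hypothetical end-to-end driver for TestFunc.py; all paths below are placeholders.
from pathlib import Path
from TestFunc import checkDevice, loadModel, runTest

device = checkDevice('cuda:0')                         # falls back to 'cpu' without CUDA
model = loadModel('nph_5class_checkpoint.pt', device)  # assumed checkpoint file; torch.hub fetches the resnet18 definition

img_name = 'patient001'                                # assumed scan identifier
data_path = '/data/{}.nii.gz'.format(img_name)         # full path to the preprocessed CT volume
bet_dir = Path('/data/masks')                          # directory containing patient001_Mask.nii.gz
output_dir = '/data/output'

out_name = runTest(img_name, output_dir, data_path, bet_dir, device, BS=200, model=model)
print('Saved', out_name, 'to', output_dir)

runTest builds the NPHDataset from the scan and its brain mask, runs test() patch-wise, removes small islands with eliminateNoise, and writes reconstructed_{imgName}.nii.gz into outputPath.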