uri="URI to BLOB" >
+ """
+
+        import_service = self.service("import")
+        if xml is not None:
+            if not isinstance(xml, str):
+                xml = self.factory.to_string(xml)
+        response = import_service.transfer(filename=filename, xml=xml)
+        return response.content
+
+
+
+    def service_url(self, service_type, path="", query=None):
+        """Construct a URL for the named service
+
+        @param service_type: the service name (a key in the service map)
+        @param path: a path relative to the service root
+        @param query: a dict of query parameters to encode
+
+        @return the full URL as a string
+        """
+ root = self.service_map.get(service_type, None)
+ if root is None:
+            raise BQApiError('Unknown service type: %s' % service_type)
+ if query:
+ path = "%s?%s" % (path, urllib.parse.urlencode(query))
+ return urllib.parse.urljoin(root, path)
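+
+    # Usage sketch (hypothetical host and values; assumes the service map is loaded):
+    #   url = session.service_url('data_service', path='image', query={'limit': 10})
+    #   # -> http://<host>/data_service/image?limit=10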
+
+
+    def _load_services(self):
+        """Fetch the service registry from the root server and build the service map
+
+        @return None (populates self.service_map)
+        """
+ services = self.load (posixpath.join(self.bisque_root , "services"))
+ smap = {}
+ for service in services.tags:
+ smap [service.type] = service.value
+ self.service_map = smap
+
+ def service (self, service_name):
+ return ServiceFactory.make (self, service_name)
+
+
+ #############################
+ # Classes and Type
+ #############################
+    def element(self, ty, **attrib):
+        elem = etree.Element(ty, **attrib)
+        return elem
+
+
+    def append(self, mex, tags=None, gobjects=None, children=None):
+ def append_mex (mex, type_tup):
+ type_, elems = type_tup
+ for tg in elems:
+ if isinstance(tg, dict):
+ tg = d2xml({ type_ : tg})
+ elif isinstance(tg, BQNode):
+ tg = BQFactory.to_etree(tg)
+ elif isinstance(tg, etree._Element):
+ pass
+ else:
+ raise BQApiError('bad values in tag/gobject list %s' % tg)
+ mex.append(tg)
+
+        append_mex(mex, ('tag', tags or []))
+        append_mex(mex, ('gobject', gobjects or []))
+        for elem in children or []:
+            append_mex(mex, elem)
+
+
+ ##############################
+ # Mex
+ ##############################
+    def update_mex(self, status, tags=None, gobjects=None, children=None, reload=False, merge=False):
+        """Save an updated mex with additional content
+
+        @param status: the current status of the mex
+        @param tags: list of etree.Element|BQTag|dict objects of the form { 'name': 'x', 'value': 'z' }
+        @param gobjects: list of etree.Element|BQGObject|dict objects of the form { 'name': 'x', 'value': 'z' }
+        @param children: list of tuples (type, object array), e.g. ('mex', [{...}, ...])
+        @param reload: reload and return the updated mex from the server
+        @param merge: merge the "outputs"/"inputs" sections if needed
+        @return the updated mex if reload is True, otherwise None
+        """
+ if merge:
+ mex = self.fetchxml(self.mex.uri, view='deep') # get old version of MEX, so it can be merged if needed
+ mex.set('value', status)
+ else:
+ mex = etree.Element('mex', value = status, uri = self.mex.uri)
+ #self.mex.value = status
+ def append_mex (mex, type_tup):
+ type_, elems = type_tup
+ for tg in elems:
+ if isinstance(tg, dict):
+ tg = d2xml({ type_ : tg})
+ elif isinstance(tg, BQNode):
+ tg = self.factory.to_etree(tg)
+ elif isinstance(tg, etree._Element): #pylint: disable=protected-access
+ pass
+ else:
+ raise BQApiError('bad values in tag/gobject list %s' % tg)
+ was_merged = False
+ if merge and tg.tag == 'tag' and tg.get('name', '') in ['inputs', 'outputs']:
+ hits = mex.xpath('./tag[@name="%s"]' % tg.get('name', ''))
+ if hits:
+ assert len(hits) == 1
+ hits[0].extend(list(tg))
+ was_merged = True
+ log.debug("merged '%s' section in MEX", tg.get('name', ''))
+ if not was_merged:
+ mex.append(tg)
+
+        append_mex(mex, ('tag', tags or []))
+        append_mex(mex, ('gobject', gobjects or []))
+        for elem in children or []:
+            append_mex(mex, elem)
+
+ content = self.postxml(self.mex.uri, mex, view='deep' if reload else 'short')
+ if reload and content is not None:
+ self.mex = self.factory.from_string(content)
+ return self.mex
+ return None
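+
+    # Usage sketch (hypothetical tag values; assumes a session with a running mex):
+    #   session.update_mex('IN PROGRESS',
+    #                      tags=[{'name': 'progress', 'value': '50'}],
+    #                      reload=False)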
+
+
+    def finish_mex(self, status="FINISHED", tags=None, gobjects=None, children=None, msg=None):
+        """Finish the mex, optionally recording a final message
+
+        @param status: the final status (default "FINISHED")
+        @param tags: list of tags to attach to the mex
+        @param gobjects: list of gobjects to attach to the mex
+        @param children: list of child tuples to attach to the mex
+        @param msg: optional message, stored as a 'message' tag
+
+        @return the result of update_mex
+        """
+        tags = list(tags) if tags else []  # avoid mutating a caller's list or a shared default
+ if msg is not None:
+ tags.append( { 'name':'message', 'value': msg })
+ try:
+ return self.update_mex(status, tags=tags, gobjects=gobjects, children=children, reload=False, merge=True)
+ except BQCommError as ce:
+            log.error("Problem during finish mex %s", ce.response.request.headers)
+            try:
+                return self.update_mex(status='FAILED', tags=[{'name': 'error_message', 'value': "Error during saving (status %s)" % ce.response.status_code}])
+            except Exception:
+                log.exception("Cannot finish/fail mex")
+
+    def fail_mex(self, msg):
+        """Mark the mex as FAILED with an error message
+
+        @param msg: the error message to record
+        """
+        tags = [{'name': 'error_message', 'value': msg}] if msg is not None else []
+        self.finish_mex(status='FAILED', tags=tags)
+
+ def _begin_mex(self, moduleuri):
+ """create a mex on the server for this run"""
+ pass
+
+
+
+ ##############################
+ # Module control
+ ##############################
+ def run_modules(self, module_list, pre_run=None, post_run=None, callback_fct=None):
+ """Run one or more modules in parallel.
+
+ :param module_list: List of modules to run
+ :type module_list: [ { moduleuri: ..., inputs: { param1:val1, param2:val2, ...}, parent_mex: ... }, {...}, ... ]
+ :param pre_run: module entrypoint to call before run (or None if no prerun)
+ :type pre_run: str
+ :param post_run: module entrypoint to call after run (or None if no postrun)
+ :type post_run: str
+ :param callback_fct: function to call on completion (None: block until completion)
+ :type callback_fct: fct(mex_list=list(str))
+ :returns: list of mex URIs, one for each module
+ :rtype: list(str)
+ """
+ # TODO: create MEX according to params and POST it to module_service
+ pass
+
+ ##############################
+ # Resources
+ ##############################
+    def query(self, resource_type, **kw):
+        """Query for resources of a given type
+
+        Supported keyword arguments include tag_query, tag_order, offset, and limit.
+        """
+ results = []
+ queryurl = self.service_url ('data_service', path=resource_type, query=kw)
+ items = self.fetchxml (queryurl)
+ for item in items:
+ results.append (self.factory.from_etree(item))
+ return results
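+
+    # Usage sketch (hypothetical tag expression):
+    #   images = session.query('image', tag_query='species:mouse', limit=100)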
+
+
+    def load(self, url, **params):
+        """Load a bisque object
+
+        @param url: the URL of the object to fetch
+        @param params: extra query arguments (e.g. view='deep')
+
+        @return the loaded object, or None on communication failure
+        """
+ try:
+ xml = self.fetchxml(url, **params)
+ if xml.tag == "response":
+ xml = xml[0]
+ bqo = self.factory.from_etree(xml)
+ return bqo
+ except BQCommError as ce:
+            log.exception('communication issue while loading %s', ce)
+ return None
+
+ def delete(self, bqo, url=None, **kw):
+ "Delete an object and all children"
+ url = bqo.uri or url
+ if url is not None:
+ return self.deletexml(url)
+
+
+    def save(self, bqo, url=None, **kw):
+        """Save an object to the server
+
+        @param bqo: the object to save
+        @param url: the URL to post to (defaults to the object's uri)
+        @param kw: extra arguments passed to postxml
+
+        @return the saved object, or None on failure
+        """
+ try:
+ original = bqo
+
+            # Find an object (or a parent) with a valid uri
+            url = url or bqo.uri
+            if url is None:
+                while url is None and bqo.parent:
+                    bqo = bqo.parent
+                    url = bqo.uri
+            if url is None:
+                url = self.service_url('data_service')
+
+ xml = self.factory.to_etree(bqo)
+ xml = self.postxml(url, xml, **kw)
+ return xml is not None and self.factory.from_etree(xml)
+ except BQCommError as ce:
+ log.exception('communication issue while saving %s' , ce)
+ return None
+
+ def saveblob(self, bqo, filename):
+ """Save a blob to the server and return metadata structure
+ """
+
+ try:
+ xml = self.factory.to_etree(bqo)
+ xmlstr = self.postblob (filename=filename, xml= xml)
+ xmlet = self.factory.string2etree (xmlstr)
+ if xmlet.tag == 'resource' and xmlet.get ('type') == 'uploaded':
+ # return inside
+ bqo = self.factory.from_etree(xmlet[0])
+ return bqo
+ return None
+ except BQCommError as ce:
+ log.exception('communication issue while saving %s' , filename)
+ return None
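+
+    # Usage sketch (hypothetical file; assumes an initialized session and a BQImage class):
+    #   img = BQImage(name='cells.tif')
+    #   saved = session.saveblob(img, filename='/tmp/cells.tif')
+    #   print(saved.uri if saved is not None else 'upload failed')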
diff --git a/nph_5class/bqapi/exception.py b/nph_5class/bqapi/exception.py
new file mode 100644
index 0000000..1826ccc
--- /dev/null
+++ b/nph_5class/bqapi/exception.py
@@ -0,0 +1,30 @@
+class BQException(Exception):
+ """
+ BQException
+ """
+
+class BQApiError(BQException):
+ """Exception in API usage"""
+
+
+
+class BQCommError(BQException):
+
+    def __init__(self, response):
+        """
+        @param response: the requests Response object that triggered the error
+        """
+        self.response = response
+
+
+    def __str__(self):
+        content = self.response.content
+        if len(content) > 64:
+            content = "%s...%s" % (content[:64], content[-64:])
+        return "BQCommError(%s, status=%s, req headers=%s): %s" % (
+            self.response.url, self.response.status_code, self.response.request.headers, content)
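+
+# Usage sketch (hypothetical request; shows how callers can inspect a failure):
+#   try:
+#       session.fetchxml('/data_service/image/bad-uniq')
+#   except BQCommError as ce:
+#       print(ce.response.status_code, ce)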
diff --git a/nph_5class/bqapi/services.py b/nph_5class/bqapi/services.py
new file mode 100644
index 0000000..c4a80d9
--- /dev/null
+++ b/nph_5class/bqapi/services.py
@@ -0,0 +1,347 @@
+import os
+#import urllib
+#import urlparse
+
+import random
+import string
+import logging
+import tempfile
+import json
+import shutil
+
+from six.moves import urllib
+
+try:
+ from lxml import etree
+except ImportError:
+ import xml.etree.ElementTree as etree
+
+try:
+ import tables
+except ImportError:
+    logging.warning("pytables services not available")
+
+from requests_toolbelt import MultipartEncoder
+from .util import normalize_unicode
+from .exception import BQCommError, BQApiError
+
+
+
+#DEFAULT_TIMEOUT=None
+DEFAULT_TIMEOUT=60*60 # 1 hour
+
+####
+#### KGK
+#### Still working on filling this out
+#### would be cool to have service definition language to make these.
+#### TODO more service, renders etc.
+
+class BaseServiceProxy(object):
+
+ def __init__(self, session, service_name, timeout=DEFAULT_TIMEOUT):
+ self.session = session
+ self.service_url = session.service_map [service_name]
+ self.service_name = service_name
+ self.timeout = timeout
+
+ def construct(self, path, params=None):
+ url = self.service_url
+ if params:
+ path = "%s?%s" % (path, urllib.parse.urlencode(params))
+ if path:
+ url = urllib.parse.urljoin (url, path)
+ return url
+
+    def request(self, path=None, params=None, method='get', render=None, **kw):
+        """
+        @param path: a path on the service
+        @param params: a dict of values to encode as query parameters
+        @return a requests.Response, or an etree.Element when render is 'xml'/'etree'
+        """
+ if path and path[0] == "/":
+ path = path[1:]
+ if path:
+ path = urllib.parse.urljoin (self.service_url, path)
+ else:
+ path = self.service_url
+
+ # no longer in session https://github.com/requests/requests/issues/3341
+ timeout = kw.pop('timeout', self.timeout)
+        headers = dict(kw.pop('headers', self.session.c.headers))  # copy to avoid mutating session headers
+        if render in ("xml", 'etree'):
+            headers.update({'Content-Type': 'text/xml', 'Accept': 'text/xml'})
+
+ try:
+ response = self.session.c.request (url=path, params=params, method=method, timeout=timeout, headers=headers, **kw)
+ if render in ("xml", 'etree'):
+ return etree.fromstring (response.content)
+ return response
+ except etree.ParseError:
+ #self.session.log.error ("xml parse error in %s", response.content)
+ raise BQCommError(response)
+
+ def fetch(self, path=None, params=None, render=None, **kw):
+ return self.request(path=path, params=params, render=render, **kw)
+ def get(self, path=None, params=None, render=None, **kw):
+ return self.request(path=path, params=params, render=render, **kw)
+ def post(self, path=None, params=None, render=None, **kw):
+ return self.request(path=path, params=params, render=render, method='post', **kw)
+ def put(self, path=None, params=None, render=None, **kw):
+ return self.request(path=path, params=params, render=render, method='put', **kw)
+ def delete(self, path=None, params=None, render=None, **kw):
+ return self.request(path=path, params=params, render=render, method='delete', **kw)
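+
+# Usage sketch (hypothetical paths; any registered service works the same way):
+#   data = session.service('data_service')
+#   doc = data.fetch('image', params={'limit': 10}, render='etree')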
+
+
+class AdminProxy (BaseServiceProxy):
+ def login_as (self, user_name):
+ data = self.session.service ('data_service')
+ userxml = data.fetch ("user", params = { 'wpublic' :'1', 'resource_name': user_name}, render="xml")
+ user_uniq = userxml.find ("user").get ('resource_uniq')
+ self.fetch ('/user/{}/login'.format(user_uniq))
+
+
+class AuthProxy (BaseServiceProxy):
+ def login_providers (self, **kw):
+ return self.request ('login_providers', **kw)
+
+ def credentials (self, **kw):
+ return self.request ('credentials', **kw)
+
+ def get_session (self, **kw): # hides session
+ return self.request ('session', **kw)
+
+class BlobProxy (BaseServiceProxy):
+ def _resource_element (self, args_tag_file=None, args_resource_type=None, args_srcpath=None, **kw):
+ """Check the args and create a compatible resource element for posting or linking
+ """
+ if args_tag_file:
+ # Load file into resource
+ try:
+ resource = etree.parse (args_tag_file).getroot()
+            except etree.ParseError as pe:
+                raise BQApiError('Parse failure: aborting: %s' % pe)
+ else:
+ resource = etree.Element (args_resource_type or 'resource')
+
+ for fld in ('permission', 'hidden'):
+ if fld in kw:
+ resource.set (fld, kw.get(fld))
+ if args_srcpath:
+ resource.set('value', args_srcpath)
+ resource.set('name', os.path.basename (args_srcpath))
+ return resource
+
+ def path_link(self, srcpath, alias=None, resource_type=None, tag_file=None):
+ url = urllib.parse.urljoin( self.session.service_map['blob_service'], 'paths/insert' )
+ params = {}
+ resource = self._resource_element(args_srcpath=srcpath, args_resource_type=resource_type, args_tag_file=tag_file)
+ payload = etree.tostring (resource)
+ if alias:
+ params['user'] = alias
+ r = self.post(url, data=payload, params=params, headers={'content-type': 'application/xml'})
+ return r
+
+ def path_delete(self, srcpath, alias=None):
+ url = urllib.parse.urljoin( self.session.service_map['blob_service'], 'paths/remove' )
+ params = {'path': srcpath}
+ if alias:
+ params['user'] = alias
+ r = self.get(url, params=params)
+ return r
+
+ def path_rename(self, srcpath, dstpath, alias=None):
+ url = urllib.parse.urljoin( self.session.service_map['blob_service'], 'paths/move' )
+ params = {'path': srcpath, 'destination': dstpath}
+ if alias:
+ params['user'] = alias
+ r = self.get(url, params=params)
+ return r
+
+ def path_list(self, srcpath, alias=None):
+ url = urllib.parse.urljoin( self.session.service_map['blob_service'], 'paths/list' )
+ params = { 'path' : srcpath }
+ if alias:
+ params['user'] = alias
+ r = self.get(url, params=params)
+ return r
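+
+# Usage sketch (hypothetical store path; assumes 'blob_service' is in the service map):
+#   blobs = session.service('blob_service')
+#   blobs.path_link('store://local/images/cells.tif', resource_type='image')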
+
+def id_generator(size=6, chars=string.ascii_uppercase + string.digits):
+ return ''.join(random.choice(chars) for _ in range(size))
+
+class ImportProxy(BaseServiceProxy):
+ def transfer (self, filename, fileobj=None, xml=None):
+ fields = {}
+ if fileobj is None and filename is None:
+            raise BQApiError('Filename or fileobj is required for transfer')
+ if fileobj is None and os.path.exists (filename):
+ fileobj = open (filename, 'rb')
+ if fileobj is not None and filename is None:
+ filename = fileobj.name
+
+ if fileobj is not None:
+ filename = normalize_unicode(filename)
+ fields['file'] = (os.path.basename(filename), fileobj, 'application/octet-stream')
+ if xml is not None:
+ fields['file_resource'] = xml
+ if fields:
+ # https://github.com/requests/toolbelt/issues/75
+ m = MultipartEncoder(fields = fields )
+            m._read = m.read  #pylint: disable=protected-access
+            m.read = lambda size: m._read(8192 * 1024)  # read in 8MB chunks
+ # ID generator is used to force load balancing operations
+ response = self.post("transfer_"+id_generator(),
+ data=m,
+ headers={'Accept': 'text/xml', 'Content-Type':m.content_type})
+ return response
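+
+# Usage sketch (hypothetical file; assumes the 'import' service is registered):
+#   importer = session.service('import')
+#   response = importer.transfer('/tmp/cells.tif')
+#   resource = etree.fromstring(response.content)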
+
+class DatasetProxy (BaseServiceProxy):
+
+ def delete (self, dataset_uniq, members=False, **kw):
+ if members:
+ params = kw.pop('params', {})
+ params['duri'] = dataset_uniq
+ return self.fetch("delete", params=params, **kw)
+ data = self.session.service('data_service')
+ return data.delete (dataset_uniq)
+
+ def append_member (self, dataset_uniq, resource_uniq, **kw):
+ """Append an element
+ """
+ data = self.session.service('data_service')
+ member = etree.Element('value', type='object')
+        member.text = data.construct(resource_uniq)
+ self.post (dataset_uniq, data=etree.tostring(member), render='etree')
+
+    def delete_member(self, dataset_uniq, resource_uniq, **kw):
+        """Delete a member from a dataset
+
+        @return the updated dataset document on success, or None
+        """
+ data = self.session.service('data_service')
+ dataset = data.fetch ( dataset_uniq, params = {'view':'full'}, render='etree')
+ members = dataset.xpath ('value[text()="%s"]' % data.construct (resource_uniq))
+ for member in members:
+ dataset.remove (member)
+ if len (members):
+ for val in dataset.iter ('value'):
+ _ = val.attrib.pop ('index', 0)
+ return data.put (dataset_uniq, data = etree.tostring (dataset), render='etree')
+ return None
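+
+    # Usage sketch (hypothetical uniqs; assumes 'dataset_service' is registered):
+    #   datasets = session.service('dataset_service')
+    #   datasets.append_member('00-dataset1', '00-image7')
+    #   datasets.delete_member('00-dataset1', '00-image7')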
+
+
+class ModuleProxy (BaseServiceProxy):
+ def execute (self, module_name, **module_parms):
+ pass
+ def register(self, engine_url):
+ return self.request (path='register_engine', params = { 'engine_url':engine_url })
+ def unregister (self, engine_url):
+ return self.request (path='unregister_engine', params = { 'engine_url':engine_url })
+
+
+
+
+class TableProxy (BaseServiceProxy):
+ def load_array(self, table_uniq, path, slices=[]):
+ """
+ Load array from BisQue.
+ """
+ if table_uniq.startswith('http'):
+ table_uniq = table_uniq.split('/')[-1]
+ slice_list = []
+ for single_slice in slices:
+ if isinstance(single_slice, slice):
+ slice_list.append("%s;%s" % (single_slice.start or '', '' if single_slice.stop is None else single_slice.stop-1))
+ elif isinstance(single_slice, int):
+ slice_list.append("%s;%s" % (single_slice, single_slice))
+ else:
+ raise BQCommError("malformed slice parameter")
+ path = '/'.join([table_uniq.strip('/'), path.strip('/')])
+ info_url = '/'.join([path, 'info', 'format:json'])
+ response = self.get(info_url)
+ try:
+ num_dims = len(json.loads(response.content).get('sizes'))
+ except ValueError:
+            raise BQApiError('array could not be read')
+ # fill slices with missing dims
+ for _ in range(num_dims-len(slice_list)):
+ slice_list.append(';')
+ data_url = '/'.join([path, ','.join(slice_list), 'format:hdf'])
+ response = self.get(data_url)
+ # convert HDF5 to Numpy array (preserve indices??)
+ with tables.open_file('array.h5', driver="H5FD_CORE", driver_core_image=response.content, driver_core_backing_store=0) as h5file:
+ return h5file.root.array.read()
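+
+    # Usage sketch (hypothetical table uniq; assumes pytables is available):
+    #   table = session.service('table')
+    #   arr = table.load_array('00-abc123', 'measurements', slices=[slice(0, 100)])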
+
+ def store_array(self, array, name):
+ """
+ Store numpy array in BisQue and return resource doc.
+ """
+        dirpath = tempfile.mkdtemp()
+        out_file = os.path.join(dirpath, "%s.h5" % name)  # importer needs extension .h5; define before try so finally can clean up
+        try:
+            # (1) store array as HDF5 file
+ with tables.open_file(out_file, "w", filters = tables.Filters(complevel=5)) as h5file: # compression level 5
+ h5file.create_array(h5file.root, name, array)
+ # (2) call bisque importer with file
+ importer = self.session.service('import')
+ response = importer.transfer(out_file)
+ # (3) return resource xml
+ res = etree.fromstring (response.content)
+ if res.tag != 'resource' or res.get('type') != 'uploaded':
+                raise BQApiError('array could not be stored')
+ else:
+ return res[0]
+ finally:
+ if os.path.isfile(out_file):
+ os.remove(out_file)
+ os.rmdir(dirpath)
+
+
+class ImageProxy(BaseServiceProxy):
+    def get_thumbnail(self, image_uniq, **kw):
+        # urljoin accepts only (base, url); the original three-argument call passed
+        # 'thumbnail' as allow_fragments. Let request() join the path onto the service root.
+        return self.get('%s/thumbnail' % image_uniq, **kw)
+
+class ExportProxy(BaseServiceProxy):
+ valid_param = set (['files', 'datasets', 'dirs', 'urls', 'users'])
+ def fetch_export(self, **kw):
+ params = { key:val for key,val in list(kw.items()) if key in self.valid_param and val is not None }
+ response = self.fetch ('stream', params = params, stream=kw.pop ('stream', True) )
+ return response
+ def fetch_export_local(self, localpath, stream=True, **kw):
+ response = self.fetch_export (stream=stream, **kw )
+ with open(localpath, 'wb') as f:
+ shutil.copyfileobj(response.raw, f)
+ return localpath
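+
+    # Usage sketch (hypothetical uniq; streams an archive of the listed resources):
+    #   export = session.service('export')
+    #   export.fetch_export_local('/tmp/archive.tar', files='00-image7')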
+
+SERVICE_PROXIES = {
+ 'admin' : AdminProxy,
+ 'auth_service' : AuthProxy,
+ 'import' : ImportProxy,
+ 'blob_service': BlobProxy,
+ 'dataset_service': DatasetProxy,
+ 'table': TableProxy,
+ 'image_service' : ImageProxy,
+ 'export' : ExportProxy,
+}
+
+class ServiceFactory (object):
+ @classmethod
+ def make (cls, session, service_name):
+ svc = SERVICE_PROXIES.get (service_name, BaseServiceProxy)
+ if service_name in session.service_map:
+ return svc (session, service_name )
+ return None
+
+
+def test_module():
+ from bqapi import BQSession
+ session = BQSession ().init_local ('admin', 'admin', 'http://localhost:8080')
+ admin = session.service('admin')
+ data = session.service('data_service')
+ #admin.user(uniq).login().fetch ()
+ xml = data.get ("user", params = {'wpublic':'1', 'resource_name' : 'admin'}, render='xml')
+ user_uniq = xml.find ("user").get ('resource_uniq')
+ admin.fetch ('/user/{}/login'.format( user_uniq))
+
+if __name__ == "__main__":
+ test_module()
diff --git a/nph_5class/bqapi/tests/__init__.py b/nph_5class/bqapi/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/nph_5class/bqapi/tests/conftest.py b/nph_5class/bqapi/tests/conftest.py
new file mode 100644
index 0000000..e2ee066
--- /dev/null
+++ b/nph_5class/bqapi/tests/conftest.py
@@ -0,0 +1,30 @@
+##
+## Add local fixtures here
+import pytest
+from collections import OrderedDict, namedtuple
+
+from bq.util.bunch import Bunch
+from bq.util.mkdir import _mkdir
+from .util import fetch_file
+from bqapi import BQServer
+
+@pytest.fixture(scope="module")
+def server():
+ return BQServer()
+
+
+LocalFile = namedtuple('LocalFile', ['name', 'location'])
+
+@pytest.fixture(scope="module")
+def stores(config):
+ samples = config.store.samples_url
+ inputs = config.store.input_dir
+ results = config.store.results_dir
+ _mkdir(results)
+
+ files = []
+ for name in [ x.strip() for x in config.store.files.split() ]:
+ print "Fetching", name
+ files.append (LocalFile (name, fetch_file(name, samples, inputs)))
+
+ return Bunch(samples=samples, inputs=inputs, results=results, files=files)
diff --git a/nph_5class/bqapi/tests/setup.cfg.sample b/nph_5class/bqapi/tests/setup.cfg.sample
new file mode 100644
index 0000000..5fb4ed5
--- /dev/null
+++ b/nph_5class/bqapi/tests/setup.cfg.sample
@@ -0,0 +1,14 @@
+[Host]
+root: http://localhost:8080
+user: test
+password: test
+
+[Store]
+location: http://hammer.ece.ucsb.edu/~bisque/test_data/images/
+local_location: SampleData
+results_location: Results
+
+#simple rgb image
+filename1: flowers_24bit_nointr.png
+
+[nosetests]
diff --git a/nph_5class/bqapi/tests/test_bqapi.py b/nph_5class/bqapi/tests/test_bqapi.py
new file mode 100644
index 0000000..bdc494d
--- /dev/null
+++ b/nph_5class/bqapi/tests/test_bqapi.py
@@ -0,0 +1,33 @@
+import pytest
+
+from lxml import etree
+from bqapi import BQSession
+from bqapi.bqclass import BQFactory
+from tg import config
+
+
+pytestmark = pytest.mark.functional
+
+
+
+def test_load (session):
+ 'Check that loading works'
+
+ #host = config.get ('host.root')
+ #user = config.get ('host.user')
+ #passwd = config.get ('host.password')
+ #bq = BQSession()
+ #bq.init_local (user, passwd, bisque_root = host, create_mex = False)
+ x = session.load ('/data_service/image/?limit=10')
+ print "loading /data_service/images->", BQFactory.to_string((x))
+
+
+def test_load_pixels(session):
+ 'check that you can load pixels from an image'
+ #bq = BQSession()
+ x = session.load ('/data_service/image/?limit=10')
+
+ if len(x.kids):
+ i0 = x.kids[0]
+ pixels = i0.pixels().slice(z=1,t=1).fetch()
+        print(len(pixels))
diff --git a/nph_5class/bqapi/tests/test_bqclass.py b/nph_5class/bqapi/tests/test_bqclass.py
new file mode 100644
index 0000000..893077d
--- /dev/null
+++ b/nph_5class/bqapi/tests/test_bqclass.py
@@ -0,0 +1,34 @@
+import pytest
+
+from lxml import etree
+from bqapi.bqclass import BQFactory
+
+pytestmark = pytest.mark.unit
+
+
+X="""
+
+
+
+
+
+
+"""
+
+
+
+def test_conversion():
+    'test simple xml conversions'
+    print("ORIGINAL")
+    print(X)
+
+    factory = BQFactory(None)
+
+    r = factory.from_string(X)
+    print("PARSED")
+
+    x = factory.to_string(r)
+
+    print("XML")
+    print(r)
+    assert x == X.translate(str.maketrans('', '', '\r\n'))
diff --git a/nph_5class/bqapi/tests/test_bqfeature.py b/nph_5class/bqapi/tests/test_bqfeature.py
new file mode 100644
index 0000000..4a03d8f
--- /dev/null
+++ b/nph_5class/bqapi/tests/test_bqfeature.py
@@ -0,0 +1,198 @@
+import os
+import numpy as np
+from six.moves import urllib
+from util import fetch_file
+from lxml import etree
+from six.moves import configparser
+from datetime import datetime
+
+
+from collections import OrderedDict, namedtuple
+import pytest
+import nose
+from nose import with_setup
+
+from bq.util.mkdir import _mkdir
+from bqapi import BQSession, BQServer
+from bqapi.util import fetch_dataset
+from bqapi.comm import BQCommError
+from bqapi.util import *
+from bqapi.bqfeature import *
+
+
+
+TEST_PATH = 'tests_%s' % urllib.parse.quote(datetime.now().strftime('%Y%m%d%H%M%S%f'))  # set a test dir on the system so not too many repeats occur
+
+pytestmark = pytest.mark.skip("Unported tests")
+#pytestmark = pytest.mark.functional
+
+#setup comm test
+def setUp():
+ global results_location
+ global store_local_location
+ global file1_location
+ global filename1
+ global bqsession
+ global FeatureResource
+
+    config = configparser.ConfigParser()
+ config.read('setup.cfg')
+ root = config.get('Host', 'root') or 'localhost:8080'
+ user = config.get('Host', 'user') or 'test'
+ pwd = config.get('Host', 'password') or 'test'
+ results_location = config.get('Store', 'results_location') or 'Results'
+ _mkdir(results_location)
+
+ store_location = config.get('Store', 'location') or None
+    if store_location is None: raise NameError('Require a store location to run test properly')
+
+ store_local_location = config.get('Store', 'local_location') or 'SampleData'
+ filename1 = config.get('Store','filename1') or None
+    if filename1 is None: raise NameError('Require an image to run test properly')
+ file1_location = fetch_file(filename1, store_location, store_local_location)
+
+ FeatureResource = namedtuple('FeatureResource',['image','mask','gobject'])
+ FeatureResource.__new__.__defaults__ = (None, None, None)
+ #start session
+ bqsession = BQSession().init_local(user, pwd, bisque_root=root, create_mex=False)
+
+def setup_bqfeature_fetch():
+ """
+ uploads an image
+ """
+ global resource_list
+ resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, filename1))
+ content = bqsession.postblob(file1_location, xml=resource)
+ uniq = etree.XML(content)[0].attrib['resource_uniq']
+ image_uri = '%s/image_service/image/%s'%(bqsession.bisque_root,uniq)
+ resource_list = [FeatureResource(image=image_uri)]
+
+
+def teardown_bqfeature_fetch():
+ pass
+
+
+@with_setup(setup_bqfeature_fetch, teardown_bqfeature_fetch)
+def test_bqfeature_fetch_1():
+ """
+ Test feature fetch and returning hdf5 file
+ """
+ filename = 'bqfeature_fetch_1.h5'
+ path = os.path.join(results_location, filename)
+ filename = Feature().fetch(bqsession, 'SimpleTestFeature', resource_list, path=path)
+
+
+@with_setup(setup_bqfeature_fetch, teardown_bqfeature_fetch)
+def test_bqfeature_fetch_2():
+ """
+ Test feature fetch and returning pytables object
+ """
+ hdf5 = Feature().fetch(bqsession, 'SimpleTestFeature', resource_list)
+ hdf5.close()
+ os.remove(hdf5.filename)
+
+def setup_bqfeature_fetchvector():
+ """
+ uploads an image
+ """
+ global resource_list
+ resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, filename1))
+ content = bqsession.postblob(file1_location, xml=resource)
+ uniq = etree.XML(content)[0].attrib['resource_uniq']
+ image_uri = '%s/image_service/image/%s'%(bqsession.bisque_root,uniq)
+ resource_list = [FeatureResource(image=image_uri)]
+
+
+def teardown_bqfeature_fetchvector():
+ pass
+
+
+def test_bqfeature_fetchvector_1():
+ """
+ Test fetch vector
+ """
+ feature_vector = Feature().fetch_vector(bqsession, 'SimpleTestFeature', resource_list)
+
+def test_bqfeature_fetchvector_error():
+ """
+    Test fetch vector on a resource that doesn't exist
+ """
+ try:
+ resource_list = [FeatureResource(image='%s/image_service/image/notaresource' % bqsession.bisque_root)]
+ feature_vector = Feature().fetch_vector(bqsession, 'SimpleTestFeature', resource_list)
+ except FeatureError:
+ assert True
+ else:
+ assert False
+
+
+def setup_bqparallelfeature_fetch():
+ """
+ uploads a list of images
+ """
+ global resource_list
+ resource_list = []
+    for _ in range(10):
+ resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, filename1))
+ content = bqsession.postblob(file1_location, xml=resource)
+ uniq = etree.XML(content)[0].attrib['resource_uniq']
+ resource_list.append(FeatureResource(image='%s/image_service/image/%s'%(bqsession.bisque_root,uniq)))
+
+
+def teardown_bqparallelfeature_fetch():
+ """
+ """
+ pass
+
+
+@with_setup(setup_bqparallelfeature_fetch, teardown_bqparallelfeature_fetch)
+def test_bqparallelfeature_fetch_1():
+ """
+ Test parallel feature fetch vector and returning pytables object
+ """
+ PF=ParallelFeature()
+ hdf5 = PF.fetch(bqsession, 'SimpleTestFeature', resource_list)
+ hdf5.close()
+ os.remove(hdf5.filename)
+
+@with_setup(setup_bqparallelfeature_fetch, teardown_bqparallelfeature_fetch)
+def test_bqparallelfeature_fetch_2():
+ """
+ Test parallel feature fetch vector and return a file
+ """
+ filename = 'bqparallelfeature_fetch_2.h5'
+ path = os.path.join(results_location, filename)
+ PF=ParallelFeature()
+ PF.set_thread_num(2)
+ PF.set_chunk_size(5)
+ filename = PF.fetch(bqsession, 'SimpleTestFeature', resource_list, path=path)
+
+
+def setup_bqparallelfeature_fetchvector():
+ """
+ Uploads a list of images
+ """
+ global resource_list
+ resource_list = []
+    for _ in range(10):
+ resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, filename1))
+ content = bqsession.postblob(file1_location, xml=resource)
+ uniq = etree.XML(content)[0].attrib['resource_uniq']
+ resource_list.append(FeatureResource(image='%s/image_service/image/%s'%(bqsession.bisque_root,uniq)))
+
+
+def teardown_bqparallelfeature_fetchvector():
+ """
+ """
+ pass
+
+
+@with_setup(setup_bqparallelfeature_fetchvector, teardown_bqparallelfeature_fetchvector)
+def test_bqparallelfeature_fetchvector_1():
+ """
+ Test parallel feature fetch vector
+ """
+ PF=ParallelFeature()
+ PF.set_thread_num(2)
+ PF.set_chunk_size(5)
+ feature_vectors = PF.fetch_vector(bqsession, 'SimpleTestFeature', resource_list)
diff --git a/nph_5class/bqapi/tests/test_comm.py b/nph_5class/bqapi/tests/test_comm.py
new file mode 100644
index 0000000..03f4460
--- /dev/null
+++ b/nph_5class/bqapi/tests/test_comm.py
@@ -0,0 +1,232 @@
+import pytest
+
+from collections import OrderedDict, namedtuple
+import os
+from lxml import etree
+from six.moves import urllib
+from datetime import datetime
+import time
+
+from bqapi import BQSession
+
+TEST_PATH = 'tests_%s' % urllib.parse.quote(datetime.now().strftime('%Y%m%d%H%M%S%f'))  # set a test dir on the system so not too many repeats occur
+
+# default mark is function.. may be overridden
+pytestmark = pytest.mark.functional
+
+#############################
+### BQServer
+#############################
+@pytest.mark.unit
+def test_prepare_url_1(server):
+ """
+ """
+ check_url = 'http://bisque.ece.ucsb.edu/image/00-123456789?remap=gray&format=tiff'
+ url = 'http://bisque.ece.ucsb.edu/image/00-123456789'
+ odict = OrderedDict([('remap','gray'),('format','tiff')])
+ url = server.prepare_url(url, odict=odict)
+ assert url == check_url
+
+@pytest.mark.unit
+def test_prepare_url_2(server):
+ """
+ """
+ check_url = 'http://bisque.ece.ucsb.edu/image/00-123456789?remap=gray&format=tiff'
+ url = 'http://bisque.ece.ucsb.edu/image/00-123456789'
+ url = server.prepare_url(url, remap='gray', format='tiff')
+ assert url == check_url
+
+@pytest.mark.unit
+def test_prepare_url_3(server):
+ """
+ """
+ check_url = 'http://bisque.ece.ucsb.edu/image/00-123456789?format=tiff&remap=gray'
+ url = 'http://bisque.ece.ucsb.edu/image/00-123456789'
+ odict = OrderedDict([('remap','gray')])
+ url = server.prepare_url(url, odict=odict, format='tiff')
+ assert url == check_url
+
+
+
+#Test BQSession
+def test_open_session(config):
+ """
+    Test initializing a BQSession locally
+ """
+ host = config.get ('host.root')
+ user = config.get ('host.user')
+ pwd = config.get ('host.password')
+
+ bqsession = BQSession().init_local(user, pwd, bisque_root=host, create_mex=False)
+ bqsession.close()
+
+
+def test_initialize_mex_locally(config):
+    """
+    Test initializing a mex locally
+    """
+ host = config.get ('host.root')
+ user = config.get ('host.user')
+ pwd = config.get ('host.password')
+ bqsession = BQSession().init_local(user, pwd, bisque_root=host, create_mex=True)
+ assert bqsession.mex.uri
+ bqsession.close()
+
+
+def test_initialize_session_from_mex(config):
+    """
+    Test initializing a session from a mex
+    """
+ host = config.get ('host.root')
+ user = config.get ('host.user')
+ pwd = config.get ('host.password')
+ bqsession = BQSession().init_local(user, pwd, bisque_root=host)
+ mex_url = bqsession.mex.uri
+ token = bqsession.mex.resource_uniq
+ bqmex = BQSession().init_mex(mex_url, token, user, bisque_root=host)
+ bqmex.close()
+ bqsession.close()
+
+
+def test_fetchxml_1(session):
+ """
+ Test fetch xml
+ """
+ user = session.config.get ('host.user')
+ #bqsession = BQSession().init_local(user, pwd, bisque_root=root)
+ response_xml = session.fetchxml('/data_service/'+user) #fetches the user
+ session.close()
+ if not isinstance(response_xml, etree._Element):
+ assert False , 'Did not return XML!'
+
+def test_fetchxml_2(session, stores):
+ """
+ Test fetch xml and save the document to disk
+ """
+ user = session.config.get ('host.user')
+ filename = 'fetchxml_test_2.xml'
+ path = os.path.join(stores.results,filename)
+ path = session.fetchxml('/data_service/'+user, path=path) #fetches the user
+
+ try:
+ with open(path,'r') as f:
+ etree.XML(f.read()) #check if xml was returned
+
+ except etree.Error:
+ assert False , 'Did not return XML!'
+
+
+def test_postxml_1(session):
+ """
+ Test post xml
+ """
+
+ test_document ="""
+
+
+
+ """
+ response_xml = session.postxml('/data_service/file', xml=test_document)
+ if not isinstance(response_xml, etree._Element):
+ assert False ,'Did not return XML!'
+
+
+def test_postxml_2(session, stores):
+ """
+ Test post xml and save the document to disk
+ """
+
+ test_document ="""
+
+
+
+ """
+ filename = 'postxml_test_2.xml'
+ path = os.path.join(stores.results,filename)
+
+ path = session.postxml('/data_service/file', test_document, path=path)
+
+ try:
+ with open(path,'r') as f:
+ etree.XML(f.read()) #check if xml was returned
+
+ except etree.Error:
+ assert False ,'Did not return XML!'
+
+
+def test_postxml_3(session):
+ """
+ Test post xml and read immediately
+ """
+
+ test_document ="""
+
+
+
+ """
+ response0_xml = session.postxml('/data_service/file', xml=test_document)
+ uri0 = response0_xml.get ('uri')
+ response1_xml = session.fetchxml(uri0)
+    uri1 = response1_xml.get ('uri')
+ session.deletexml (url = uri0)
+ if not isinstance(response0_xml, etree._Element):
+ assert False , 'Did not return XML!'
+
+ assert uri0 == uri1, "Posted and Fetched uri do not match"
+
+
+
+
+def test_fetchblob_1():
+ """
+
+ """
+ pass
+
+
+def test_postblob_1(session, stores):
+ """ Test post blob """
+ resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, stores.files[0].name))
+ content = session.postblob(stores.files[0].location, xml=resource)
+ assert len(content), "No content returned"
+
+
+def test_postblob_2(session, stores):
+ """ Test post blob and save the returned document to disk """
+ filename = 'postblob_test_2.xml'
+ path = os.path.join(stores.results,filename)
+ resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, stores.files[0].name))
+ path = session.postblob(stores.files[0].location, xml=resource, path=path)
+
+ try:
+ with open(path,'r') as f:
+ etree.XML(f.read()) #check if xml was returned
+
+ except etree.Error:
+ assert False , 'Did not return XML!'
+
+def test_postblob_3(session, stores):
+ """
+ Test post blob with xml attached
+ """
+
+ test_document = """
+
+
+
+ """%u'%s/%s'%(TEST_PATH, stores.files[0].name)
+ content = session.postblob(stores.files[0].location, xml=test_document)
+
+
+def test_run_mex(mexsession):
+ """
+ Test run mex
+ """
+ session = mexsession
+ mex_uri = session.mex.uri
+ session.update_mex(status="IN PROGRESS", tags = [], gobjects = [], children=[], reload=False)
+ response_xml = session.fetchxml(mex_uri) #check xml
+ session.finish_mex()
+
+ response_xml = session.fetchxml(mex_uri) #check xml
+ assert mex_uri == response_xml.get ('uri')
diff --git a/nph_5class/bqapi/tests/test_util.py b/nph_5class/bqapi/tests/test_util.py
new file mode 100644
index 0000000..b798a25
--- /dev/null
+++ b/nph_5class/bqapi/tests/test_util.py
@@ -0,0 +1,294 @@
+import pytest
+import os
+import numpy as np
+#import urllib
+from six.moves import urllib
+from datetime import datetime
+
+
+
+from bqapi import BQSession, BQServer
+from bqapi.util import fetch_dataset
+from bq.util.mkdir import _mkdir
+from .util import fetch_file
+from bqapi.comm import BQCommError
+from bqapi.util import *
+try:
+ from lxml import etree
+except ImportError:
+ import xml.etree.ElementTree as etree
+
+TEST_PATH = 'tests_%s'%urllib.parse.quote(datetime.now().strftime('%Y%m%d%H%M%S%f')) #set a test dir on the system so not too many repeats occur
+
+pytestmark = pytest.mark.skip("Unported tests")
+
+
+@pytest.fixture(scope='module')
+def image_uri(session, stores):
+ """
+ uploads an image
+ """
+ resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, stores.files[0].name))
+    content = session.postblob(stores.files[0].location, xml=resource)
+ return etree.XML(content)[0].attrib['uri']
+
+
+def setup_fetchimageplanes():
+ """
+ uploads an image
+ """
+ global image_uri
+ resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, filename1))
+ content = bqsession.postblob(stores.files[0].location, xml=resource)
+ image_uri = etree.XML(content)[0].attrib['uri']
+
+
+def teardown_fetchimageplanes():
+ pass
+
+
+
+def setup_fetchimagepixels():
+ """
+ uploads an image
+ """
+ global image_uri
+ resource = etree.Element('resource', name=u'%s/%s'%(TEST_PATH, filename1))
+ content = bqsession.postblob(stores.files[0].location, xml=resource)
+ image_uri = etree.XML(content)[0].attrib['uri']
+
+def teardown_fetchimagepixels():
+ pass
+
+
+def setup_fetchdataset():
+ """
+    uploads a dataset
+ """
+ global dataset_uri
+ dataset = etree.Element('dataset', name='test')
+    for _ in range(4):
+ resource = etree.Element('resource', name=u'%s/%s'%(TEST_PATH, filename1))
+ content = bqsession.postblob(stores.files[0].location, xml=resource)
+ value=etree.SubElement(dataset,'value', type="object")
+ value.text = etree.XML(content)[0].attrib['uri']
+ content = bqsession.postxml('/data_service/dataset', dataset)
+ dataset_uri = content.attrib['uri']
+
+def teardown_fetchdataset():
+ pass
+
+
+
+def setup_fetchDataset():
+ """
+    uploads a dataset
+ """
+ global dataset_uri
+ dataset = etree.Element('dataset', name='test')
+    for _ in range(4):
+ resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, filename1))
+ content = bqsession.postblob(stores.files[0].location, xml=resource)
+ value=etree.SubElement(dataset,'value', type="object")
+ value.text = etree.XML(content)[0].attrib['uri']
+ content = bqsession.postxml('/data_service/dataset', dataset)
+ dataset_uri = content.attrib['uri']
+
+
+def teardown_fetchDataset():
+ pass
+
+
+
+def setup_saveimagepixels():
+ """
+ uploads an image
+ """
+ global image_uri
+ resource = etree.Element('resource', name=u'%s/%s'%(TEST_PATH, filename1))
+ content = bqsession.postblob(stores.files[0].location, xml=resource)
+ image_uri = etree.XML(content)[0].attrib['uri']
+
+
+def teardown_saveimagepixels():
+ pass
+
+
+def setup_fetchImage():
+ """
+ uploads an image
+ """
+ global image_uri
+ resource = etree.Element ('resource', name=u'%s/%s'%(TEST_PATH, filename1))
+ content = bqsession.postblob(stores.files[0].location, xml=resource)
+ image_uri = etree.XML(content)[0].attrib['uri']
+
+
+def teardown_fetchImage():
+ pass
+
+
+
+###################################################
+
+
+
+
+def test_saveblob_1(session,stores):
+ """
+ Saves an image to the blob service
+ """
+ try:
+ result = save_blob(bqsession, localfile=stores.files[0].location)
+    except BQCommError as e:
+ assert False, 'BQCommError: Status: %s'%e.status
+ if result is None:
+ assert False, 'XML Parsing error'
+
+
+def test_saveblob_2(session,stores):
+ """
+ Save an image to the blob service with xml tags
+ """
+
+ try:
+ result = save_blob(bqsession, localfile=stores.files[0].location)
+    except BQCommError as e:
+ assert False, 'BQCommError: Status: %s'%e.status
+ if result is None:
+ assert False, 'XML Parsing error'
+
+
+
+
+
+def test_fetchblob_1(session, stores, image_uri):
+ """
+ fetch blob and return path
+ """
+ try:
+ result = fetch_blob(bqsession, image_uri, dest=stores.results)
+    except BQCommError as e:
+ assert False, 'BQCommError: Status: %s'%e.status
+
+
+
+def test_fetchblob_2(session, image_uri):
+ """
+ fetch blob and return local path
+ """
+ try:
+ result = fetch_blob(bqsession, image_uri, uselocalpath=True)
+    except BQCommError as e:
+ assert False, 'BQCommError: Status: %s'%e.status
+
+
+
+
+#@with_setup(setup_fetchimageplanes, teardown_fetchimageplanes)
+def test_fetchimageplanes_1():
+ """
+ fetch image planes and return path
+ """
+ try:
+ result = fetch_image_planes(bqsession, image_uri, results_location, uselocalpath=False)
+    except BQCommError as e:
+ assert False, 'BQCommError: Status: %s'%e.status
+
+
+#@with_setup(setup_fetchimageplanes, teardown_fetchimageplanes)
+def test_fetchimageplanes_2():
+ """
+ Fetch image planes and return path. Routine is run on same host as server.
+ """
+ try:
+ result = fetch_image_planes(bqsession, image_uri, results_location,uselocalpath=True)
+    except BQCommError as e:
+ assert False, 'BQCommError: Status: %s'%e.status
+
+
+
+#@with_setup(setup_fetchimagepixels, teardown_fetchimagepixels)
+def test_fetchimagepixels_1():
+ """
+ fetch image planes and return path
+ """
+ try:
+ result = fetch_image_pixels(bqsession, image_uri, results_location,uselocalpath=True)
+    except BQCommError as e:
+ assert False, 'BQCommError: Status: %s'%e.status
+
+#@with_setup(setup_fetchimagepixels, teardown_fetchimagepixels)
+def test_fetchimagepixels_2():
+ """
+ fetch image planes and return path. Routine is run on same host as server.
+ """
+ try:
+ result = fetch_image_pixels(bqsession, image_uri, results_location,uselocalpath=True)
+    except BQCommError as e:
+ assert False, 'BQCommError: Status: %s'%e.status
+
+
+#@with_setup(setup_fetchdataset, teardown_fetchdataset)
+def test_fetchdataset():
+ """
+ fetch dataset images
+ """
+ try:
+ result = fetch_dataset(bqsession, dataset_uri, results_location)
+    except BQCommError as e:
+ assert False, 'BQCommError: Status: %s'%e.status
+
+
+#@with_setup(setup_fetchImage, teardown_fetchImage)
+def test_fetchImage_1():
+ """
+ fetch Image
+ """
+ try:
+ result = fetchImage(bqsession, image_uri, results_location)
+    except BQCommError as e:
+ assert False, 'BQCommError: Status: %s'%e.status
+
+
+#@with_setup(setup_fetchImage, teardown_fetchImage)
+def test_fetchImage_2():
+ """
+ fetch Image with localpath
+ """
+ try:
+ result = fetchImage(bqsession, image_uri, results_location, uselocalpath=True)
+    except BQCommError as e:
+ assert False, 'BQCommError: Status: %s'%e.status
+
+
+
+#@with_setup(setup_fetchDataset, teardown_fetchDataset)
+def test_fetchDataset():
+ """
+ fetch Dataset images
+ """
+ try:
+ result = fetchDataset(bqsession, dataset_uri, results_location)
+    except BQCommError as e:
+ assert False, 'BQCommError: Status: %s'%e.status
+
+
+
+#@with_setup(setup_saveimagepixels, teardown_saveimagepixels)
+def test_saveimagepixels():
+ """
+ Test save image pixels
+ """
+    # doesn't work without a name on the image
+    xmldoc = """
+    <image name="%s" />
+    """ % u'%s/%s' % (TEST_PATH, filename1)
+    #bqimage = fromXml(etree.XML(xmldoc))
+    bqimage = session.factory.from_string(xmldoc)
+    try:
+        result = save_image_pixels(session, stores.files[0].location, image_tags=bqimage)
+    except BQCommError as e:
+        assert False, 'BQCommError: Status: %s'%e.status
diff --git a/nph_5class/bqapi/tests/util.py b/nph_5class/bqapi/tests/util.py
new file mode 100644
index 0000000..e06b987
--- /dev/null
+++ b/nph_5class/bqapi/tests/util.py
@@ -0,0 +1,20 @@
+from bq.util.mkdir import _mkdir
+import posixpath
+from six.moves import urllib
+import os
+
+def fetch_file(filename, url, dir):
+ """
+ @param filename: name of the file fetching from the store
+ @param url: url of the store
+ @param dir: the directory the file will be placed in
+
+ @return the local path to the file
+ """
+ _mkdir(url)
+ _mkdir(dir)
+ url = posixpath.join(url, filename)
+ path = os.path.join(dir, filename)
+ if not os.path.exists(path):
+        urllib.request.urlretrieve(url, path)
+ return path
\ No newline at end of file
diff --git a/nph_5class/bqapi/types.py b/nph_5class/bqapi/types.py
new file mode 100644
index 0000000..12c318b
--- /dev/null
+++ b/nph_5class/bqapi/types.py
@@ -0,0 +1,7 @@
+
+
+USENODE = False
+if USENODE:
+ from .bqnode import *
+else:
+ from .bqclass import *
diff --git a/nph_5class/bqapi/util.py b/nph_5class/bqapi/util.py
new file mode 100644
index 0000000..162d6d9
--- /dev/null
+++ b/nph_5class/bqapi/util.py
@@ -0,0 +1,435 @@
+
+
+import os
+import shutil
+#import urllib
+#import urlparse
+#import time
+import logging
+from six.moves import urllib
+
+#from lxml import etree as ET
+#from lxml import etree
+from .xmldict import xml2d, d2xml
+
+log = logging.getLogger('bqapi.util')
+
+#####################################################
+# misc: unicode
+#####################################################
+
+def normalize_unicode(s):
+    if isinstance(s, str):
+        return s
+    try:
+        s = s.decode('utf8')
+    except UnicodeDecodeError:
+        s = s.decode('utf8', 'replace')
+    return s
+
+#####################################################
+# misc: path manipulation
+#####################################################
+
+if os.name == 'nt':
+ def url2localpath(url):
+ path = urllib.parse.urlparse(url).path
+ if len(path)>0 and path[0] == '/':
+ path = path[1:]
+        try:
+            return urllib.parse.unquote(path, errors='strict')
+        except UnicodeDecodeError:
+            # dima: safeguard measure for old non-encoded unicode paths
+            return urllib.parse.unquote(path, encoding='latin-1')
+
+ def localpath2url(path):
+ path = path.replace('\\', '/')
+ url = urllib.parse.quote(path.encode('utf-8'))
+ if len(path)>3 and path[0] != '/' and path[1] == ':':
+ # path starts with a drive letter: c:/
+ url = 'file:///%s'%url
+ else:
+ # path is a relative path
+ url = 'file://%s'%url
+ return url
+
+else:
+    def url2localpath(url):
+        if isinstance(url, bytes):
+            # safeguard against un-encoded values in the DB
+            url = url.decode('utf-8')
+        path = urllib.parse.urlparse(url).path
+        return urllib.parse.unquote(path)
+
+ def localpath2url(path):
+ url = urllib.parse.quote(path.encode('utf-8'))
+ url = 'file://%s'%url
+ return url
+
+#####################################################
+
+
+class AttrDict(dict):
+ def __init__(self, *args, **kwargs):
+ dict.__init__(self, *args, **kwargs)
+
+ def __getattr__(self, name):
+ try:
+ return self[name]
+ except KeyError:
+ raise AttributeError
+
+ def __setattr__(self, name, value):
+ self[name] = value
+ return value
+
+ def __getstate__(self):
+ return list(self.items())
+
+ def __setstate__(self, items):
+ for key, val in items:
+ self[key] = val
+
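+# Usage sketch: AttrDict gives attribute access over plain dict storage,
+#   d = AttrDict(name='img.tif', size=42)
+#   d.name      -> 'img.tif'
+#   d['size']   -> 42
+#   d.missing   -> raises AttributeError (via __getattr__)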
+
+def safecopy (*largs):
+ largs = list (largs)
+ d = largs.pop()
+
+ for f in largs:
+ try:
+ dest = d
+ if os.path.isdir (d):
+ dest = os.path.join (d, os.path.basename(f))
+ print ("linking %s to %s"%(f,dest))
+ if os.path.exists(dest):
+ print ("Found existing file %s: removing .." % dest)
+ os.unlink (dest)
+ os.link(f, dest)
+ except (OSError, AttributeError) as e:
+ print ("Problem in link %s .. trying copy" % e)
+ shutil.copy2(f, dest)
+
+def parse_qs(query):
+    """
+    parse a uri query string into a dict
+    """
+    pd = {}
+    for el in query.split('&'):
+        if not el:
+            continue
+        nm, junk, vl = el.partition('=')
+        pd.setdefault(nm, []).append(vl)
+    return pd
+
+def make_qs(pd):
+ """
+ convert back from dict to qs
+ """
+ query = []
+ for k,vl in list(pd.items()):
+ for v in vl:
+ pair = v and "%s=%s" % (k,v) or k
+ query.append(pair)
+ return "&".join(query)
+
+
+def save_blob(session, localfile=None, resource=None):
+ """
+    put a local image on the server and return the resource element
+    of the METADATA XML record
+
+ @param session: the local session
+    @param localfile: a file-like object or name of a local file to upload
+    @param resource: an optional XML resource (string or etree) describing the blob
+    @return XML content when upload ok
+
+    @exceptions comm.BQCommError - if the blob fails to be posted
+ """
+ content = session.postblob(localfile, xml=resource)
+
+ #content = ET.XML(content)
+ content = session.factory.string2etree(content)
+    if len(content) < 1:  # empty response: the service returned no resource element
+ return None
+ return content[0]
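+
+# Example (sketch; 'session' is an authenticated BQSession):
+#   resource = save_blob(session, localfile='cells.tif')
+# returns the first child element of the upload response document,
+# or None when the service reports no stored resource.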
+
+
+def fetch_blob(session, uri, dest=None, uselocalpath=False):
+ """
+ fetch original image locally as tif
+ @param session: the bqsession
+ @param uri: resource image uri
+ @param dest: a destination directory
+ @param uselocalpath: true when routine is run on same host as server
+ """
+ image = session.load(uri)
+ name = image.name or next_name("blob")
+
+ query = None
+ if uselocalpath:
+ # Skip 'file:'
+ path = image.value
+ if path.startswith('file:'):
+ path = path[5:]
+ return {uri: path}
+
+ url = session.service_url('blob_service', path = image.resource_uniq)
+ blobdata = session.c.fetch(url)
+    if dest is not None and os.path.isdir(dest):
+ outdest = os.path.join (dest, os.path.basename(name))
+ else:
+ outdest = os.path.join ('.', os.path.basename(name))
+ f = open(outdest, 'wb')
+ f.write(blobdata)
+ f.close()
+ return {uri: outdest}
+
+
+def fetch_image_planes(session, uri, dest=None, uselocalpath=False):
+ """
+ fetch all the image planes of an image locally
+ @param session: the bqsession
+ @param uri: resource image uri
+ @param dest: a destination directory
+ @param uselocalpath: true when routine is run on same host as server
+
+ """
+ image = session.load (uri, view='full')
+ #x,y,z,t,ch = image.geometry()
+ meta = image.pixels().meta().fetch()
+ #meta = ET.XML(meta)
+ meta = session.factory.string2etree(meta)
+ t = meta.findall('.//tag[@name="image_num_t"]')
+ t = len(t) and t[0].get('value')
+ z = meta.findall('.//tag[@name="image_num_z"]')
+ z = len(z) and z[0].get('value')
+ tplanes = int(t)
+ zplanes = int(z)
+
+ planes=[]
+ for t in range(tplanes):
+ for z in range(zplanes):
+ ip = image.pixels().slice(z=z+1,t=t+1).format('tiff')
+ if uselocalpath:
+ ip = ip.localpath()
+ planes.append (ip)
+
+ files = []
+ for i, p in enumerate(planes):
+ slize = p.fetch()
+ fname = os.path.join (dest, "%.5d.TIF" % i)
+ if uselocalpath:
+ #path = ET.XML(slize).xpath('/resource/@src')[0]
+ resource = session.factory.string2etree(slize)
+ path = resource.get ('value')
+ # Strip file:/ from path
+ if path.startswith ('file:/'):
+ path = path[5:]
+ if os.path.exists(path):
+ safecopy (path, fname)
+ else:
+ log.error ("localpath did not return valid path: %s", path)
+ else:
+ f = open(fname, 'wb')
+ f.write(slize)
+ f.close()
+ files.append(fname)
+
+ return files
+
+
+def next_name(name):
+ count = 0
+ while os.path.exists("%s-%.5d.TIF" % (name, count)):
+ count = count + 1
+ return "%s-%.5d.TIF" % (name, count)
+
+
+
+def fetch_image_pixels(session, uri, dest, uselocalpath=False):
+ """
+ fetch original image locally as tif
+ @param session: the bqsession
+ @param uri: resource image uri
+ @param dest: a destination directory
+ @param uselocalpath: true when routine is run on same host as server
+ """
+ image = session.load(uri)
+ name = image.name or next_name("image")
+ ip = image.pixels().format('tiff')
+ if uselocalpath:
+ ip = ip.localpath()
+ pixels = ip.fetch()
+ if os.path.isdir(dest):
+ dest = os.path.join(dest, os.path.basename(name))
+ else:
+ dest = os.path.join('.', os.path.basename(name))
+ if not dest.lower().endswith ('.tif'):
+ dest = "%s.tif" % dest
+
+
+ if uselocalpath:
+ #path = ET.XML(pixels).xpath('/resource/@src')[0]
+ resource = session.factory.string2etree(pixels)
+ path = resource.get ('value')
+ #path = urllib.url2pathname(path[5:])
+ if path.startswith('file:/'):
+ path = path[5:]
+ # Skip 'file:'
+ if os.path.exists(path):
+ safecopy(path, dest)
+ return { uri : dest }
+ else:
+ log.error ("localpath did not return valid path: %s", path)
+
+ f = open(dest, 'wb')
+ f.write(pixels)
+ f.close()
+ return { uri : dest }
+
+
+def fetch_dataset(session, uri, dest, uselocalpath=False):
+ """
+    fetch elements of a dataset locally as tif
+
+ @param session: the bqsession
+ @param uri: resource image uri
+ @param dest: a destination directory
+ @param uselocalpath: true when routine is run on same host as server
+
+ @return:
+ """
+ dataset = session.fetchxml(uri, view='deep')
+ members = dataset.findall('.//value[@type="object"]')
+
+ results = {}
+ for i, imgxml in enumerate(members):
+ uri = imgxml.text #imgxml.get('uri')
+ print ("FETCHING", uri)
+ #fname = os.path.join (dest, "%.5d.tif" % i)
+ x = fetch_image_pixels(session, uri, dest, uselocalpath=uselocalpath)
+ results.update (x)
+ return results
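+
+# The result maps each member image URI to its local copy (illustrative values):
+#   {'http://host/data_service/00-XXXX': './out/img.tif', ...}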
+
+
+def fetchImage(session, uri, dest, uselocalpath=False):
+ """
+    @param session: the bqsession
+    @param uri: resource image uri
+    @param dest: a destination directory
+    @param uselocalpath: true when routine is run on same host as server (default: False)
+
+ @return
+ """
+ image = session.load(uri).pixels().info()
+ #fileName = ET.XML(image.fetch()).xpath('//tag[@name="filename"]/@value')[0]
+ fileName = session.factory.string2etree(image.fetch()).findall('.//tag[@name="filename"]')[0]
+ fileName = fileName.get ('value')
+
+ ip = session.load(uri).pixels().format('tiff')
+
+ if uselocalpath:
+ ip = ip.localpath()
+
+ pixels = ip.fetch()
+
+ if os.path.isdir(dest):
+ dest = os.path.join(dest, fileName)
+
+ if uselocalpath:
+ #path = ET.XML(pixels).xpath('/resource/@src')[0]
+ resource = session.factory.string2etree(pixels)
+ path = resource.get ('value')
+ #path = urllib.url2pathname(path[5:])
+ if path.startswith ('file:/'):
+ # Skip 'file:'
+ path = path[5:]
+ if os.path.exists(path):
+ safecopy(path, dest)
+ return {uri: dest }
+ else:
+ log.error ("localpath did not return valid path: %s", path)
+
+ f = open(dest, 'wb')
+ f.write(pixels)
+ f.close()
+ return {uri :dest }
+
+
+def fetchDataset(session, uri, dest, uselocalpath=False):
+ dataset = session.fetchxml(uri, view='deep')
+ members = dataset.findall('.//value[@type="object"]')
+ results = {}
+
+ for i, imgxml in enumerate(members):
+ uri = imgxml.text
+ print ("FETCHING: ", uri)
+ #fname = os.path.join (dest, "%.5d.tif" % i)
+ result = fetchImage(session, uri, dest, uselocalpath=uselocalpath)
+ results[uri] = result[uri]
+ return results
+
+
+# Post fields and files to an http host as multipart/form-data.
+# fields is a sequence of (name, value) elements for regular form
+# fields. files is a sequence of (name, filename, value) elements
+# for data to be uploaded as files
+# Return the tuple (response headers, server's response page)
+
+# example:
+# post_files ('http://..',
+# fields = {'file1': open('file.jpg','rb'), 'name':'file' })
+# post_files ('http://..', fields = [('file1', 'file.jpg', buffer), ('f1', 'v1' )] )
+
+def save_image_pixels(session, localfile, image_tags=None):
+ """
+    put a local image on the server and return the METADATA XML record
+
+    @param session: the local session
+    @param localfile: a file-like object or name of a local file to upload
+    @param image_tags: a BQImage object (or etree resource) whose tags accompany the upload
+
+ @return: XML content when upload ok
+ """
+ xml = None
+ if image_tags:
+ #xml = ET.tostring(toXml(image_tags))
+ xml = session.factory.to_string(image_tags)
+ return session.postblob(localfile, xml=xml)
+
+
+
+def as_flat_dict_tag_value(xmltree):
+ def _xml2d(e, d, path=''):
+ for child in e:
+ name = '%s%s'%(path, child.get('name', ''))
+ value = child.get('value', None)
+ if value is not None:
+ if not name in d:
+ d[name] = value
+ else:
+ if isinstance(d[name], list):
+ d[name].append(value)
+ else:
+ d[name] = [d[name], value]
+ d = _xml2d(child, d, path='%s%s/'%(path, child.get('name', '')))
+ return d
+
+ return _xml2d(xmltree, {})
+
+def as_flat_dicts_node(xmltree):
+ def _xml2d(e, d, path=''):
+ for child in e:
+ name = '%s%s'%(path, child.get('name', ''))
+ #value = child.get('value', None)
+ value = child
+ #if value is not None:
+ if not name in d:
+ d[name] = value
+ else:
+ if isinstance(d[name], list):
+ d[name].append(value)
+ else:
+ d[name] = [d[name], value]
+ d = _xml2d(child, d, path='%s%s/'%(path, child.get('name', '')))
+ return d
+
+ return _xml2d(xmltree, {})
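+
+# Flattening sketch for as_flat_dict_tag_value (nested names join with '/'):
+#   <resource><tag name="a" value="1"><tag name="b" value="2"/></tag></resource>
+#   -> {'a': '1', 'a/b': '2'}   (repeated names collect into a list)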
diff --git a/nph_5class/bqapi/util.py.bak b/nph_5class/bqapi/util.py.bak
new file mode 100644
index 0000000..2e253f7
--- /dev/null
+++ b/nph_5class/bqapi/util.py.bak
@@ -0,0 +1,435 @@
+from __future__ import print_function
+
+import os
+import shutil
+#import urllib
+#import urlparse
+#import time
+import logging
+from six.moves import urllib
+
+#from lxml import etree as ET
+#from lxml import etree
+from .xmldict import xml2d, d2xml
+
+log = logging.getLogger('bqapi.util')
+
+#####################################################
+# misc: unicode
+#####################################################
+
+def normalize_unicode(s):
+ if isinstance(s, unicode):
+ return s
+ try:
+ s = s.decode('utf8')
+ except UnicodeEncodeError:
+ s = s.encode('ascii', 'replace')
+ return s
+
+#####################################################
+# misc: path manipulation
+#####################################################
+
+if os.name == 'nt':
+ def url2localpath(url):
+ path = urllib.parse.urlparse(url).path
+ if len(path)>0 and path[0] == '/':
+ path = path[1:]
+ try:
+ return urllib.parse.unquote(path).decode('utf-8')
+ except UnicodeEncodeError:
+ # dima: safeguard measure for old non-encoded unicode paths
+ return urllib.parse.unquote(path)
+
+ def localpath2url(path):
+ path = path.replace('\\', '/')
+ url = urllib.parse.quote(path.encode('utf-8'))
+ if len(path)>3 and path[0] != '/' and path[1] == ':':
+ # path starts with a drive letter: c:/
+ url = 'file:///%s'%url
+ else:
+ # path is a relative path
+ url = 'file://%s'%url
+ return url
+
+else:
+ def url2localpath(url):
+ url = url.encode('utf-8') # safegurd against un-encoded values in the DB
+ path = urllib.parse.urlparse(url).path
+ return urllib.parse.unquote(path)
+
+ def localpath2url(path):
+ url = urllib.parse.quote(path.encode('utf-8'))
+ url = 'file://%s'%url
+ return url
+
+#####################################################
+
+
+class AttrDict(dict):
+ def __init__(self, *args, **kwargs):
+ dict.__init__(self, *args, **kwargs)
+
+ def __getattr__(self, name):
+ try:
+ return self[name]
+ except KeyError:
+ raise AttributeError
+
+ def __setattr__(self, name, value):
+ self[name] = value
+ return value
+
+ def __getstate__(self):
+ return self.items()
+
+ def __setstate__(self, items):
+ for key, val in items:
+ self[key] = val
+
+
+def safecopy (*largs):
+ largs = list (largs)
+ d = largs.pop()
+
+ for f in largs:
+ try:
+ dest = d
+ if os.path.isdir (d):
+ dest = os.path.join (d, os.path.basename(f))
+ print ("linking %s to %s"%(f,dest))
+ if os.path.exists(dest):
+ print ("Found existing file %s: removing .." % dest)
+ os.unlink (dest)
+ os.link(f, dest)
+ except (OSError, AttributeError) as e:
+ print ("Problem in link %s .. trying copy" % e)
+ shutil.copy2(f, dest)
+
+def parse_qs(query):
+ """
+ parse a uri query string into a dict
+ """
+ pd = {}
+ if '&' in query:
+ for el in query.split('&'):
+ nm, junk, vl = el.partition('=')
+ pd.setdefault(nm, []).append(vl)
+ return pd
+
+def make_qs(pd):
+ """
+ convert back from dict to qs
+ """
+ query = []
+ for k,vl in pd.items():
+ for v in vl:
+ pair = v and "%s=%s" % (k,v) or k
+ query.append(pair)
+ return "&".join(query)
+
+
+def save_blob(session, localfile=None, resource=None):
+ """
+ put a local image on the server and return the URL
+ to the METADATA XML record
+
+ @param session: the local session
+ @param image: an BQImage object
+ @param localfile: a file-like object or name of a localfile
+ @return XML content when upload ok
+
+ @exceptions comm.BQCommError - if blob is failed to be posted
+ """
+ content = session.postblob(localfile, xml=resource)
+
+ #content = ET.XML(content)
+ content = session.factory.string2etree(content)
+ if len(content)<1: #when would this happen
+ return None
+ return content[0]
+
+
+def fetch_blob(session, uri, dest=None, uselocalpath=False):
+ """
+ fetch original image locally as tif
+ @param session: the bqsession
+ @param uri: resource image uri
+ @param dest: a destination directory
+ @param uselocalpath: true when routine is run on same host as server
+ """
+ image = session.load(uri)
+ name = image.name or next_name("blob")
+
+ query = None
+ if uselocalpath:
+ # Skip 'file:'
+ path = image.value
+ if path.startswith('file:'):
+ path = path[5:]
+ return {uri: path}
+
+ url = session.service_url('blob_service', path = image.resource_uniq)
+ blobdata = session.c.fetch(url)
+ if os.path.isdir(dest):
+ outdest = os.path.join (dest, os.path.basename(name))
+ else:
+ outdest = os.path.join ('.', os.path.basename(name))
+ f = open(outdest, 'wb')
+ f.write(blobdata)
+ f.close()
+ return {uri: outdest}
+
+
+def fetch_image_planes(session, uri, dest=None, uselocalpath=False):
+ """
+ fetch all the image planes of an image locally
+ @param session: the bqsession
+ @param uri: resource image uri
+ @param dest: a destination directory
+ @param uselocalpath: true when routine is run on same host as server
+
+ """
+ image = session.load (uri, view='full')
+ #x,y,z,t,ch = image.geometry()
+ meta = image.pixels().meta().fetch()
+ #meta = ET.XML(meta)
+ meta = session.factory.string2etree(meta)
+ t = meta.findall('.//tag[@name="image_num_t"]')
+ t = len(t) and t[0].get('value')
+ z = meta.findall('.//tag[@name="image_num_z"]')
+ z = len(z) and z[0].get('value')
+ tplanes = int(t)
+ zplanes = int(z)
+
+ planes=[]
+ for t in range(tplanes):
+ for z in range(zplanes):
+ ip = image.pixels().slice(z=z+1,t=t+1).format('tiff')
+ if uselocalpath:
+ ip = ip.localpath()
+ planes.append (ip)
+
+ files = []
+ for i, p in enumerate(planes):
+ slize = p.fetch()
+ fname = os.path.join (dest, "%.5d.TIF" % i)
+ if uselocalpath:
+ #path = ET.XML(slize).xpath('/resource/@src')[0]
+ resource = session.factory.string2etree(slize)
+ path = resource.get ('value')
+ # Strip file:/ from path
+ if path.startswith ('file:/'):
+ path = path[5:]
+ if os.path.exists(path):
+ safecopy (path, fname)
+ else:
+ log.error ("localpath did not return valid path: %s", path)
+ else:
+ f = open(fname, 'wb')
+ f.write(slize)
+ f.close()
+ files.append(fname)
+
+ return files
+
+
+def next_name(name):
+ count = 0
+ while os.path.exists("%s-%.5d.TIF" % (name, count)):
+ count = count + 1
+ return "%s-%.5d.TIF" % (name, count)
+
+
+
+def fetch_image_pixels(session, uri, dest, uselocalpath=False):
+ """
+ fetch original image locally as tif
+ @param session: the bqsession
+ @param uri: resource image uri
+ @param dest: a destination directory
+ @param uselocalpath: true when routine is run on same host as server
+ """
+ image = session.load(uri)
+ name = image.name or next_name("image")
+ ip = image.pixels().format('tiff')
+ if uselocalpath:
+ ip = ip.localpath()
+ pixels = ip.fetch()
+ if os.path.isdir(dest):
+ dest = os.path.join(dest, os.path.basename(name))
+ else:
+ dest = os.path.join('.', os.path.basename(name))
+ if not dest.lower().endswith ('.tif'):
+ dest = "%s.tif" % dest
+
+
+ if uselocalpath:
+ #path = ET.XML(pixels).xpath('/resource/@src')[0]
+ resource = session.factory.string2etree(pixels)
+ path = resource.get ('value')
+ #path = urllib.url2pathname(path[5:])
+ if path.startswith('file:/'):
+ path = path[5:]
+ # Skip 'file:'
+ if os.path.exists(path):
+ safecopy(path, dest)
+ return { uri : dest }
+ else:
+ log.error ("localpath did not return valid path: %s", path)
+
+ f = open(dest, 'wb')
+ f.write(pixels)
+ f.close()
+ return { uri : dest }
+
+
+def fetch_dataset(session, uri, dest, uselocalpath=False):
+ """
+ fetch elemens of dataset locally as tif
+
+ @param session: the bqsession
+ @param uri: resource image uri
+ @param dest: a destination directory
+ @param uselocalpath: true when routine is run on same host as server
+
+ @return:
+ """
+ dataset = session.fetchxml(uri, view='deep')
+ members = dataset.findall('.//value[@type="object"]')
+
+ results = {}
+ for i, imgxml in enumerate(members):
+ uri = imgxml.text #imgxml.get('uri')
+ print ("FETCHING", uri)
+ #fname = os.path.join (dest, "%.5d.tif" % i)
+ x = fetch_image_pixels(session, uri, dest, uselocalpath=uselocalpath)
+ results.update (x)
+ return results
+
+
+def fetchImage(session, uri, dest, uselocalpath=False):
+ """
+ @param: session -
+ @param: url -
+ @param: dest -
+ @param: uselocalpath- (default: False)
+
+ @return
+ """
+ image = session.load(uri).pixels().info()
+ #fileName = ET.XML(image.fetch()).xpath('//tag[@name="filename"]/@value')[0]
+ fileName = session.factory.string2etree(image.fetch()).findall('.//tag[@name="filename"]')[0]
+ fileName = fileName.get ('value')
+
+ ip = session.load(uri).pixels().format('tiff')
+
+ if uselocalpath:
+ ip = ip.localpath()
+
+ pixels = ip.fetch()
+
+ if os.path.isdir(dest):
+ dest = os.path.join(dest, fileName)
+
+ if uselocalpath:
+ #path = ET.XML(pixels).xpath('/resource/@src')[0]
+ resource = session.factory.string2etree(pixels)
+ path = resource.get ('value')
+ #path = urllib.url2pathname(path[5:])
+ if path.startswith ('file:/'):
+ # Skip 'file:'
+ path = path[5:]
+ if os.path.exists(path):
+ safecopy(path, dest)
+ return {uri: dest }
+ else:
+ log.error ("localpath did not return valid path: %s", path)
+
+ f = open(dest, 'wb')
+ f.write(pixels)
+ f.close()
+ return {uri :dest }
+
+
+def fetchDataset(session, uri, dest, uselocalpath=False):
+ dataset = session.fetchxml(uri, view='deep')
+ members = dataset.findall('.//value[@type="object"]')
+ results = {}
+
+ for i, imgxml in enumerate(members):
+ uri = imgxml.text
+ print ("FETCHING: ", uri)
+ #fname = os.path.join (dest, "%.5d.tif" % i)
+ result = fetchImage(session, uri, dest, uselocalpath=uselocalpath)
+ results[uri] = result[uri]
+ return results
+
+
+# Post fields and files to an http host as multipart/form-data.
+# fields is a sequence of (name, value) elements for regular form
+# fields. files is a sequence of (name, filename, value) elements
+# for data to be uploaded as files
+# Return the tuple (rsponse headers, server's response page)
+
+# example:
+# post_files ('http://..',
+# fields = {'file1': open('file.jpg','rb'), 'name':'file' })
+# post_files ('http://..', fields = [('file1', 'file.jpg', buffer), ('f1', 'v1' )] )
+
+def save_image_pixels(session, localfile, image_tags=None):
+ """
+ put a local image on the server and return the URL
+ to the METADATA XML record
+
+ @param: session - the local session
+ @param: image - an BQImage object
+ @param: localfile - a file-like object or name of a localfile
+
+ @return: XML content when upload ok
+ """
+ xml = None
+ if image_tags:
+ #xml = ET.tostring(toXml(image_tags))
+ xml = session.factory.to_string(image_tags)
+ return session.postblob(localfile, xml=xml)
+
+
+
+def as_flat_dict_tag_value(xmltree):
+ def _xml2d(e, d, path=''):
+ for child in e:
+ name = '%s%s'%(path, child.get('name', ''))
+ value = child.get('value', None)
+ if value is not None:
+ if not name in d:
+ d[name] = value
+ else:
+ if isinstance(d[name], list):
+ d[name].append(value)
+ else:
+ d[name] = [d[name], value]
+ d = _xml2d(child, d, path='%s%s/'%(path, child.get('name', '')))
+ return d
+
+ return _xml2d(xmltree, {})
+
+def as_flat_dicts_node(xmltree):
+ def _xml2d(e, d, path=''):
+ for child in e:
+ name = '%s%s'%(path, child.get('name', ''))
+ #value = child.get('value', None)
+ value = child
+ #if value is not None:
+ if not name in d:
+ d[name] = value
+ else:
+ if isinstance(d[name], list):
+ d[name].append(value)
+ else:
+ d[name] = [d[name], value]
+ d = _xml2d(child, d, path='%s%s/'%(path, child.get('name', '')))
+ return d
+
+ return _xml2d(xmltree, {})
diff --git a/nph_5class/bqapi/xmldict.py b/nph_5class/bqapi/xmldict.py
new file mode 100644
index 0000000..559794f
--- /dev/null
+++ b/nph_5class/bqapi/xmldict.py
@@ -0,0 +1,110 @@
+# Create python xml structures compatible with
+# http://search.cpan.org/~grantm/XML-Simple-2.18/lib/XML/Simple.pm
+
+
+try:
+ from lxml import etree
+except ImportError:
+ import xml.etree.ElementTree as etree
+from itertools import groupby
+
+def xml2d(e):
+ """Convert an etree into a dict structure
+
+ @type e: etree.Element
+ @param e: the root of the tree
+ @return: The dictionary representation of the XML tree
+ """
+ def _xml2d(e):
+ kids = dict(e.attrib)
+ #if e.text:
+ # kids['__text__'] = e.text
+ #if e.tail:
+ # kids['__tail__'] = e.tail
+ for k, g in groupby(e, lambda x: x.tag):
+ g = [ _xml2d(x) for x in g ]
+ kids[k]= g
+ return kids
+ return { e.tag : _xml2d(e) }
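+
+# Example (sketch):
+#   xml2d(etree.XML('<resource><tag name="a" value="b"/></resource>'))
+#   -> {'resource': {'tag': [{'name': 'a', 'value': 'b'}]}}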
+
+
+def d2xml(d):
+ """convert dict to xml
+
+    1. The top level d must contain a single entry, i.e. the root element
+    2. Keys of the dictionary become subelements or attributes
+    3. If a value is a simple string, then the key is an attribute
+    4. If a value is a dict, then the key is a subelement
+    5. If a value is a list, then the key is a set of subelements
+
+    a = { 'module' : {'tag' : [ { 'name': 'a', 'value': 'b'},
+                                { 'name': 'c', 'value': 'd'},
+                              ],
+                      'gobject' : { 'name': 'g', 'type':'xx' },
+                      'uri' : 'test',
+                     }
+        }
+    >>> etree.tostring(d2xml(a))
+    b'<module uri="test"><tag name="a" value="b"/><tag name="c" value="d"/><gobject name="g" type="xx"/></module>'
+ @type d: dict
+ @param d: A dictionary formatted as an XML document
+ @return: A etree Root element
+ """
+ def _d2xml(d, p):
+ for k,v in list(d.items()):
+ if v is None: continue
+ if isinstance(v,dict):
+ node = etree.SubElement(p, k)
+ _d2xml(v, node)
+ elif isinstance(v,list):
+ for item in v:
+ if item is None: continue
+ node = etree.SubElement(p, k)
+ _d2xml(item, node)
+ #elif k == "__text__":
+ # p.text = v
+ #elif k == "__tail__":
+ # p.tail = v
+ else:
+ p.set(k, str(v))
+
+ k,v = list(d.items())[0]
+ node = etree.Element(k)
+ _d2xml(v, node)
+ return node
+
+# simple dictionary output of name-value pairs, useful for image metadata
+def xml2nv(e):
+ """Convert an etree into a dict structure
+
+ @type e: etree.Element
+ @param e: the root of the tree
+ @return: The dictionary representation of the XML tree
+ """
+ def _xml2nv(e, a, path):
+ for g in e:
+ n = g.get('name') or g.get('type')
+ if n is None:
+ continue
+ a['%s%s'%(path, n)] = g.get('value')
+ for child in g:
+ _xml2nv(child, a, '%s%s/'%(path, n))
+ return
+ a = {}
+ _xml2nv(e, a, '')
+ return a
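+
+# Example (sketch): nested tags flatten to 'parent/child' keys,
+#   xml2nv(etree.XML('<image><tag name="width" value="512"/></image>'))
+#   -> {'width': '512'}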
+
+if __name__=="__main__":
+
+ X = """"""
+ print (X)
+ Y = xml2d(etree.XML(X))
+ print (Y)
+    Z = etree.tostring (d2xml(Y), encoding='unicode')
+ print (Z)
+ assert X == Z
diff --git a/nph_5class/bqapi/xmldict.py.bak b/nph_5class/bqapi/xmldict.py.bak
new file mode 100644
index 0000000..6a43118
--- /dev/null
+++ b/nph_5class/bqapi/xmldict.py.bak
@@ -0,0 +1,110 @@
+# Create python xml structures compatible with
+# http://search.cpan.org/~grantm/XML-Simple-2.18/lib/XML/Simple.pm
+from __future__ import print_function
+
+try:
+ from lxml import etree
+except ImportError:
+ import xml.etree.ElementTree as etree
+from itertools import groupby
+
+def xml2d(e):
+ """Convert an etree into a dict structure
+
+ @type e: etree.Element
+ @param e: the root of the tree
+ @return: The dictionary representation of the XML tree
+ """
+ def _xml2d(e):
+ kids = dict(e.attrib)
+ #if e.text:
+ # kids['__text__'] = e.text
+ #if e.tail:
+ # kids['__tail__'] = e.tail
+ for k, g in groupby(e, lambda x: x.tag):
+ g = [ _xml2d(x) for x in g ]
+ kids[k]= g
+ return kids
+ return { e.tag : _xml2d(e) }
+
+
+def d2xml(d):
+ """convert dict to xml
+
+ 1. The top level d must contain a single entry i.e. the root element
+ 2. Keys of the dictionary become sublements or attributes
+ 3. If a value is a simple string, then the key is an attribute
+ 4. if a value is dict then, then key is a subelement
+ 5. if a value is list, then key is a set of sublements
+
+ a = { 'module' : {'tag' : [ { 'name': 'a', 'value': 'b'},
+ { 'name': 'c', 'value': 'd'},
+ ],
+ 'gobject' : { 'name': 'g', 'type':'xx' },
+ 'uri' : 'test',
+ }
+ }
+ >>> d2xml(a)
+
+
+
+
+
+
+ @type d: dict
+ @param d: A dictionary formatted as an XML document
+ @return: A etree Root element
+ """
+ def _d2xml(d, p):
+ for k,v in d.items():
+ if v is None: continue
+ if isinstance(v,dict):
+ node = etree.SubElement(p, k)
+ _d2xml(v, node)
+ elif isinstance(v,list):
+ for item in v:
+ if item is None: continue
+ node = etree.SubElement(p, k)
+ _d2xml(item, node)
+ #elif k == "__text__":
+ # p.text = v
+ #elif k == "__tail__":
+ # p.tail = v
+ else:
+ p.set(k, unicode(v))
+
+ k,v = d.items()[0]
+ node = etree.Element(k)
+ _d2xml(v, node)
+ return node
+
+# simple dictionary output of name-value pairs, useful for image metadata
+def xml2nv(e):
+ """Convert an etree into a dict structure
+
+ @type e: etree.Element
+ @param e: the root of the tree
+ @return: The dictionary representation of the XML tree
+ """
+ def _xml2nv(e, a, path):
+ for g in e:
+ n = g.get('name') or g.get('type')
+ if n is None:
+ continue
+ a['%s%s'%(path, n)] = g.get('value')
+ for child in g:
+ _xml2nv(child, a, '%s%s/'%(path, n))
+ return
+ a = {}
+ _xml2nv(e, a, '')
+ return a
+
+if __name__=="__main__":
+
+ X = """"""
+ print (X)
+ Y = xml2d(etree.XML(X))
+ print (Y)
+ Z = etree.tostring (d2xml(Y) )
+ print (Z)
+ assert X == Z
diff --git a/nph_5class/bqconfig.json b/nph_5class/bqconfig.json
new file mode 100644
index 0000000..2e090ff
--- /dev/null
+++ b/nph_5class/bqconfig.json
@@ -0,0 +1 @@
+{"Name": "NPHSegmentation", "Author": "VB", "Description": "New NPH segmentation module", "Inputs": {"Input Image": "image"}, "Outputs": {"Segmented Image": "image"}}
diff --git a/nph_5class/public/help.html b/nph_5class/public/help.html
new file mode 100644
index 0000000..ef562f4
--- /dev/null
+++ b/nph_5class/public/help.html
@@ -0,0 +1,9 @@
+NPH Segmentation Module – 5 Class
+Performs 5 class segmentation of scans corresponding to provided *.nii.gz file. The classes are as follows:
+
+- Background
+- Ventricle
+- White Matter
+- Subarachnoid
+- Shunt
+
diff --git a/nph_5class/public/help.md b/nph_5class/public/help.md
new file mode 100644
index 0000000..4ffc0d3
--- /dev/null
+++ b/nph_5class/public/help.md
@@ -0,0 +1,10 @@
+# NPH Segmentation Module -- 5 Class
+
+Performs 5 class segmentation of scans corresponding to provided ``*.nii.gz`` file.
+The classes are as follows:
+
+0. Background
+1. Ventricle
+2. White Matter
+3. Subarachnoid
+4. Shunt
diff --git a/nph_5class/public/thumbnail.jpg b/nph_5class/public/thumbnail.jpg
new file mode 100644
index 0000000..edc2e4f
Binary files /dev/null and b/nph_5class/public/thumbnail.jpg differ
diff --git a/nph_5class/runtime-module.cfg b/nph_5class/runtime-module.cfg
new file mode 100644
index 0000000..6c85a56
--- /dev/null
+++ b/nph_5class/runtime-module.cfg
@@ -0,0 +1,12 @@
+# Module configuration file for local execution of modules
+
+module_enabled = True
+runtime.platforms = command
+
+[command]
+docker.image = nphsegmentation:v1.0.8
+environments = Staged,Docker
+executable = python PythonScriptWrapper.py
+files = pydist, PythonScriptWrapper.py
+
+
diff --git a/nph_5class/src/BQ_run_module.py b/nph_5class/src/BQ_run_module.py
new file mode 100644
index 0000000..ba90bdd
--- /dev/null
+++ b/nph_5class/src/BQ_run_module.py
@@ -0,0 +1,22 @@
+import pathlib
+import logging as log
+# import nibabel as nib
+
+import nphsegmentation as nsg
+
+def run_module(input_path_dict, output_folder_path):
+ output_paths_dict = dict()
+
+ log.info(f"{input_path_dict['Input Image']=}")
+ input_path = input_path_dict['Input Image']
+ # n = nib.load(input_path)
+
+ output_paths_dict["Segmented Image"] = nsg.main(pathlib.Path(input_path),
+ pathlib.Path(output_folder_path),
+ # modelPath = pathlib.Path.cwd() / 'src' / 'model_backup/epoch49_ResNet2D3Class_2Layer2x2_mixed2_300.pt',
+ modelPath = pathlib.Path.cwd() / 'src' / 'model_backup' / 'epoch50_2Dresnet_skullstrip5Class.pt',
+ rdir = pathlib.Path("/module/src"))
+ log.info("Finished computing result!")
+
+ return output_paths_dict
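+
+# Local smoke test (sketch; the paths are illustrative, not shipped with the module):
+# if __name__ == '__main__':
+#     outputs = run_module({'Input Image': 'scan.nii.gz'}, 'outputs/')
+#     print(outputs['Segmented Image'])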
diff --git a/nph_5class/src/CSFseg.py b/nph_5class/src/CSFseg.py
new file mode 100644
index 0000000..f9d7819
--- /dev/null
+++ b/nph_5class/src/CSFseg.py
@@ -0,0 +1,359 @@
+import numpy as np
+import nibabel as nib
+import matplotlib.pyplot as plt
+import os
+import copy
+import heapq
+
+# def connectToBoundary(label, classIdx, tolerance):
+# neighbors=[]
+# for i in range(-1, 2):
+# for j in range(-1, 2):
+# k=0
+# neighbors.append((i,j,k))
+
+# seen=set()
+
+# position=[]
+# heapq.heapify(position)
+
+# island=0
+# newLabel=np.zeros(label.shape)
+# i, j, k=label.shape
+# for z in range(k):
+# for x in range(i):
+# for y in range(j):
+
+# if (label[x,y,z]==classIdx) and (x,y,z) not in seen:
+# island+=1
+# area=0
+# curIsland=set()
+# seen2=set()
+# seen.add((x,y,z))
+# curIsland.add((x,y,z))
+# heapq.heappush(position, (x,y,z))
+
+# connected=False
+# while position:
+# cur=heapq.heappop(position)
+
+# for neighbor in neighbors:
+
+# if cur[0]-neighbor[0]<0 or cur[0]-neighbor[0]>=i: continue
+# if cur[1]-neighbor[1]<0 or cur[1]-neighbor[1]>=j: continue
+# if cur[2]-neighbor[2]<0 or cur[2]-neighbor[2]>=k: continue
+
+# if (label[cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2]]==classIdx) and (cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2]) not in seen:
+# seen.add((cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2]))
+# curIsland.add((cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2]))
+# heapq.heappush(position, (cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2], 0))
+
+# position2=[]
+# heapq.heapify(position2)
+
+# for cur in curIsland:
+# heapq.heappush(position2,(cur[0],cur[1],cur[2],0))
+# seen2.add(cur)
+# while position2:
+# cur=heapq.heappop(position2)
+# for neighbor in neighbors:
+
+# if cur[0]-neighbor[0]<0 or cur[0]-neighbor[0]>=i: continue
+# if cur[1]-neighbor[1]<0 or cur[1]-neighbor[1]>=j: continue
+# if cur[2]-neighbor[2]<0 or cur[2]-neighbor[2]>=k: continue
+#                     if (label[cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2]]!=0) and (cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2]) not in seen2 and cur[3]<tolerance:
+#                         ...
+
+
+def maxArea(label, classIdx, findMax=True):
+    # Reconstructed from its call site and return statement (assumed): collect
+    # connected components of classIdx with in-plane 8-connectivity, using the
+    # same BFS pattern as numIsland below, and track the largest one.
+    neighbors=[]
+    for i in range(-1, 2):
+        for j in range(-1, 2):
+            neighbors.append((i,j,0))
+
+    seen=set()
+    position=[]
+    heapq.heapify(position)
+
+    islandDict={}
+    maxArea=0
+    maxPos=None
+    i, j, k=label.shape
+    for z in range(k):
+        for x in range(i):
+            for y in range(j):
+
+                if (label[x,y,z]==classIdx) and (x,y,z) not in seen:
+                    area=0
+                    curIsland=set()
+                    seen.add((x,y,z))
+                    curIsland.add((x,y,z))
+                    heapq.heappush(position, (x,y,z))
+
+                    while position:
+                        cur=heapq.heappop(position)
+                        area+=1
+
+                        for neighbor in neighbors:
+
+                            if cur[0]-neighbor[0]<0 or cur[0]-neighbor[0]>=i: continue
+ if cur[1]-neighbor[1]<0 or cur[1]-neighbor[1]>=j: continue
+ if cur[2]-neighbor[2]<0 or cur[2]-neighbor[2]>=k: continue
+
+ if label[cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2]]==label[x,y,z] and (cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2]) not in seen:
+ seen.add((cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2]))
+ curIsland.add((cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2]))
+ heapq.heappush(position, (cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]-neighbor[2]))
+
+ islandDict[(x,y,z)]=frozenset(curIsland)
+# print(island, area)
+
+ if findMax:
+ if area>maxArea:
+ maxArea=area
+ maxPos=(x,y,z)
+
+ return islandDict[maxPos], maxArea, maxPos
+
+def Connectivity(label, classIdx, targetIdx, refClass=1,connectivity=8):
+ neighbors=[]
+ if connectivity==8:
+ for i in range(-1, 2):
+ for j in range(-1, 2):
+ neighbors.append((i,j))
+ elif connectivity==4:
+ neighbors=[(1,0),(-1,0),(0,1),(0,-1)]
+
+ else:
+
+ return
+
+ seen=set()
+
+ island=0
+ position=[]
+ heapq.heapify(position)
+
+ i, j=label.shape
+
+ for x in range(i):
+ for y in range(j):
+
+ if (label[x,y]==refClass) and (x,y) not in seen:
+ island+=1
+ seen.add((x,y))
+ heapq.heappush(position, (x,y))
+
+ while position:
+ cur=heapq.heappop(position)
+
+ for neighbor in neighbors:
+
+ if cur[0]-neighbor[0]<0 or cur[0]-neighbor[0]>=i: continue
+ if cur[1]-neighbor[1]<0 or cur[1]-neighbor[1]>=j: continue
+
+ if label[cur[0]-neighbor[0],cur[1]-neighbor[1]]==classIdx and (cur[0]-neighbor[0],cur[1]-neighbor[1]) not in seen:
+ seen.add((cur[0]-neighbor[0],cur[1]-neighbor[1]))
+ label[cur[0]-neighbor[0],cur[1]-neighbor[1]]=targetIdx
+ heapq.heappush(position, (cur[0]-neighbor[0],cur[1]-neighbor[1]))
+
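+# Connectivity note: the BFS is seeded at refClass pixels and relabels every
+# classIdx pixel (transitively) connected to them as targetIdx; segVent below
+# uses it to pull subarachnoid CSF (10) into the ventricle class (1) slice by slice.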
+
+
+def numIsland(label,connectivity=8):
+ neighbors=[]
+ if connectivity==8:
+ for i in range(-1, 2):
+ for j in range(-1, 2):
+ neighbors.append((i,j))
+ elif connectivity==4:
+ neighbors=[(1,0),(-1,0),(0,1),(0,-1)]
+
+ else:
+
+ return
+
+ seen=set()
+
+ island=0
+ position=[]
+ heapq.heapify(position)
+
+ i, j=label.shape
+
+
+ for y in range(j):
+ for x in range(i-1,-1,-1):
+
+ if (label[x,y]!=0) and (x,y) not in seen:
+
+ if island==1:
+ if area>100:
+ island+=1
+ break
+
+ else: island=0
+
+ if island==0:
+ island+=1
+ area=0
+ seen.add((x,y))
+ heapq.heappush(position, (x,y))
+ curIsland=set()
+ while position:
+ cur=heapq.heappop(position)
+ area+=1
+ curIsland.add(cur)
+ for neighbor in neighbors:
+
+ if cur[0]-neighbor[0]<0 or cur[0]-neighbor[0]>=i: continue
+ if cur[1]-neighbor[1]<0 or cur[1]-neighbor[1]>=j: continue
+
+ if label[cur[0]-neighbor[0],cur[1]-neighbor[1]]!=0 and (cur[0]-neighbor[0],cur[1]-neighbor[1]) not in seen:
+ seen.add((cur[0]-neighbor[0],cur[1]-neighbor[1]))
+ heapq.heappush(position, (cur[0]-neighbor[0],cur[1]-neighbor[1]))
+
+ maxArea=area
+ maxPos=curIsland
+
+ return island, maxArea, maxPos
+
+def changeClassResult(segmentation):
+ for x in range(segmentation.shape[0]):
+ for y in range(segmentation.shape[1]):
+ for z in range(segmentation.shape[2]):
+ if segmentation[x,y,z]==3:
+ segmentation[x,y,z]=4
+ elif segmentation[x,y,z]==4:
+ segmentation[x,y,z]=5
+ #CSF into class10
+ elif segmentation[x,y,z]==1:
+ segmentation[x,y,z]=10
+
+def saveImage(array, name):
+ img = nib.Nifti1Image(array, np.eye(4))
+ nib.save(img, name)
+
+def cutoff(label,max):
+
+ neighbors=[(1,1,0),(0,1,0),(-1,1,0),(-1,0,0),(-1,-1,0),(0,-1,0),(1,-1,0),(1,0,0),(0,0,0)]
+ surpos = [[2,10,2,2,2,2,2,2,10],[2,2,2,10,2,2,2,2,10],[2,2,2,2,2,10,2,2,10],[2,2,2,2,2,2,2,10,10]]
+ i, j, k=label.shape
+
+ for z in range(max[2]-3,max[2]+4):
+ if z == max[2]: continue
+ for x in range(i):
+ for y in range(j):
+ if label[x,y,z] ==0 or label[x,y,z]==4: continue
+ nei = []
+ for neighbor in neighbors:
+ if x-neighbor[0]<0 or x-neighbor[0]>=i: continue
+ if y-neighbor[1]<0 or y-neighbor[1]>=j: continue
+ nei.append(label[x-neighbor[0], y-neighbor[1],z-neighbor[2]])
+ if nei in surpos:
+ label[x,y,z] = 2
+ if nei == [10,10,10,10,10,10,10,10,2]:
+ label[x,y,z] = 10
+ if nei[3]==2 and nei[7]==2 and nei[8]==10 and label[x-2,y,z]==2 and label[x+2,y,z]==2:
+ label[x,y,z] = 2
+ if nei[1]==2 and nei[5]==2 and nei[8]==10 and label[x,y-2,z]==2 and label[x,y+2,z]==2:
+ label[x,y,z] = 2
+
+
+
+def segVent(imgName, outputPath, resultName):
+ result=nib.load(os.path.join(outputPath, resultName)).get_fdata()
+
+ x,y,z=result.shape
+
+ changeClassResult(result)
+
+ #step 1: get subarachnoid connected to skull
+ # connectToBoundary(result, 10, tolerance=5)
+
+
+ #step 3: get max area of remaining CSF
+
+ island, Area, maxPos=maxArea(result, 10)
+ for pos in island:
+ result[pos]=1
+
+
+ cutoff(result,maxPos)
+
+ # check 7 slices
+ for k in range(maxPos[2]-1,-1,-1):
+
+ for i in range(x):
+ for j in range(y):
+ if result[i,j,k]==10 and result[i,j,k+1]==1:
+ result[i,j,k]=1
+
+ Connectivity(result[:,:,k], 10, 1, refClass=1)
+
+ for k in range(maxPos[2]+1,z):
+ for i in range(x):
+ for j in range(y):
+ if result[i,j,k] ==10 and result[i,j,k-1]==1 :
+ result[i,j,k]=1
+ Connectivity(result[:,:,k], 10, 1, refClass = 1)
+
+ for k in range(z):
+ for i in range(x):
+ for j in range(y):
+ if result[i,j,k]==10:
+ result[i,j,k]=3
+
+ #check max pos of ventricle
+ # ventmaxArea = 0
+ # ventmaxPos = 0
+ # for k in range(maxPos[2]-3,maxPos[2]+4):
+ # ventvoxel = 0
+ # for i in range(x):
+ # for j in range(y):
+ # if result[i,j,k]==1:
+ # ventvoxel +=1
+ # if ventvoxel > ventmaxArea :
+ # ventmaxArea = ventvoxel
+ # ventmaxPos = k
+ print('------------',imgName,'-------------')
+ print('middle of 7 slices :', maxPos[2])
+
+
+ saveImage(result, os.path.join(outputPath, outputName:='vent'+resultName))
+
+ return Area, maxPos, result, outputName
+
+
+
+
+
+
+
+
+
diff --git a/nph_5class/src/CTtools.py b/nph_5class/src/CTtools.py
new file mode 100644
index 0000000..de129bf
--- /dev/null
+++ b/nph_5class/src/CTtools.py
@@ -0,0 +1,261 @@
+#!/usr/bin/env python2
+# -*- coding: utf-8 -*-
+"""
+Created on Wed Nov 29 15:49:08 2017
+
+@author: pkao
+From: https://github.com/pykao/CT2MNI152/blob/master/CTtools.py
+"""
+
+import SimpleITK as sitk
+import numpy as np
+from skimage.filters import threshold_otsu
+from skimage import measure
+from scipy import ndimage
+from skimage import exposure
+
+def bone_extracted(ct_img_path, outName=""):
+ """Extract the bone of the CT scan based on the hard thresholding on pixel value"""
+
+ print('The CT scan you want to implement bone extraction: ', ct_img_path)
+
+
+ ct_img = sitk.ReadImage(ct_img_path)
+
+ bone_mask_img = sitk.Image(ct_img.GetWidth(), ct_img.GetHeight(), ct_img.GetDepth(), sitk.sitkFloat32)
+
+ output_ct_img = sitk.Image(ct_img.GetWidth(), ct_img.GetHeight(), ct_img.GetDepth(), sitk.sitkFloat32)
+
+ print('The size of CT scan:', ct_img.GetSize())
+
+ ct_nda = sitk.GetArrayFromImage(ct_img)
+
+ bone_mask_nda = sitk.GetArrayFromImage(bone_mask_img)
+
+ output_ct_nda = sitk.GetArrayFromImage(output_ct_img)
+
+ #print 'The minimum value of CT scan: ', np.amin(ct_nda)
+
+ #print 'The maximum value of CT scan: ', np.amax(ct_nda)
+
+ #print 'The pixel ID type of CT scan: ', ct_img.GetPixelIDTypeAsString()
+
+ #m = 1.0
+
+ #b = -1024.0
+
+ #bone_HU = 500.0
+
+ #bone_pixel = (bone_HU-b)/m
+
+ bone_pixel = 500
+
+ for z in range(ct_nda.shape[0]):
+ for x in range(ct_nda.shape[1]):
+ for y in range(ct_nda.shape[2]):
+ if ct_nda[z, x, y] >= bone_pixel:
+ output_ct_nda[z, x, y] = ct_nda[z, x, y]
+                    bone_mask_nda[z, x, y] = 1.0
+
+ output_ct_image = sitk.GetImageFromArray(output_ct_nda)
+
+
+
+ if not outName: # no outName supplied, my addition
+ output_ct_image_name = ct_img_path[:ct_img_path.find('.nii.gz')]+'_skull.nii.gz'
+ else:
+ output_ct_image_name = outName
+
+ print('The name of the output skull image: ', output_ct_image_name)
+
+ output_ct_image.CopyInformation(ct_img)
+
+ sitk.WriteImage(output_ct_image, output_ct_image_name)
+
+ return output_ct_image_name
+
+ # bone_mask
+ #bone_mask_image = sitk.GetImageFromArray(bone_mask_nda)
+
+ #bone_mask_image_name = ct_img_path[:ct_img_path.find('.nii.gz')]+'_skullMask.nii.gz'
+
+ # bone_mask_image.CopyInformation(ct_img)
+
+ #print 'The name of the output skull mask image: ', bone_mask_image_name
+
+ #sitk.WriteImage(bone_mask_image, bone_mask_image_name)
+
+ #return output_ct_image_name, bone_mask_image_name
+
+def getMaximum3DRegion(binary):
+ """ Get the Maximum 3D region from 3D multiple bindary Regions"""
+
+ all_labels = measure.label(binary, background = 0)
+
+ props = measure.regionprops(all_labels)
+
+ areas = [prop.area for prop in props]
+
+ maxArea_label = 1+np.argmax(areas)
+
+ max_binary = np.float32(all_labels == maxArea_label)
+
+ return max_binary
+
+
+
+
+def normalizeCTscan(ct_nda):
+ """Normalize the CT scan to range 0 to 1"""
+ if np.amin(ct_nda) < 0:
+ ct_normalized_nda = ct_nda - np.amin(ct_nda)
+
+ ct_normalized_nda = ct_normalized_nda/np.amax(ct_normalized_nda)
+
+ return ct_normalized_nda
+
+
+def otsuThreshoulding(ct_normalized_nda):
+ """Apply Otsu thresholding on the normalized ranging from 0 to 1 scan"""
+
+ thresh = threshold_otsu(ct_normalized_nda)
+
+ binary = (ct_normalized_nda > thresh)*1
+
+ return binary.astype(np.float32)
+
+def get2Maximum2DRegions(max_binary):
+ """Get two largestest 2D region from multiple 2D regions"""
+
+ xy_two_largest_binary = np.zeros(max_binary.shape, dtype = np.float32 )
+
+ largest_area = np.zeros(max_binary.shape[0])
+
+ second_largest_area = np.zeros(max_binary.shape[0])
+
+ for i in range(max_binary.shape[0]):
+ xy_binary = max_binary[i, :, :]
+ xy_labels = measure.label(xy_binary, background = 0)
+ xy_props = measure.regionprops(xy_labels)
+ xy_areas = [prop.area for prop in xy_props]
+ #print xy_areas
+
+ if xy_areas == []:
+ continue
+
+ elif len(xy_areas) == 1:
+ largest_area[i] = xy_areas[0]
+ second_largest_area[i] = 0.0
+ largest_label = xy_areas.index(largest_area[i]) + 1
+ xy_two_largest_binary[i, :, :] = xy_labels == largest_label
+
+ else:
+ xy_areas_sorted = sorted(xy_areas)
+ largest_area[i] = xy_areas_sorted[-1]
+ second_largest_area[i] = xy_areas_sorted[-2]
+ largest_label = xy_areas.index(largest_area[i]) + 1
+ second_largest_label = xy_areas.index(second_largest_area[i])+1
+ xy_largest_binary = xy_labels == largest_label
+ xy_second_largest_binary = xy_labels == second_largest_label
+ xy_two_largest_binary[i, :, :] = np.float32(np.logical_or(xy_largest_binary, xy_second_largest_binary))
+
+ return xy_two_largest_binary
+
+def get1Maximum2DRegion(max_second_binary):
+ """Get the largest 2D region from multiple 2D regions"""
+
+ new_binary = np.zeros(max_second_binary.shape, dtype = np.float32)
+ for i in range(max_second_binary.shape[0]):
+ xy_binary = max_second_binary[i,:,:]
+ xy_labels = measure.label(xy_binary)
+ xy_props = measure.regionprops(xy_labels)
+ xy_areas = [prop.area for prop in xy_props]
+ #print i, xy_areas_1
+ if xy_areas == []:
+ continue
+ else:
+ max_area_label = 1 + np.argmax(xy_areas)
+ new_binary[i,:,:] = np.float32(xy_labels == max_area_label)
+
+ return new_binary
+
+
+def imageOpening2D(max_second_binary, structure=np.ones((15, 15))):
+ """Applying the image opening operation on the binary mask"""
+ new_max_second_binary = np.zeros(max_second_binary.shape, dtype = np.float32)
+
+ for i in range(max_second_binary.shape[0]):
+
+ new_max_second_binary[i,:,:] = ndimage.binary_opening(max_second_binary[i,:,:].astype(int), structure=structure).astype(np.float32)
+
+ return new_max_second_binary
+
+def removeCTscandevice(ct_img_path):
+ """remove the ct scan device"""
+
+ ct_img = sitk.ReadImage(ct_img_path)
+
+ ct_nda = sitk.GetArrayFromImage(ct_img)
+
+ print('The CT scan you want to implement CT scan device removal:', ct_img_path)
+
+ #print 'The minimum value of CT scan: ', np.amin(ct_nda)
+
+ #print 'The maximum value of CT scan: ', np.amax(ct_nda)
+
+ #print 'The pixel ID type of CT scan: ', ct_img.GetPixelIDTypeAsString()
+
+ ct_normalized_nda = normalizeCTscan(ct_nda)
+
+ binary = otsuThreshoulding(ct_normalized_nda)
+
+ max_binary = getMaximum3DRegion(binary)
+
+ xy_two_largest_binary = get2Maximum2DRegions(max_binary)
+
+ max_second_binary = getMaximum3DRegion(xy_two_largest_binary)
+
+ new_binary = get1Maximum2DRegion(max_second_binary)
+
+    new_max_second_binary = imageOpening2D(new_binary)
+
+    new_max_binary = getMaximum3DRegion(new_max_second_binary)
+
+ output_ct_image = sitk.GetImageFromArray(ct_nda * new_max_binary)
+
+ output_ct_image.CopyInformation(ct_img)
+
+ output_ct_image_name = ct_img_path[:ct_img_path.find('.nii.gz')]+'_woCTdevice.nii.gz'
+
+ sitk.WriteImage(output_ct_image, output_ct_image_name)
+
+
+ return output_ct_image_name
+
+ # The mask for CT device
+
+ #woCTdevice_mask_image = sitk.GetImageFromArray(new_max_binary)
+
+ #woCTdevice_mask_image.CopyInformation(ct_img)
+
+ #woCTdevice_mask_image_name = ct_img_path[:ct_img_path.find('.nii.gz')]+'_woCTdeviceMask.nii.gz'
+
+ #sitk.WriteImage(woCTdevice_mask_image, woCTdevice_mask_image_name)
+
+ #return output_ct_image_name, woCTdevice_mask_image_name
+
+
+
+def contrastStretch(ct_img_path, percent = (10,90)):
+ """Apply the contrast stretching on 2D or 3D image"""
+ ct_img = sitk.ReadImage(ct_img_path)
+ ct_nda = sitk.GetArrayFromImage(ct_img)
+ p1, p2 = np.percentile(ct_nda, percent, interpolation='nearest')
+ nda_rescale = exposure.rescale_intensity(ct_nda, in_range = (p1, p2))
+ ct_img_cs = sitk.GetImageFromArray(nda_rescale)
+ ct_img_cs.CopyInformation(ct_img)
+ output_ct_name = ct_img_path[:ct_img_path.find('.nii.gz')]+'_contrastStretching.nii.gz'
+ sitk.WriteImage(ct_img_cs, output_ct_name)
+ return output_ct_name
+
+
diff --git a/nph_5class/src/TestFunc.py b/nph_5class/src/TestFunc.py
new file mode 100644
index 0000000..b6dd292
--- /dev/null
+++ b/nph_5class/src/TestFunc.py
@@ -0,0 +1,291 @@
+import time
+import numpy as np
+import torch
+from torch.utils.data import DataLoader
+from torch.utils.data import Dataset
+import os
+
+import nibabel as nib
+import torchvision
+import torchvision.transforms as transforms
+import torch.nn as nn
+import torch.nn.functional as F
+import torch.optim as optim
+from scipy import ndimage
+
+
+# device = 'cuda:3' if torch.cuda.is_available() else 'cpu'
+# device='cuda:1' if torch.cuda.is_available() else 'cpu'
+def getCenter(image, segmentation, i, j, k):
+
+ sample=image[i-16:i+16+1,j-16:j+16+1,k-1:k+1+1]
+ center=segmentation[i:i+1+1,j:j+1+1,k]
+
+ return sample, center
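+
+# getCenter pairs a 33x33x3 intensity patch (three adjacent slices) with the
+# 2x2 in-plane label block at its center, matching the network's 4 outputs.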
+
+def fillHoles(imgName):
+ image=nib.load('{}_Mask.nii.gz'.format(str(imgName))).get_fdata()
+ for z in range(image.shape[2]):
+ image[:,:,z]=ndimage.binary_fill_holes(image[:,:,z]).astype(int)
+
+ saveImage(image, '{}_Mask.nii.gz'.format(imgName))
+
+def readAll(imgPath, betPath):
+
+ positions=[]
+
+ image = nib.load(imgPath).get_fdata()
+
+ brainMask = nib.load(betPath).get_fdata()
+
+ x,y,z=image.shape
+
+
+    # clip intensities to the [-100, 200] window, then rescale to [0, 1]
+    # (np.clip replaces the per-voxel loops and leaves the x, y, z shape
+    # variables above untouched for the patch loops below)
+    image = np.clip(image, -100, 200)
+    image += 100
+    image = image/300
+
+ for k in range(1, z-1, 1):
+ for i in range(17, x-17, 2):
+ for j in range(17, y-17, 2):
+
+
+ sample, center =getCenter(image, brainMask, i, j, k)
+ if center.any():
+ positions.append((i,j,k))
+# return image, annotation
+ return image, brainMask, positions, image.shape
+
+
+def getPatch(image_full, brainMask, i, j, k):
+
+ image, center=getCenter(image_full, brainMask, i, j, k)
+
+ return image, torch.tensor([i,j,k])
+
+class NPHDataset(Dataset):
+ def __init__(self, dataPath, betPath, name, Train=False):
+
+ self.name=name
+ self.image, self.brainMask, self.imgList, self.imageShape=readAll(dataPath, betPath)
+ self.transform=transforms.ToTensor()
+
+ def __len__(self):
+ return len(self.imgList)
+
+ def __getitem__(self, idx):
+
+# return 0
+ if torch.is_tensor(idx):
+ idx = idx.tolist()
+
+ i,j,k=self.imgList[idx]
+ data, pos=getPatch(self.image, self.brainMask, i, j, k)
+
+ image = self.transform(data)
+ sample = {'img': image,
+ 'pos': pos
+ }
+ return sample
+
+class MyModel(nn.Module):
+ def __init__(self,ResNet, num_classes=4, num_outputs=9):
+ super(MyModel, self).__init__()
+
+ self.layer0=nn.Sequential(
+ nn.Conv2d(3,64, kernel_size=(3, 3), stride=(2, 2), padding=(3, 3), bias=False),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True),
+ nn.ReLU(inplace=True),
+ nn.MaxPool2d(kernel_size=2, stride=2, padding=1, dilation=1, ceil_mode=False),
+
+ )
+
+ self.layer1=ResNet.layer1
+ self.layer2=ResNet.layer2
+ self.avgpool=nn.AdaptiveAvgPool2d(output_size=(1, 1))
+
+ self.fc=nn.Linear(in_features=128, out_features=num_classes*num_outputs, bias=True)
+
+ def forward(self, x):
+
+ x=self.layer0(x)
+ x=self.layer1(x)
+ x=self.layer2(x)
+ x=self.avgpool(x)
+ x = torch.flatten(x, 1)
+ x = self.fc(x)
+ return x
+
+def test(model, test_loader, shape, device):
+ """
+ 5class test function (new).
+ """
+
+ model.eval()
+
+ result=[]
+
+ # Don't update model
+ with torch.no_grad():
+ predList=[]
+ targetList=[]
+
+ # Predict
+ reconstructed=np.zeros(shape)
+ for batch_index, batch_samples in enumerate(test_loader):
+ data = batch_samples['img'].to(device, dtype=torch.float)
+ # pos, shape=batch_samples['pos'].to(device, dtype=torch.float), batch_samples['shape'].to(device)
+ pos = batch_samples['pos'].to(device, dtype=torch.float)
+
+ output = model(data)
+ softmax=nn.Softmax(dim=1)
+ output=torch.reshape(output,(output.shape[0], 5, 2,2))
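+                # logits arrive as (batch, 20); viewed as (batch, 5 classes, 2, 2)
+                # the argmax over dim=1 yields a 2x2 block of class labels per patch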
+ output=softmax(output)
+
+ pred=output.argmax(dim=1, keepdim=True).cpu()
+
+ N=output.shape[0]
+
+ for k in range(N):
+
+ x, y, z=map(int, (pos[k][0].item(), pos[k][1].item(), pos[k][2].item()))
+
+ # reconstructed[x:x+1+1,y:y+1+1,z]=pred[k,0,:,:].cpu()
+ # breakpoint()
+ reconstructed[x:x+1+1,y:y+1+1,z]=pred[k,0,:,:]
+ # classes.add(pred[k,0,:,:])
+
+ return reconstructed
+
+def loadModel(modelPath, device):
+ ResNet=torch.hub.load('pytorch/vision:v0.10.0', 'resnet18', pretrained=False)
+
+ model = MyModel(ResNet, num_classes=5, num_outputs=4).to(device)
+ model.load_state_dict(torch.load(modelPath,map_location=device))
+
+ return model
+
+def checkDevice(device):
+ device=device if torch.cuda.is_available() else 'cpu'
+ return device
+
+def runTest(imgName, outputPath, dataPath, betPath, device, BS, model):
+
+ # BS=200
+
+ # dataPath=os.path.join(dataPath,'{}.nii.gz'.format(imgName))
+
+ # betPath=os.path.join(betPath,'{}_Mask.nii.gz'.format(imgName))
+ betPath = betPath / f"{imgName}_Mask.nii.gz"
+
+ testDataset=NPHDataset(dataPath, betPath, imgName,Train=False)
+ # testDataset=NPHDataset("/module/src/Norm_old_003_96yo.nii.gz", betPath, imgName,Train=False)
+ # test_loader = DataLoader(testDataset, batch_size=BS, num_workers=16, drop_last=False, shuffle=False)
+ test_loader = DataLoader(testDataset, batch_size=BS, num_workers=1, drop_last=False, shuffle=False)
+ shape=testDataset.imageShape
+
+ print('Start Running:', imgName)
+
+ start = time.time()
+
+ reconstructed=test(model, test_loader, shape, device)
+ # changeClass(reconstructed)
+
+ print(imgName, end=' ')
+# print(' Dice score for class{}: {}'.format(i, 2*TP[i]/(2*TP[i]+FP[i]+FN[i])))
+
+ # img = nib.Nifti1Image(reconstructed, np.eye(4))
+ # nib.save(img, 'reconstructed/reconstructed_{}_{}.nii.gz'.format(modelname, imgName))
+ # print('Save to: reconstructed_{}_{}.nii.gz'.format(modelname, imgName))
+
+ # result_noNoise=eliminateNoise(reconstructed, minArea=64)
+ result_noNoise=eliminateNoise(reconstructed, minArea=32)
+ # result_noNoise=eliminateNoise(reconstructed, minArea=80)
+ # result_noNoise = reconstructed
+
+ saveImage(result_noNoise, os.path.join(outputPath, 'reconstructed_{}.nii.gz'.format(imgName)))
+
+ end = time.time()
+ print('Elapsed time:', end - start)
+
+ return 'reconstructed_{}.nii.gz'.format(imgName)
+
+def saveImage(image, name):
+ img = nib.Nifti1Image(image, np.eye(4))
+ nib.save(img, name )
+
+def eliminateNoise(label, minArea=16):
+ neighbors=[(-1,0),(1,0),(0,-1),(0,1)]
+
+ seen=set()
+ import heapq
+ position=[]
+ heapq.heapify(position)
+
+ island=0
+ newLabel=np.zeros(label.shape)
+ i, j, k=label.shape
+ for z in range(k):
+ for x in range(i):
+ for y in range(j):
+
+ if (label[x,y,z]!=0) and (x,y,z) not in seen:
+ island+=1
+ area=0
+ curIsland=set()
+ seen.add((x,y,z))
+ curIsland.add((x,y,z))
+ heapq.heappush(position, (x,y,z))
+
+
+ while position:
+ cur=heapq.heappop(position)
+ area+=1
+
+
+ for neighbor in neighbors:
+
+ if cur[0]-neighbor[0]<0 or cur[0]-neighbor[0]>=i: continue
+ if cur[1]-neighbor[1]<0 or cur[1]-neighbor[1]>=j: continue
+# if cur[2]-neighbor[2]<0 or cur[2]-neighbor[2]>=k: continue
+
+ if label[cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]]==label[x,y,z] and (cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]) not in seen:
+ seen.add((cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]))
+ curIsland.add((cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]))
+ heapq.heappush(position, (cur[0]-neighbor[0],cur[1]-neighbor[1],cur[2]))
+
+
+
+ for (posX, posY, posZ) in curIsland:
+                        if area >= minArea:
+                            newLabel[posX, posY, posZ] = label[posX, posY, posZ]
+
+    return newLabel
+
+
+from skimage.segmentation import flood, flood_fill
+
+def postSkullStrip(scanName, maskName):
+    # Reconstructed opening (assumed signature, inferred from the commented
+    # driver below): load the raw scan and its skull mask, then zero out
+    # bright bone voxels before applying the mask.
+    scan = nib.load(scanName).get_fdata()
+    mask = nib.load(maskName).get_fdata()
+    scan[np.where(scan > 250)] = 0
+
+ # Apply the mask
+ scan[np.where(mask > 0)] = 0
+
+ #to ignore background
+ scan = flood_fill(scan, (1,1,1), 250, tolerance=0)
+
+ #connectivity - find bright blotches where area is small
+ #first find all points where low intensity
+ poi = np.where(scan > 150)
+ for i in range(len(poi[0])):
+ if(not (scan[poi[0][i], poi[1][i], poi[2][i]] == 0)):
+ print("checking pt ", poi[0][i], poi[1][i], poi[2][i])
+ regMask = flood(scan, (poi[0][i], poi[1][i], poi[2][i]), tolerance=100)
+ #evaluate each point to see if in a small region
+            if(len(np.nonzero(regMask)[0]) < 10):
+                print(np.nonzero(regMask))
+ scan = flood_fill(scan, (poi[0][i], poi[1][i], poi[2][i]), 0, tolerance=100)
+
+ #restore background
+ scan = flood_fill(scan, (1,1,1), 0, tolerance=0)
+
+ return scan
+
+# scans = os.listdir("Scans") # raw scans
+# for scan in scans:
+# if(scan.endswith("nii.gz")):
+# name = scan.split('.')[0]
+# print(name)
+# mask = "skull_stripped_files/" + name + "_skull.nii.gz" # these files are from Poyu's code
+# print(mask)
+
+# proc = postSkullStrip("Scans/" + scan, mask)
+
+# nii_image = nib.Nifti1Image(proc.astype(np.float32), affine=np.eye(4))
+# nib.save(nii_image, "stripped/" + name + "_masked.nii.gz") # the corrected raw scans, should have a good number of slices bounded to just the brain + maybe some thin shape of the skull
diff --git a/nph_5class/xml_template b/nph_5class/xml_template
new file mode 100644
index 0000000..a3a520e
--- /dev/null
+++ b/nph_5class/xml_template
@@ -0,0 +1,42 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+