This repository has been archived by the owner on May 23, 2023. It is now read-only.

Commit

Added manual mapping api. Added mandatory indicator_category field and unit measure field
kjod committed Apr 9, 2017
1 parent b535e34 commit c0c2584
Showing 4 changed files with 46 additions and 17 deletions.
1 change: 1 addition & 0 deletions ZOOM/api/indicator/filters.py
@@ -18,4 +18,5 @@ class Meta:
'source_id',
'country_id__region',
'country_id__name',
'unit_of_measure',
)
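
With 'unit_of_measure' added to the filter's Meta fields, django-filter should expose it as a query parameter on the indicator list endpoint. A minimal client sketch, assuming the endpoint is served at /api/indicators/; the host, path, and example values are illustrative only, not taken from this commit.

import requests

# Hypothetical request: the URL and parameter values are assumptions; the
# parameter names mirror the fields listed in filters.py.
response = requests.get(
    "http://localhost:8000/api/indicators/",
    params={"unit_of_measure": "percentage", "country_id__region": "Africa"},
)
print(response.status_code)
print(response.json())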
41 changes: 30 additions & 11 deletions ZOOM/api/manual_map/views.py
@@ -13,6 +13,7 @@
from rest_framework.response import Response
from rest_framework import status
from rest_framework.decorators import api_view
from validate.models import File
import numpy as np
import pandas as pd
import pickle
@@ -24,31 +25,40 @@
@api_view(['GET', 'POST'])
def manual_mapping(request):
print('Received request')
print(request)
#print(request.data)
#print(request.data['file_id'])
#print(request.data['dict'])

if request.method == 'POST':
print('Request received')
#check data types
print(request)
# add validation check here
if 'dict' in request.POST:
if 'dict' in request.data:
print('here')
mappings = json.loads(request.data['dict'])
mappings.pop("null", None)
mappings.pop("unit_measure", None)#change later
mappings.pop("validate_store", None) # remove??
df_data = pd.read_csv(request.data['file_id']) # change to use with multiple files
print(request.data['dict'])
mappings = request.data['dict']#json.loads(request.data['dict'])
mappings.pop("null", None)#not needed for api call
mappings.pop("validate_store", None) # not needed for api call
file_id = str(File.objects.get(id=request.data['file_id']).file)
df_data = pd.read_csv(file_id) # change to use with multiple files
found_dtype = []
convert_to_dtype = []
error_message = []
correction_mappings = {}

dict_name = mappings.pop("empty_indicator", None)#request.session['dtypes']
dict_name = request.data["dtypes_loc"]#request.session['dtypes']
indicator_value = mappings.pop("empty_indicator", None)
country_value = mappings.pop("empty_country", None)
indicator_category_value = mappings.pop("empty_indicator_cat", None)
unit_of_measure_value = mappings.pop("empty_unit_of_measure", None)
relationship_dict = mappings.pop("relationship", None)
left_over_dict = mappings.pop("left_over", None)

print('Here in this method')
print(file_id)
print(dict_name)

with open(dict_name, 'rb') as f:
dtypes_dict = pickle.load(f)
#check if exists
@@ -111,9 +121,18 @@ def manual_mapping(request):
df_data['indicator_category_id'] = indicator_category_value
dtypes_dict[mappings['indicator_category_id'][0]] = [('str', 'str')]

if relationship_dict:
df_data = convert_df(mappings, relationship_dict, left_over_dict, df_data, dtypes_dict)
if unit_of_measure_value:
if len(unit_of_measure_value.keys()) < 2:#check each entry; empty unit_of_measure is a dict
mappings['unit_of_measure'] = ['unit_of_measure']
df_data['unit_of_measure'] = unit_of_measure_value[unit_of_measure_value.keys()[0]]
dtypes_dict[mappings['unit_of_measure'][0]] = [('str', 'str')]
else:
mappings['unit_of_measure'] = ['unit_of_measure']
dtypes_dict[mappings['unit_of_measure'][0]] = [('str', 'str')]

if relationship_dict:
#check if unit of measure exists
df_data = convert_df(mappings, relationship_dict, left_over_dict, df_data, dtypes_dict, unit_of_measure_value)

#remove replace
#Validation
@@ -168,7 +187,7 @@ def manual_mapping(request):
bulk_list = []

#cycle through dataset and save each line
order["file_source_id"] = request.data['file_id'];#request.session['files'][0]
order["file_source_id"] = file_id;#request.session['files'][0]
instance = FileSource(file_name = order['file_source_id'])
instance.save()
file_id = instance.id
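The reworked view resolves the uploaded file through the File model and reads three keys from the POST body: file_id (the File's primary key), dict (the column mappings), and dtypes_loc (the path of the pickled dtypes dictionary written during validation). A minimal client sketch; the endpoint URL, ids, paths, and column names below are assumptions for illustration.

import requests

# Illustrative payload: the keys mirror what the view reads from request.data;
# every concrete value here is made up.
payload = {
    "file_id": 12,                       # primary key of a previously uploaded File
    "dtypes_loc": "/tmp/dtypes.pickle",  # pickled dtypes dict produced by the validate step
    "dict": {
        "indicator_id": ["Indicator"],
        "country_id": ["Country"],
        "indicator_category_id": ["Subgroup"],
        "unit_of_measure": ["Unit"],
        "measure_value": ["Value"],
    },
}
response = requests.post("http://localhost:8000/api/manual_map/", json=payload)
print(response.status_code, response.json())
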
4 changes: 2 additions & 2 deletions ZOOM/api/validate/views.py
@@ -67,7 +67,7 @@ def validate(request):
template_heading_list.append(field.name)#.get_attname_column())

template_heading_list = template_heading_list[4:len(template_heading_list)]#skip first four headings as irrelevant to user input
template_heading_list.append("unit_measure") #needed?
#template_heading_list.append("unit_measure") #needed?

#count = 0# not sure if this is still needed, might need for matches
dicts, _ = get_dictionaries()#get dicts for country
@@ -222,4 +222,4 @@ def validate(self, file_id, request):
#output needs to pass alignments of mapped headings
return Response({"message": "Got some data!", "data": request.data})
#return context
#return render(request, 'validate/input_report.html', context)"""
#return render(request, 'validate/input_report.html', context)"""
17 changes: 13 additions & 4 deletions ZOOM/lib/tools.py
@@ -183,6 +183,7 @@ def check_column_data(dtypes, column_data, model_field, file_heading):#No need f
#should return field to map to
def check_data_type(field, dtypes):
#add time
print("getting here in check data type")
dtype_set = set()
result = False
if field == "country_id":
@@ -259,8 +260,11 @@ def correct_data(df_data, correction_data):#correction_data ["country_name, iso2
#if date convert in integer
#if

def convert_df(mappings,relationship_dict, left_over_dict, df_data, dtypes_dict):
def convert_df(mappings,relationship_dict, left_over_dict, df_data, dtypes_dict, empty_unit_measure_value):

if not empty_unit_measure_value:
empty_unit_measure_value = {}

columns = []
for col in df_data.columns:
temp = str(col)#.replace(" ", "~")#needed?
@@ -296,15 +300,20 @@ def convert_df(mappings,relationship_dict, left_over_dict, df_data, dtypes_dict)
#might have to be greater than 2

if col in mappings['indicator_category_id'] and len(mappings['indicator_category_id']) > 1:#if more than one relationship i.e. multiple subgroups and relationships
check = new_df[relationship_dict[col]][counter]
check = new_df[relationship_dict[col]][counter] #if subgroup already defined
new_df[relationship_dict[col]][counter] = new_df[relationship_dict[col]][counter] + "|" + (col.replace("~", " "))#last part not needed
new_df[left_over_dict[col]][counter] = df_data[col.replace("~", " ")][i]
new_df[left_over_dict[col]][counter] = df_data[col.replace("~", " ")][i]#check if col_replace is there
#if empty_unit_of_measure in mappings:
#apply units of measure
#a = 5

else: #normal case
#hre.y
new_df[relationship_dict[col]][counter] = col.replace("~", " ")#add column heading
new_df[left_over_dict[col]][counter] = df_data[col.replace("~", " ")][i]

if col.replace("~", " ") in empty_unit_measure_value:
new_df['unit_of_measure'] = empty_unit_measure_value[col.replace("~", " ")]

#new_df = df_data[mappings[key]]
#map value
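convert_df now takes the per-column unit-of-measure mapping as an extra argument and attaches it while reshaping wide indicator columns into category/value rows. A self-contained pandas illustration of that reshaping idea, not the actual convert_df implementation; all column names and values are invented.

import pandas as pd

# Wide indicator columns are melted into category/value pairs, then the
# per-column unit of measure is attached to each resulting row.
df = pd.DataFrame({"Country": ["Kenya", "Uganda"], "Deaths": [10, 20], "Cases": [100, 200]})
unit_of_measure = {"Deaths": "count", "Cases": "count"}

long_df = df.melt(id_vars=["Country"],
                  var_name="indicator_category_id",
                  value_name="measure_value")
long_df["unit_of_measure"] = long_df["indicator_category_id"].map(unit_of_measure)
print(long_df)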
