diff --git a/Makefile b/Makefile index 82b8950..0c4f651 100644 --- a/Makefile +++ b/Makefile @@ -2,9 +2,9 @@ TEST?=$$(go list ./... | grep -v 'vendor') HOSTNAME=aa NAMESPACE=test NAME=config -VERSION=0.1.6 +VERSION=0.2.1 BINARY=terraform-provider-${NAME} -OS_ARCH=darwin_amd64 +OS_ARCH=linux_amd64 default: install diff --git a/config/data_source_config_common_functions.go b/config/data_source_config_common_functions.go index fbe04e8..2d2c32b 100644 --- a/config/data_source_config_common_functions.go +++ b/config/data_source_config_common_functions.go @@ -3,6 +3,7 @@ package config import ( "fmt" + "github.com/360EntSecGroup-Skylar/excelize/v2" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) @@ -26,6 +27,33 @@ func dataSourceFilterSchema() *schema.Schema { } } +func dataSourceLookupSchema() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "column": { + Type: schema.TypeString, + Required: true, + }, + "worksheet": { + Type: schema.TypeString, + Optional: true, + }, + "key_column": { + Type: schema.TypeString, + Required: true, + }, + "value_column": { + Type: schema.TypeString, + Required: true, + }, + }, + }, + } +} + func buildConfigDataSourceFilters(set *schema.Set) []map[string]interface{} { var filters []map[string]interface{} for _, v := range set.List() { @@ -42,6 +70,81 @@ func buildConfigDataSourceFilters(set *schema.Set) []map[string]interface{} { return filters } +func buildConfigDataSourceLookup(set *schema.Set) []map[string]interface{} { + var lookup []map[string]interface{} + for _, v := range set.List() { + m := v.(map[string]interface{}) + mvalue := make(map[string]interface{}) + mvalue["Column"] = m["column"].(string) + if m["worksheet"] != nil { + mvalue["Worksheet"] = m["worksheet"].(string) + } else { + mvalue["Worksheet"] = nil + } + mvalue["Key"] = m["key_column"].(string) + mvalue["Value"] = m["value_column"].(string) + lookup = 
append(lookup, mvalue) + } + return lookup +} + +func checkLookupValue(lookup []map[string]interface{}, key string) bool { + for _, lv := range lookup { + if lv["Column"].(string) == key { + return true + } + } + return false +} + +func getLookupValue(lookup []map[string]interface{}, excel_file, default_worksheet string, key string, value string) (string, error) { + var lookupValue = "" + for _, lv := range lookup { + if lv["Column"].(string) == key { + f, err := excelize.OpenFile(excel_file) + if err != nil { + return "", err + } + worksheet := "" + if lv["Worksheet"].(string) != "" { + worksheet = lv["Worksheet"].(string) + } else { + worksheet = default_worksheet + } + rows, err := f.GetRows(worksheet) + if err != nil { + return "", fmt.Errorf(fmt.Sprintf("%v", rows)) + } + + columns := len(rows[0]) + + // get column of key + header := rows[0] + column_key := 0 + column_value := -1 + for i := 0; i < columns; i++ { + if header[i] == key { + column_key = i + } + if header[i] == lv["Value"] { + column_value = i + } + } + // get row of key + if column_value >= 0 { + for _, row := range rows { + if row[column_key] == value { + lookupValue = row[column_value] + } + } + } else { + return "", fmt.Errorf("lookup value not found") + } + } + } + return lookupValue, nil +} + func checkFiltersForItem(filters []map[string]interface{}, key string, value string) bool { for _, fv := range filters { if fv["Name"] == key { diff --git a/config/data_source_configuration_workbook.go b/config/data_source_configuration_workbook.go index 2cccbe5..b94f1b2 100644 --- a/config/data_source_configuration_workbook.go +++ b/config/data_source_configuration_workbook.go @@ -65,6 +65,7 @@ func dataSourceConfigurationWorkbook() *schema.Resource { Optional: true, }, "filter": dataSourceFilterSchema(), + "lookup": dataSourceLookupSchema(), }, } } @@ -84,12 +85,17 @@ func dataSourceConfigurationItemRead(ctx context.Context, d *schema.ResourceData end_column := d.Get("col_end").(string) var filters 
[]map[string]interface{} - // gather all filters if v, ok := d.GetOk("filter"); ok { filters = buildConfigDataSourceFilters(v.(*schema.Set)) } + var lookup []map[string]interface{} + // gather all lookups + if v, ok := d.GetOk("lookup"); ok { + lookup = buildConfigDataSourceLookup(v.(*schema.Set)) + } + // set the default configuration item column name if col_config_item == "" { col_config_item = "configuration_item" @@ -170,7 +176,7 @@ func dataSourceConfigurationItemRead(ctx context.Context, d *schema.ResourceData mapping := map_yaml.(map[interface{}]interface{}) // remap all csv headers based on mapping configuration - records := reMapData(csv, mapping["config_schema"], filters, col_config_item) + records := reMapData(csv, mapping["config_schema"], filters, lookup, excel_file, sheet_name, col_config_item) // get the transformed data data := getItemData(records, items, col_config_item) @@ -389,7 +395,7 @@ func unique(items []string) []string { return list } -func reMapData(csv []map[string]string, mapping interface{}, filters []map[string]interface{}, configuration_item string) []map[string]interface{} { +func reMapData(csv []map[string]string, mapping interface{}, filters []map[string]interface{}, lookup []map[string]interface{}, excel_file string, worksheet string, configuration_item string) []map[string]interface{} { new_csv := make([]map[string]interface{}, len(csv)) for key, value := range csv { item_key := "" @@ -436,7 +442,7 @@ func reMapData(csv []map[string]string, mapping interface{}, filters []map[strin new_value[k] = value[k] } else if strings.HasPrefix(k, "s_") || strings.HasPrefix(k, "string_") { replacer := strings.NewReplacer("s_", "", "string_", "") - new_key := replacer.Replace(k) + new_key = replacer.Replace(k) if value[k] != "" { new_value[new_key] = value[k] } else { @@ -444,7 +450,7 @@ func reMapData(csv []map[string]string, mapping interface{}, filters []map[strin } } else if strings.HasPrefix(k, "t_") || strings.HasPrefix(k, "tag_") { 
replacer := strings.NewReplacer("t_", "", "tag_", "") - new_key := strings.Title(replacer.Replace(k)) + new_key = strings.Title(replacer.Replace(k)) if value[k] != "" { new_tag[new_key] = value[k] } else { @@ -452,7 +458,7 @@ func reMapData(csv []map[string]string, mapping interface{}, filters []map[strin } } else if strings.HasPrefix(k, "n_") || strings.HasPrefix(k, "num_") || strings.HasPrefix(k, "number_") || strings.HasPrefix(k, "numeric_") { replacer := strings.NewReplacer("n_", "", "num_", "", "number_", "", "numeric_", "") - new_key := replacer.Replace(k) + new_key = replacer.Replace(k) if value[k] != "" { n, _ := strconv.ParseFloat(value[k], 64) new_value[new_key] = n @@ -461,7 +467,7 @@ func reMapData(csv []map[string]string, mapping interface{}, filters []map[strin } } else if strings.HasPrefix(k, "b_") || strings.HasPrefix(k, "bool_") || strings.HasPrefix(k, "boolean_") { replacer := strings.NewReplacer("b_", "", "bool_", "", "boolean_", "") - new_key := replacer.Replace(k) + new_key = replacer.Replace(k) if value[k] != "" { val, _ := strconv.ParseBool(value[k]) new_value[new_key] = val @@ -470,7 +476,7 @@ func reMapData(csv []map[string]string, mapping interface{}, filters []map[strin } } else if strings.HasPrefix(k, "l_") || strings.HasPrefix(k, "list_") { replacer := strings.NewReplacer("l_", "", "list_", "") - new_key := replacer.Replace(k) + new_key = replacer.Replace(k) if value[k] != "" { new_value[new_key] = strings.Split(value[k], ",") } else { @@ -478,7 +484,7 @@ func reMapData(csv []map[string]string, mapping interface{}, filters []map[strin } } else if strings.HasPrefix(k, "m_") || strings.HasPrefix(k, "map_") || strings.HasPrefix(k, "h_") || strings.HasPrefix(k, "hash_") { replacer := strings.NewReplacer("m_", "", "map_", "", "h_", "", "hash_", "") - new_key := replacer.Replace(k) + new_key = replacer.Replace(k) if value[k] != "" { vlist := strings.Split(value[k], ",") vmap := make(map[string]string) @@ -491,9 +497,32 @@ func reMapData(csv 
[]map[string]string, mapping interface{}, filters []map[strin new_value[new_key] = map[string]string{} } } else { + new_key = k new_value[k] = value[k] } + // get lookup value + if lookup != nil && checkLookupValue(lookup, new_key) { + if strings.Contains(value[new_key], ",") { + lkvals := strings.Split(value[new_key], ",") + for idx, vl := range lkvals { + lookup_value, err := getLookupValue(lookup, excel_file, worksheet, new_key, vl) + if err == nil && lookup_value != "" { + if idx == 0 { + new_value[new_key] = lookup_value + } else { + new_value[new_key] = new_value[new_key].(string) + "," + lookup_value + } + } + } + } else { + lookup_value, err := getLookupValue(lookup, excel_file, worksheet, new_key, value[new_key]) + if err == nil && lookup_value != "" { + new_value[new_key] = lookup_value + } + } + } + // check if value included in filter if len(filters) > 0 { if !include_value { diff --git a/docs/data-sources/workbook.md b/docs/data-sources/workbook.md index cf37d88..eca5bd4 100644 --- a/docs/data-sources/workbook.md +++ b/docs/data-sources/workbook.md @@ -40,6 +40,17 @@ data "config_workbook" "excel" { } } +data "config_workbook" "excel_lookup" { + excel = "filename.xlsx" + worksheet = "Sheet1" + lookup { + column = "script_name" + worksheet = "event_target" + key_column = "name" + value_column = "script" + } +} + data "config_workbook" "excel_vertical" { excel = "filename.xlsx" worksheet = "Sheet2" @@ -214,15 +225,25 @@ config_schema: - **schema** (String) - (Optional) JSON/YAML format string containing the schema of the configurations. - **worksheet** (String) - (Optional) The sheet name of the excel worksheet - **orientation** (String) - (Optional) default horizontal. Valid values are (horizontal,vertical) +- **filter** (Block) - (Optional) Filter the data +- **lookup** (Block) - (Optional) Replace data using lookup. Like `vlookup` function in Excel #### There should only be 1 instance of **csv** or **excel**.
You cannot define both on the same data source -### Filters +### Filter Nested `filter` blocks have the following structure: - **name** (String) - (Required) The name of the header/column - **values** (List) - (Required) The list of valid values to filter +### Lookup + +Nested `lookup` blocks have the following structure: +- **column** (String) - (Required) Column name of data you need lookup +- **worksheet** (String) - (Optional) Worksheet of the reference data. Default value is current worksheet +- **key_column** (String) - (Required) Column name of the lookup key +- **value_column** (String) - (Required) Column name of the lookup value + ### Output - **id** (String) The ID of this resource. diff --git a/examples/files/event.xlsx b/examples/files/event.xlsx new file mode 100644 index 0000000..7d462db Binary files /dev/null and b/examples/files/event.xlsx differ diff --git a/examples/main.tf b/examples/main.tf index 18100fb..5bf6cb5 100644 --- a/examples/main.tf +++ b/examples/main.tf @@ -1,8 +1,8 @@ terraform { required_providers { config = { - version = "0.1.6" - source = "alabuel/config" + version = "0.2.1" + source = "aa/test/config" } } } @@ -29,10 +29,30 @@ data "config_workbook" "excel" { data "config_workbook" "vexcel" { excel = "files/data.xlsx" worksheet = "Vert" - type = "vertical" + orientation = "vertical" configuration_item = "my_vertical" } +data "config_workbook" "lkexcel" { + excel = "files/event.xlsx" + worksheet = "cloudwatch_event_rule" + configuration_item = "cloudwatch_event_rule" + + lookup { + column = "command" + worksheet = "event_target" + key_column = "name" + value_column = "script" + } + + lookup { + column = "dependents" + worksheet = "event_target" + key_column = "name" + value_column = "script" + } +} + output "horiz" { value = jsondecode(data.config_workbook.excel.json) } @@ -40,3 +60,7 @@ output "horiz" { output "vert" { value = jsondecode(data.config_workbook.vexcel.json) } + +output "lookup" { + value = 
jsondecode(data.config_workbook.lkexcel.json) +} \ No newline at end of file