Skip to content

Commit

Permalink
Merge pull request #50 from DrFaust92/schedule
Browse files Browse the repository at this point in the history
Schedule
  • Loading branch information
DrFaust92 authored Mar 19, 2022
2 parents 9d539f7 + 7cea373 commit c49a4fc
Show file tree
Hide file tree
Showing 9 changed files with 432 additions and 22 deletions.
1 change: 1 addition & 0 deletions bitbucket/provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ func Provider() *schema.Provider {
"bitbucket_deploy_key": resourceDeployKey(),
"bitbucket_pipeline_ssh_key": resourcePipelineSshKey(),
"bitbucket_pipeline_ssh_known_host": resourcePipelineSshKnownHost(),
"bitbucket_pipeline_schedule": resourcePipelineSchedule(),
"bitbucket_ssh_key": resourceSshKey(),
"bitbucket_branch_restriction": resourceBranchRestriction(),
"bitbucket_branching_model": resourceBranchingModel(),
Expand Down
247 changes: 247 additions & 0 deletions bitbucket/resource_pipeline_schedule.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,247 @@
package bitbucket

import (
"fmt"
"log"
"strings"

"github.com/DrFaust92/bitbucket-go-client"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
)

// resourcePipelineSchedule defines the bitbucket_pipeline_schedule resource:
// its CRUD handlers, import support, and configuration schema.
func resourcePipelineSchedule() *schema.Resource {
	return &schema.Resource{
		Create: resourcePipelineScheduleCreate,
		Read:   resourcePipelineScheduleRead,
		Update: resourcePipelineScheduleUpdate,
		Delete: resourcePipelineScheduleDelete,
		// The composite ID (WORKSPACE/REPO/UUID) carries everything Read
		// needs, so plain passthrough import works.
		Importer: &schema.ResourceImporter{
			State: schema.ImportStatePassthrough,
		},

		Schema: map[string]*schema.Schema{
			// Workspace (team) slug that owns the repository.
			"workspace": {
				Type:     schema.TypeString,
				Required: true,
				ForceNew: true,
			},
			// Repository slug the schedule is attached to.
			"repository": {
				Type:     schema.TypeString,
				Required: true,
				ForceNew: true,
			},
			// Whether the schedule is active; the only attribute that can be
			// updated in place (everything else is ForceNew).
			"enabled": {
				Type:     schema.TypeBool,
				Required: true,
			},
			// Cron expression controlling when the pipeline runs.
			"cron_pattern": {
				Type:     schema.TypeString,
				Required: true,
				ForceNew: true,
			},
			// Single-element block describing which ref the scheduled
			// pipeline targets.
			"target": {
				Type:     schema.TypeList,
				Required: true,
				ForceNew: true,
				MaxItems: 1,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						// Name of the branch or tag to build.
						"ref_name": {
							Type:     schema.TypeString,
							Required: true,
							ForceNew: true,
						},
						// Kind of ref: only "branch" or "tag" are accepted.
						"ref_type": {
							Type:         schema.TypeString,
							Required:     true,
							ForceNew:     true,
							ValidateFunc: validation.StringInSlice([]string{"branch", "tag"}, false),
						},
						// Single-element block selecting the pipeline
						// definition pattern to run.
						"selector": {
							Type:     schema.TypeList,
							Required: true,
							ForceNew: true,
							MaxItems: 1,
							Elem: &schema.Resource{
								Schema: map[string]*schema.Schema{
									// Pattern matched against pipeline
									// definitions in bitbucket-pipelines.yml.
									"pattern": {
										Type:     schema.TypeString,
										Required: true,
										ForceNew: true,
									},
								},
							},
						},
					},
				},
			},
			// Server-assigned schedule UUID, exported for reference.
			"uuid": {
				Type:     schema.TypeString,
				Computed: true,
			},
		},
	}
}

// resourcePipelineScheduleCreate creates the pipeline schedule via the
// Bitbucket API and records its composite ID (WORKSPACE/REPO/UUID) in state,
// then delegates to Read to populate the remaining attributes.
func resourcePipelineScheduleCreate(d *schema.ResourceData, m interface{}) error {
	c := m.(Clients).genClient
	pipeApi := c.ApiClient.PipelinesApi

	pipeSchedule := expandPipelineSchedule(d)
	log.Printf("[DEBUG] Pipeline Schedule Request: %#v", pipeSchedule)

	repo := d.Get("repository").(string)
	workspace := d.Get("workspace").(string)
	schedule, _, err := pipeApi.CreateRepositoryPipelineSchedule(c.AuthContext, *pipeSchedule, workspace, repo)
	if err != nil {
		return fmt.Errorf("error creating pipeline schedule: %w", err)
	}

	// fmt.Sprintf already returns a string; the redundant string(...)
	// conversion around it was removed.
	d.SetId(fmt.Sprintf("%s/%s/%s", workspace, repo, schedule.Uuid))

	return resourcePipelineScheduleRead(d, m)
}

// resourcePipelineScheduleUpdate pushes the current configuration to the
// schedule identified by the composite resource ID, then re-reads state.
func resourcePipelineScheduleUpdate(d *schema.ResourceData, m interface{}) error {
	client := m.(Clients).genClient
	pipeApi := client.ApiClient.PipelinesApi

	workspace, repo, uuid, err := pipeScheduleId(d.Id())
	if err != nil {
		return err
	}

	payload := expandPipelineSchedule(d)
	log.Printf("[DEBUG] Pipeline Schedule Request: %#v", payload)

	if _, _, err = pipeApi.UpdateRepositoryPipelineSchedule(client.AuthContext, *payload, workspace, repo, uuid); err != nil {
		return fmt.Errorf("error updating pipeline schedule: %w", err)
	}

	return resourcePipelineScheduleRead(d, m)
}

// resourcePipelineScheduleRead refreshes state from the Bitbucket API. When
// the schedule no longer exists (HTTP 404) it is removed from state so the
// next plan recreates it instead of failing.
func resourcePipelineScheduleRead(d *schema.ResourceData, m interface{}) error {
	c := m.(Clients).genClient
	pipeApi := c.ApiClient.PipelinesApi

	workspace, repo, uuid, err := pipeScheduleId(d.Id())
	if err != nil {
		return err
	}

	schedule, res, err := pipeApi.GetRepositoryPipelineSchedule(c.AuthContext, workspace, repo, uuid)

	// BUG FIX: the 404 check must run before the generic error check. If the
	// client returns a non-nil error for non-2xx responses (typical for
	// swagger-generated clients — TODO confirm for this client), the original
	// ordering made the 404 branch unreachable and turned an out-of-band
	// deletion into a hard read error. Also guard res against nil, since a
	// transport-level failure may not produce a response at all.
	if res != nil && res.StatusCode == 404 {
		log.Printf("[WARN] Pipeline Schedule (%s) not found, removing from state", d.Id())
		d.SetId("")
		return nil
	}

	if err != nil {
		return fmt.Errorf("error reading Pipeline Schedule (%s): %w", d.Id(), err)
	}

	if res.Body == nil {
		return fmt.Errorf("error getting Pipeline Schedule (%s): empty response", d.Id())
	}

	d.Set("repository", repo)
	d.Set("workspace", workspace)
	d.Set("uuid", schedule.Uuid)
	d.Set("enabled", schedule.Enabled)
	d.Set("cron_pattern", schedule.CronPattern)

	d.Set("target", flattenPipelineRefTarget(schedule.Target))

	return nil
}

// resourcePipelineScheduleDelete deletes the schedule identified by the
// composite resource ID.
func resourcePipelineScheduleDelete(d *schema.ResourceData, m interface{}) error {
	c := m.(Clients).genClient
	pipeApi := c.ApiClient.PipelinesApi

	workspace, repo, uuid, err := pipeScheduleId(d.Id())
	if err != nil {
		return err
	}

	_, err = pipeApi.DeleteRepositoryPipelineSchedule(c.AuthContext, workspace, repo, uuid)
	if err != nil {
		return fmt.Errorf("error deleting Pipeline Schedule (%s): %w", d.Id(), err)
	}

	// err is known to be nil here; return nil explicitly rather than the
	// misleading `return err`.
	return nil
}

// expandPipelineSchedule converts the Terraform resource data into the API
// payload for a pipeline schedule.
func expandPipelineSchedule(d *schema.ResourceData) *bitbucket.PipelineSchedule {
	return &bitbucket.PipelineSchedule{
		Enabled:     d.Get("enabled").(bool),
		CronPattern: d.Get("cron_pattern").(string),
		Target:      expandPipelineRefTarget(d.Get("target").([]interface{})),
	}
}

// expandPipelineRefTarget converts the single-element "target" block into its
// API representation. Returns nil when the block is absent or empty.
func expandPipelineRefTarget(conf []interface{}) *bitbucket.PipelineRefTarget {
	// Guard the index and the nil-element case: the schema marks "target"
	// Required with MaxItems 1, but a nil element would otherwise make the
	// map lookups below panic via type assertion on a nil interface.
	if len(conf) == 0 || conf[0] == nil {
		return nil
	}

	tfMap, _ := conf[0].(map[string]interface{})

	target := &bitbucket.PipelineRefTarget{
		RefName:  tfMap["ref_name"].(string),
		RefType:  tfMap["ref_type"].(string),
		Selector: expandPipelineRefTargetSelector(tfMap["selector"].([]interface{})),
		Type_:    "pipeline_ref_target",
	}

	return target
}

// expandPipelineRefTargetSelector converts the single-element "selector"
// block into its API representation. Returns nil when the block is absent or
// empty.
func expandPipelineRefTargetSelector(conf []interface{}) *bitbucket.PipelineSelector {
	// Same panic guard as expandPipelineRefTarget: an empty list or nil
	// element would otherwise crash on the type assertions.
	if len(conf) == 0 || conf[0] == nil {
		return nil
	}

	tfMap, _ := conf[0].(map[string]interface{})

	selector := &bitbucket.PipelineSelector{
		Pattern: tfMap["pattern"].(string),
		Type_:   "branches",
	}

	return selector
}

// flattenPipelineRefTarget renders an API ref target as the single-element
// list Terraform expects for the "target" block; nil flattens to an empty
// list.
func flattenPipelineRefTarget(target *bitbucket.PipelineRefTarget) []interface{} {
	if target == nil {
		return []interface{}{}
	}

	return []interface{}{
		map[string]interface{}{
			"ref_name": target.RefName,
			"ref_type": target.RefType,
			"selector": flattenPipelineSelector(target.Selector),
		},
	}
}

// flattenPipelineSelector renders an API selector as the single-element list
// Terraform expects for the "selector" block; nil flattens to an empty list.
func flattenPipelineSelector(sel *bitbucket.PipelineSelector) []interface{} {
	if sel == nil {
		return []interface{}{}
	}

	return []interface{}{
		map[string]interface{}{"pattern": sel.Pattern},
	}
}

// pipeScheduleId splits a composite resource ID of the form
// WORKSPACE-ID/REPO-ID/UUID into its three components, erroring on any other
// shape.
func pipeScheduleId(id string) (string, string, string, error) {
	parts := strings.Split(id, "/")
	if len(parts) == 3 {
		return parts[0], parts[1], parts[2], nil
	}
	return "", "", "", fmt.Errorf("unexpected format of ID (%q), expected WORKSPACE-ID/REPO-ID/UUID", id)
}
111 changes: 111 additions & 0 deletions bitbucket/resource_pipeline_schedule_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@
package bitbucket

import (
"fmt"
"os"
"testing"

"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
)

// TestAccBitbucketPipelineSchedule_basic is an acceptance test: create an
// enabled schedule, verify import round-trips, then toggle enabled off in
// place.
func TestAccBitbucketPipelineSchedule_basic(t *testing.T) {
	resourceName := "bitbucket_pipeline_schedule.test"

	workspace := os.Getenv("BITBUCKET_TEAM")
	// The schedule resource requires a repository that already has pipelines
	// configured, so a bootstrapped repo is supplied via the environment.
	repo := os.Getenv("BITBUCKET_PIPELINED_REPO")

	resource.Test(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckBitbucketPipelineScheduleDestroy,
		Steps: []resource.TestStep{
			{
				Config: testAccBitbucketPipelineScheduleConfig(workspace, repo, true),
				Check: resource.ComposeTestCheckFunc(
					testAccCheckBitbucketPipelineScheduleExists(resourceName),
					resource.TestCheckResourceAttr(resourceName, "workspace", workspace),
					resource.TestCheckResourceAttr(resourceName, "repository", repo),
					resource.TestCheckResourceAttr(resourceName, "enabled", "true"),
					resource.TestCheckResourceAttr(resourceName, "cron_pattern", "0 30 * * * ? *"),
				),
			},
			// Verify the passthrough importer reconstructs identical state.
			{
				ResourceName:      resourceName,
				ImportState:       true,
				ImportStateVerify: true,
			},
			// Flip enabled to false; only "enabled" is updatable in place.
			{
				Config: testAccBitbucketPipelineScheduleConfig(workspace, repo, false),
				Check: resource.ComposeTestCheckFunc(
					testAccCheckBitbucketPipelineScheduleExists(resourceName),
					resource.TestCheckResourceAttr(resourceName, "workspace", workspace),
					resource.TestCheckResourceAttr(resourceName, "repository", repo),
					resource.TestCheckResourceAttr(resourceName, "enabled", "false"),
					resource.TestCheckResourceAttr(resourceName, "cron_pattern", "0 30 * * * ? *"),
				),
			},
		},
	})
}

// testAccCheckBitbucketPipelineScheduleDestroy verifies that every schedule
// recorded in state is gone from the API after terraform destroy.
func testAccCheckBitbucketPipelineScheduleDestroy(s *terraform.State) error {
	client := testAccProvider.Meta().(Clients).genClient
	pipeApi := client.ApiClient.PipelinesApi

	for _, rs := range s.RootModule().Resources {
		if rs.Type != "bitbucket_pipeline_schedule" {
			continue
		}

		workspace, repo, uuid, err := pipeScheduleId(rs.Primary.ID)
		if err != nil {
			return err
		}

		_, res, err := pipeApi.GetRepositoryPipelineSchedule(client.AuthContext, workspace, repo, uuid)

		if err == nil {
			return fmt.Errorf("The resource was found should have errored")
		}

		// Guard res before dereferencing: a transport-level failure may
		// return a nil response alongside the error, which previously
		// panicked on res.StatusCode.
		if res == nil || res.StatusCode != 404 {
			return fmt.Errorf("Pipeline Schedule still exists")
		}

	}
	return nil
}

// testAccCheckBitbucketPipelineScheduleExists returns a check asserting that
// the named resource is present in state with a non-empty ID.
func testAccCheckBitbucketPipelineScheduleExists(name string) resource.TestCheckFunc {
	return func(s *terraform.State) error {
		rs, found := s.RootModule().Resources[name]
		switch {
		case !found:
			return fmt.Errorf("Not found %s", name)
		case rs.Primary.ID == "":
			return fmt.Errorf("No Pipeline Schedule ID is set")
		}
		return nil
	}
}

// testAccBitbucketPipelineScheduleConfig renders the HCL for a schedule on
// the given workspace/repo with the requested enabled flag.
func testAccBitbucketPipelineScheduleConfig(workspace, repo string, enabled bool) string {
	const tmpl = `
resource "bitbucket_pipeline_schedule" "test" {
workspace = %[1]q
repository = %[2]q
enabled = %[3]t
cron_pattern = "0 30 * * * ? *"
target {
ref_name = "master"
ref_type = "branch"
selector {
pattern = "staging"
}
}
}
`
	return fmt.Sprintf(tmpl, workspace, repo, enabled)
}
Loading

0 comments on commit c49a4fc

Please sign in to comment.