Add S3 Fetcher (#106)
* client, create, get

* fix typo

* list

* update

* delete

* integration test, beginning of mux tests

* fix arn test, export env var names to consts

* create tests

* delete tests

* get tests

* list tests

* update tests

* readmes

* update bucket name for tests

* newline

* update workflow secrets

* refactor s3_bucket_connector -> s3_fetcher

* export valid regions & logs types

* refactor main readme
mirii1994 authored Jan 22, 2023
1 parent b7cd2b2 commit 9a3e8d9
Showing 24 changed files with 1,359 additions and 2 deletions.
1 change: 1 addition & 0 deletions .github/workflows/test-daily.yml
@@ -15,6 +15,7 @@ jobs:
S3_PATH: ${{ secrets.S3_PATH }}
AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY }}
AWS_ARN: ${{ secrets.AWS_ARN }}
AWS_ARN_S3_CONNECTOR: ${{ secrets.AWS_ARN_S3_CONNECTOR }}
AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
AZURE_ACCOUNT_NAME: ${{ secrets.AZURE_ACCOUNT_NAME }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
1 change: 1 addition & 0 deletions .github/workflows/tests.yml
@@ -12,6 +12,7 @@ jobs:
S3_PATH: ${{ secrets.S3_PATH }}
AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY }}
AWS_ARN: ${{ secrets.AWS_ARN }}
AWS_ARN_S3_CONNECTOR: ${{ secrets.AWS_ARN_S3_CONNECTOR }}
AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
AZURE_ACCOUNT_NAME: ${{ secrets.AZURE_ACCOUNT_NAME }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
7 changes: 5 additions & 2 deletions README.md
@@ -16,6 +16,7 @@ The library currently supports the following API endpoints:
* [Restore logs](https://github.com/logzio/logzio_terraform_client/tree/master/restore_logs).
* [Authentication groups](https://docs.logz.io/api/#tag/Authentication-groups).
* [Kibana Objects](https://docs.logz.io/api/#tag/Import-or-export-Kibana-objects).
* [S3 Fetcher](https://docs.logz.io/api/#tag/Connect-to-S3-Buckets).

#### Contributing

@@ -30,17 +31,19 @@ The library currently supports the following API endpoints:

### Changelog

- v1.15.0
- Add [S3 Fetcher](https://docs.logz.io/api/#tag/Connect-to-S3-Buckets).
- v1.14.0
- `alerts_v2` - support new field `schedule`
- v1.13.1
- Add retry mechanism for requests.
- v1.13.0
- Bug fix - **sub_accounts**: field `ReservedDailyGB` in requests can be 0.


<details>
<summary markdown="span">Expand to check old versions</summary>

- v1.13.0
- Bug fix - **sub_accounts**: field `ReservedDailyGB` in requests can be 0.
- v1.12.0
- Upgrade to Go 1.18.
- Refactor `users`, adjust to the recent API fields.
26 changes: 26 additions & 0 deletions s3_fetcher/README.md
@@ -0,0 +1,26 @@
# S3 Fetcher

Compatible with Logz.io's [S3 Fetcher API](https://docs.logz.io/api/#tag/Connect-to-S3-Buckets).

To create a new S3 fetcher:

```go
client, _ := s3_fetcher.New(apiToken, test_utils.GetLogzIoBaseUrl())
addS3ObjectKey := false
active := true
fetcher, err := client.CreateS3Fetcher(s3_fetcher.S3FetcherRequest{
	Bucket:                   "my_bucket",
	Arn:                      awsArn, // either Arn or the AccessKey/SecretKey pair must be set
	AddS3ObjectKeyAsLogField: &addS3ObjectKey,
	Active:                   &active,
	Region:                   s3_fetcher.RegionUsEast1,
	LogsType:                 s3_fetcher.LogsTypeElb,
})
```

| Operation | Function signature                                                                                 |
|-----------|----------------------------------------------------------------------------------------------------|
| Create | `func (c *S3FetcherClient) CreateS3Fetcher(create S3FetcherRequest) (*S3FetcherResponse, error)` |
| Delete | `func (c *S3FetcherClient) DeleteS3Fetcher(s3FetcherId int64) error` |
| Get | `func (c *S3FetcherClient) GetS3Fetcher(s3FetcherId int64) (*S3FetcherResponse, error)` |
| List | `func (c *S3FetcherClient) ListS3Fetchers() ([]S3FetcherResponse, error)` |
| Update | `func (c *S3FetcherClient) UpdateS3Fetcher(s3FetcherId int64, update S3FetcherRequest) error` |
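
For the remaining operations, here is a minimal usage sketch (assuming the `client` from the snippet above and a `fetcherId` obtained from a previous `CreateS3Fetcher` call; error handling is abbreviated). Note that `UpdateS3Fetcher` takes a full `S3FetcherRequest`, so the required fields must be supplied again:

```go
// Fetch a single fetcher and list all fetchers on the account.
fetcher, err := client.GetS3Fetcher(fetcherId)
if err != nil {
	panic(err)
}
fetchers, _ := client.ListS3Fetchers()
fmt.Println(len(fetchers))

// Deactivate the fetcher by resending the full request with Active set to false.
inactive := false
err = client.UpdateS3Fetcher(fetcherId, s3_fetcher.S3FetcherRequest{
	Bucket:   fetcher.Bucket,
	Arn:      fetcher.Arn,
	Active:   &inactive,
	Region:   fetcher.Region,
	LogsType: fetcher.LogsType,
})

// Remove the fetcher when it is no longer needed.
err = client.DeleteS3Fetcher(fetcherId)
```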
184 changes: 184 additions & 0 deletions s3_fetcher/client_s3_fetcher.go
@@ -0,0 +1,184 @@
package s3_fetcher

import (
"fmt"
"github.com/logzio/logzio_terraform_client/client"
)

const (
s3FetcherServiceEndpoint string = "%s/v1/log-shipping/s3-buckets"

s3FetcherResourceName = "s3 fetcher"

operationCreateS3Fetcher = "CreateS3Fetcher"
operationGetS3Fetcher = "GetS3Fetcher"
operationListS3Fetcher = "ListS3Fetcher"
operationUpdateS3Fetcher = "UpdateS3Fetcher"
operationDeleteS3Fetcher = "DeleteS3Fetcher"
)

type S3FetcherClient struct {
*client.Client
}

type S3FetcherRequest struct {
AccessKey string `json:"accessKey,omitempty"`
SecretKey string `json:"secretKey,omitempty"`
Arn string `json:"arn,omitempty"`
Bucket string `json:"bucket"`
Prefix string `json:"prefix,omitempty"`
Active *bool `json:"active"`
AddS3ObjectKeyAsLogField *bool `json:"addS3ObjectKeyAsLogField,omitempty"`
Region AwsRegion `json:"region"`
LogsType AwsLogsType `json:"logsType"`
}

type S3FetcherResponse struct {
AccessKey string `json:"accessKey,omitempty"`
Arn string `json:"arn,omitempty"`
Bucket string `json:"bucket"`
Prefix string `json:"prefix,omitempty"`
Active bool `json:"active"`
AddS3ObjectKeyAsLogField bool `json:"addS3ObjectKeyAsLogField,omitempty"`
Region AwsRegion `json:"region"`
LogsType AwsLogsType `json:"logsType"`
Id int64 `json:"id,omitempty"`
}

type AwsRegion string
type AwsLogsType string

const (
RegionUsEast1 AwsRegion = "US_EAST_1"
RegionUsEast2 AwsRegion = "US_EAST_2"
RegionUsWest1 AwsRegion = "US_WEST_1"
RegionUsWest2 AwsRegion = "US_WEST_2"
RegionEuWest1 AwsRegion = "EU_WEST_1"
RegionEuWest2 AwsRegion = "EU_WEST_2"
RegionEuWest3 AwsRegion = "EU_WEST_3"
RegionEuCentral1 AwsRegion = "EU_CENTRAL_1"
RegionApNortheast1 AwsRegion = "AP_NORTHEAST_1"
RegionApNortheast2 AwsRegion = "AP_NORTHEAST_2"
RegionApSoutheast1 AwsRegion = "AP_SOUTHEAST_1"
RegionApSoutheast2 AwsRegion = "AP_SOUTHEAST_2"
RegionSaEast1 AwsRegion = "SA_EAST_1"
RegionApSouth1 AwsRegion = "AP_SOUTH_1"
RegionCaCentral1 AwsRegion = "CA_CENTRAL_1"

LogsTypeElb AwsLogsType = "elb"
LogsTypeVpcFlow AwsLogsType = "vpcflow"
LogsTypeS3Access AwsLogsType = "S3Access"
LogsTypeCloudfront AwsLogsType = "cloudfront"
)

func (r AwsRegion) String() string {
return string(r)
}

func (t AwsLogsType) String() string {
return string(t)
}

func New(apiToken, baseUrl string) (*S3FetcherClient, error) {
if len(apiToken) == 0 {
return nil, fmt.Errorf("API token not defined")
}
if len(baseUrl) == 0 {
return nil, fmt.Errorf("Base URL not defined")
}
c := &S3FetcherClient{
Client: client.New(apiToken, baseUrl),
}

return c, nil
}

func validateCreateUpdateS3FetcherRequest(req S3FetcherRequest) error {
if len(req.Bucket) == 0 {
return fmt.Errorf("field bucket must be set")
}

if len(req.Region) == 0 {
return fmt.Errorf("field region must be set")
}

if len(req.LogsType) == 0 {
return fmt.Errorf("field logsType must be set")
}

if req.Active == nil {
return fmt.Errorf("field active must be set")
}

if (req.AccessKey != "" && req.SecretKey == "") || (req.AccessKey == "" && req.SecretKey != "") {
return fmt.Errorf("both aws keys must be set")
}

if req.AccessKey == "" && req.SecretKey == "" && req.Arn == "" {
return fmt.Errorf("either keys or arn must be set")
}

err := isValidRegion(req.Region)
if err != nil {
return err
}

err = isValidLogsType(req.LogsType)
if err != nil {
return err
}

return nil
}

func GetValidRegions() []AwsRegion {
return []AwsRegion{
RegionUsEast1,
RegionUsEast2,
RegionUsWest1,
RegionUsWest2,
RegionEuWest1,
RegionEuWest2,
RegionEuWest3,
RegionEuCentral1,
RegionApNortheast1,
RegionApNortheast2,
RegionApSoutheast1,
RegionApSoutheast2,
RegionSaEast1,
RegionApSouth1,
RegionCaCentral1,
}
}

func isValidRegion(region AwsRegion) error {
validRegions := GetValidRegions()
for _, validRegion := range validRegions {
if validRegion == region {
return nil
}
}

return fmt.Errorf("invalid region. region must be one of: %s", validRegions)
}

func GetValidLogsType() []AwsLogsType {
return []AwsLogsType{
LogsTypeElb,
LogsTypeVpcFlow,
LogsTypeS3Access,
LogsTypeCloudfront,
}
}

func isValidLogsType(logsType AwsLogsType) error {
validLogsTypes := GetValidLogsType()

for _, validType := range validLogsTypes {
if validType == logsType {
return nil
}
}

return fmt.Errorf("invalid logs type. logs type must be one of: %s", validLogsTypes)
}
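
Side note: `GetValidRegions` and `GetValidLogsType` are exported (per the "export valid regions & logs types" item in the commit message), so callers can pre-validate user input before building a request. A small illustrative sketch, where `regionFromConfig` and its `userRegion` argument are hypothetical caller-side names:

```go
// Hypothetical caller-side check built on the exported helpers.
func regionFromConfig(userRegion string) (s3_fetcher.AwsRegion, error) {
	candidate := s3_fetcher.AwsRegion(userRegion)
	for _, r := range s3_fetcher.GetValidRegions() {
		if r == candidate {
			return candidate, nil
		}
	}
	return "", fmt.Errorf("unsupported region %q, expected one of %v", userRegion, s3_fetcher.GetValidRegions())
}
```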
52 changes: 52 additions & 0 deletions s3_fetcher/client_s3_fetcher_create.go
@@ -0,0 +1,52 @@
package s3_fetcher

import (
"encoding/json"
"fmt"
logzio_client "github.com/logzio/logzio_terraform_client"
"net/http"
)

const (
createS3FetcherServiceUrl = s3FetcherServiceEndpoint
createS3FetcherServiceMethod = http.MethodPost
createS3FetcherMethodCreated = http.StatusCreated
createS3FetcherStatusNotFound = http.StatusNotFound
)

// CreateS3Fetcher creates a new S3 fetcher and returns it if successful, an error otherwise
func (c *S3FetcherClient) CreateS3Fetcher(create S3FetcherRequest) (*S3FetcherResponse, error) {
err := validateCreateUpdateS3FetcherRequest(create)
if err != nil {
return nil, err
}

createS3FetcherJson, err := json.Marshal(create)
if err != nil {
return nil, err
}

res, err := logzio_client.CallLogzioApi(logzio_client.LogzioApiCallDetails{
ApiToken: c.ApiToken,
HttpMethod: createS3FetcherServiceMethod,
Url: fmt.Sprintf(createS3FetcherServiceUrl, c.BaseUrl),
Body: createS3FetcherJson,
SuccessCodes: []int{createS3FetcherMethodCreated},
NotFoundCode: createS3FetcherStatusNotFound,
ResourceId: nil,
ApiAction: operationCreateS3Fetcher,
ResourceName: s3FetcherResourceName,
})

if err != nil {
return nil, err
}

var retVal S3FetcherResponse
err = json.Unmarshal(res, &retVal)
if err != nil {
return nil, err
}

return &retVal, nil
}
31 changes: 31 additions & 0 deletions s3_fetcher/client_s3_fetcher_delete.go
@@ -0,0 +1,31 @@
package s3_fetcher

import (
"fmt"
logzio_client "github.com/logzio/logzio_terraform_client"
"net/http"
)

const (
deleteS3FetcherServiceUrl = s3FetcherServiceEndpoint + "/%d"
deleteS3FetcherServiceMethod = http.MethodDelete
deleteS3FetcherServiceSuccess = http.StatusOK
deleteS3FetcherNotFound = http.StatusNotFound
)

// DeleteS3Fetcher deletes an S3 fetcher specified by its unique id, and returns an error if a problem is encountered
func (c *S3FetcherClient) DeleteS3Fetcher(s3FetcherId int64) error {
_, err := logzio_client.CallLogzioApi(logzio_client.LogzioApiCallDetails{
ApiToken: c.ApiToken,
HttpMethod: deleteS3FetcherServiceMethod,
Url: fmt.Sprintf(deleteS3FetcherServiceUrl, c.BaseUrl, s3FetcherId),
Body: nil,
SuccessCodes: []int{deleteS3FetcherServiceSuccess},
NotFoundCode: deleteS3FetcherNotFound,
ResourceId: s3FetcherId,
ApiAction: operationDeleteS3Fetcher,
ResourceName: s3FetcherResourceName,
})

return err
}
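
A hedged teardown sketch pairing `DeleteS3Fetcher` with `ListS3Fetchers` (listed in the README table above), as one might in integration-test cleanup; the helper name is illustrative, and deleting every fetcher on the account is an assumption about what the caller wants:

```go
// deleteAllS3Fetchers removes every S3 fetcher on the account (illustrative cleanup helper).
func deleteAllS3Fetchers(client *s3_fetcher.S3FetcherClient) error {
	fetchers, err := client.ListS3Fetchers()
	if err != nil {
		return err
	}
	for _, f := range fetchers {
		if err := client.DeleteS3Fetcher(f.Id); err != nil {
			return err
		}
	}
	return nil
}
```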
42 changes: 42 additions & 0 deletions s3_fetcher/client_s3_fetcher_get.go
@@ -0,0 +1,42 @@
package s3_fetcher

import (
"encoding/json"
"fmt"
logzio_client "github.com/logzio/logzio_terraform_client"
"net/http"
)

const (
getS3FetcherServiceUrl = s3FetcherServiceEndpoint + "/%d"
getS3FetcherServiceMethod = http.MethodGet
getS3FetcherServiceSuccess = http.StatusOK
getS3FetcherServiceNotFound = http.StatusNotFound
)

// GetS3Fetcher returns an S3 fetcher given its unique identifier, an error otherwise
func (c *S3FetcherClient) GetS3Fetcher(s3FetcherId int64) (*S3FetcherResponse, error) {
res, err := logzio_client.CallLogzioApi(logzio_client.LogzioApiCallDetails{
ApiToken: c.ApiToken,
HttpMethod: getS3FetcherServiceMethod,
Url: fmt.Sprintf(getS3FetcherServiceUrl, c.BaseUrl, s3FetcherId),
Body: nil,
SuccessCodes: []int{getS3FetcherServiceSuccess},
NotFoundCode: getS3FetcherServiceNotFound,
ResourceId: s3FetcherId,
ApiAction: operationGetS3Fetcher,
ResourceName: s3FetcherResourceName,
})

if err != nil {
return nil, err
}

var s3Fetcher S3FetcherResponse
err = json.Unmarshal(res, &s3Fetcher)
if err != nil {
return nil, err
}

return &s3Fetcher, nil
}