Adds GCM Connector source #320

Merged
merged 60 commits on Jul 19, 2024
Changes from 22 commits

Commits (60)
233ffaf
adds changes for gcm connector source
Jul 5, 2024
517ffda
adds gcm api processor
Jul 5, 2024
cdc24fd
adds gcm metadata extractor
Jul 5, 2024
b602404
adds gcm task proto
Jul 5, 2024
58f91ad
adds gcm source manager
Jul 5, 2024
b63b3b2
adds gcm task executor result builder
Jul 5, 2024
31b0d43
adds gcm task executor result builder + 2
Jul 5, 2024
b91286d
adds migrations
Jul 5, 2024
16505a7
adds api processor after changes
Jul 8, 2024
80dc9ed
adds gcm task proto after changes
Jul 8, 2024
ef8c39d
adds gcm source manager after changes
Jul 8, 2024
21b4799
adds metadataextractor, api processor changes
Jul 8, 2024
ce2695f
added log_sinks in gcm metadata_extractor and api_processor
Jul 8, 2024
0644834
adds asset manager
Jul 9, 2024
dfb24a4
added asset pb2.pyi file
Jul 9, 2024
f62c620
adds gcm task executor after changes
Jul 9, 2024
2c22ee3
minor change in models
Jul 10, 2024
9377aaa
Merge pull request #325 from DrDroidLab/gcmapiprocessor
Karansirohi2202 Jul 10, 2024
6638aaa
migrations
Jul 10, 2024
4572e4c
Merge pull request #326 from DrDroidLab/gcmsourcemanager
Karansirohi2202 Jul 11, 2024
4d5f8a2
Merge pull request #329 from DrDroidLab/gcmassetmanager
Karansirohi2202 Jul 11, 2024
ad9f227
back merge with parent branch
droid-mohit Jul 11, 2024
d514b69
metric assets done, logs left
Jul 15, 2024
fdd23e8
Made final changes to be reviewed(removed log assets)
Jul 16, 2024
157b517
Merge branch 'integration' of https://github.com/DrDroidLab/PlayBooks…
jayeshsadhwani99 Jul 17, 2024
b04b353
deleted migrations
Jul 17, 2024
0b7e7b2
Merge branch 'integration' of github.com:DrDroidLab/PlayBooks into ne…
Jul 17, 2024
3200a44
Merge branch 'new_integration/gcp' of https://github.com/DrDroidLab/P…
jayeshsadhwani99 Jul 17, 2024
e6db0c6
minor change
Jul 17, 2024
6752e41
Merge branch 'new_integration/gcp' of https://github.com/DrDroidLab/P…
jayeshsadhwani99 Jul 17, 2024
cd5354e
add gcm assets
jayeshsadhwani99 Jul 17, 2024
9430c1a
basic builders and task types added
jayeshsadhwani99 Jul 17, 2024
1c54909
changes made
Jul 17, 2024
3a7c582
Merge branch 'new_integration/gcp' of https://github.com/DrDroidLab/P…
jayeshsadhwani99 Jul 17, 2024
1ee4220
metrics
jayeshsadhwani99 Jul 17, 2024
f71110c
show step information
jayeshsadhwani99 Jul 17, 2024
df06746
push migrations
jayeshsadhwani99 Jul 17, 2024
1dc6660
gcm logs form
jayeshsadhwani99 Jul 17, 2024
a90b7bd
Merge branch 'integration' of github.com:DrDroidLab/PlayBooks into ne…
Jul 18, 2024
d9aa61b
using mql queries to get the metrics
Jul 18, 2024
e158bfd
back merge with integration
jayeshsadhwani99 Jul 18, 2024
e8007d4
Merge branch '@feature/gcp_ui' of https://github.com/DrDroidLab/PlayB…
jayeshsadhwani99 Jul 18, 2024
ce77e0c
Merge branch '@feature/gcp_ui' of github.com:DrDroidLab/PlayBooks int…
Jul 18, 2024
b85a069
fix gcm ui
jayeshsadhwani99 Jul 18, 2024
1fb3bb1
Merge branch '@feature/gcp_ui' of github.com:DrDroidLab/PlayBooks int…
Jul 18, 2024
80c3353
fix query
jayeshsadhwani99 Jul 18, 2024
e60fcd8
Merge branch '@feature/gcp_ui' of github.com:DrDroidLab/PlayBooks int…
Jul 18, 2024
4a0710c
end to end testing for metrics done
Jul 18, 2024
07c0720
end to end testing of logs done
Jul 18, 2024
e473041
did some clean up
Jul 18, 2024
30d4f7c
back merge with integration
droid-mohit Jul 19, 2024
c147b3d
back merge with remote branch
droid-mohit Jul 19, 2024
adb86fe
removes unwanted files
droid-mohit Jul 19, 2024
7a9b524
updates impl for gcm source
droid-mohit Jul 19, 2024
ffb0cf4
updates gcm logs contract
droid-mohit Jul 19, 2024
25650d7
adds gcm connector
droid-mohit Jul 19, 2024
ced69bc
back merge with integration
droid-mohit Jul 19, 2024
d10629a
updates form for gcm logs
droid-mohit Jul 19, 2024
5f2387c
Merge branch 'integration' of github.com:DrDroidLab/PlayBooks into ne…
droid-mohit Jul 19, 2024
5a66c2c
adds optional fields
droid-mohit Jul 19, 2024
55 changes: 55 additions & 0 deletions connectors/assets/extractor/gcm_metadata_extractor.py
@@ -0,0 +1,55 @@
import json
import logging
from connectors.assets.extractor.metadata_extractor import SourceMetadataExtractor
from executor.source_processors.gcm_api_processor import GcmApiProcessor
from protos.base_pb2 import Source, SourceModelType

logger = logging.getLogger(__name__)


class GcmSourceMetadataExtractor(SourceMetadataExtractor):

    def __init__(self, project_id, service_account_json, account_id=None, connector_id=None):
        self.__project_id = project_id
        self.__service_account_json = service_account_json
        super().__init__(account_id, connector_id, Source.GCM)

    def extract_metric_descriptors(self, save_to_db=False):
        model_type = SourceModelType.GCM_METRIC
        model_data = {}
        gcm_api_processor = GcmApiProcessor(self.__project_id, self.__service_account_json)

        try:
            all_metric_descriptors = gcm_api_processor.fetch_metrics_list()
            for descriptor in all_metric_descriptors:
                metric_type = descriptor['type']
                description = descriptor.get('description', '')
                labels = descriptor.get('labels', [])

                model_data[metric_type] = {
                    'description': description,
                    'labels': labels
                }

                if save_to_db:
                    self.create_or_update_model_metadata(model_type, metric_type, model_data[metric_type])
        except Exception as e:
            logger.error(f'Error extracting metric descriptors: {e}')

        return model_data

    def extract_log_sinks(self, save_to_db=False):
        model_type = SourceModelType.GCM_LOG_SINK
        model_data = {}
        gcm_api_processor = GcmApiProcessor(self.__project_id, self.__service_account_json)

        try:
            all_log_sinks = gcm_api_processor.fetch_log_sinks()
            model_data[self.__project_id] = {'log_sinks': all_log_sinks}

            if save_to_db:
                self.create_or_update_model_metadata(model_type, self.__project_id, model_data[self.__project_id])
        except Exception as e:
            logger.error(f'Error extracting log sinks: {e}')

        return model_data
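
For reference, a minimal usage sketch of the extractor above, driven outside the connector scheduler. The project id, service-account JSON, account_id and connector_id values are placeholders; only the constructor and the two extract_* methods shown in this file are assumed.

import json

from connectors.assets.extractor.gcm_metadata_extractor import GcmSourceMetadataExtractor

# Placeholder inputs: a GCP project id and the service-account key JSON the connector stores.
project_id = "my-gcp-project"
service_account_json = json.dumps({"type": "service_account"})  # truncated placeholder key

extractor = GcmSourceMetadataExtractor(project_id, service_account_json,
                                       account_id=1, connector_id=1)  # hypothetical ids

# Fetch metric descriptors into a plain dict without persisting them.
metric_metadata = extractor.extract_metric_descriptors(save_to_db=False)

# Fetch log sinks and persist them via create_or_update_model_metadata on the base extractor.
log_sink_metadata = extractor.extract_log_sinks(save_to_db=True)
print(len(metric_metadata), list(log_sink_metadata.keys()))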
2 changes: 2 additions & 0 deletions connectors/assets/extractor/metadata_extractor_facade.py
@@ -4,6 +4,7 @@
from connectors.assets.extractor.datadog_metadata_extractor import DatadogSourceMetadataExtractor
from connectors.assets.extractor.eks_metadata_extractor import EksSourceMetadataExtractor
from connectors.assets.extractor.elastic_search_metadata_extractor import ElasticSearchSourceMetadataExtractor
from connectors.assets.extractor.gcm_metadata_extractor import GcmSourceMetadataExtractor
from connectors.assets.extractor.gke_metadata_extractor import GkeSourceMetadataExtractor
from connectors.assets.extractor.grafana_metadata_extractor import GrafanaSourceMetadataExtractor
from connectors.assets.extractor.grafana_vpc_metadata_extractor import GrafanaVpcSourceMetadataExtractor
@@ -40,3 +41,4 @@ def get_connector_metadata_extractor_class(self, connector_type: Source):
source_metadata_extractor_facade.register(Source.AZURE, AzureConnectorMetadataExtractor)
source_metadata_extractor_facade.register(Source.GKE, GkeSourceMetadataExtractor)
source_metadata_extractor_facade.register(Source.ELASTIC_SEARCH, ElasticSearchSourceMetadataExtractor)
source_metadata_extractor_facade.register(Source.GCM, GcmSourceMetadataExtractor)
Contributor comment: remove this from here for now

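For context, a rough sketch of how the registration above would be consumed while it is in place; the facade instance and lookup method come from the hunk above, and the constructor arguments are placeholders only.

from connectors.assets.extractor.metadata_extractor_facade import source_metadata_extractor_facade
from protos.base_pb2 import Source

# Resolve the extractor class registered for GCM and drive it directly.
extractor_cls = source_metadata_extractor_facade.get_connector_metadata_extractor_class(Source.GCM)
extractor = extractor_cls("my-gcp-project", "{...service account json...}",
                          account_id=1, connector_id=1)  # placeholder arguments
extractor.extract_metric_descriptors(save_to_db=True)
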
2 changes: 2 additions & 0 deletions connectors/assets/manager/asset_manager_facade.py
@@ -8,6 +8,7 @@
from connectors.assets.manager.dd_asset_manager import DatadogAssetManager
from connectors.assets.manager.eks_assets_manager import EKSAssetManager
from connectors.assets.manager.es_asset_manager import ElasticSearchAssetManager
from connectors.assets.manager.gcm_assets_manager import GcmAssetManager
from connectors.assets.manager.gke_asset_manager import GkeAssetManager
from connectors.assets.manager.grafana_asset_manager import GrafanaAssetManager
from connectors.assets.manager.mimir_assets_manager import MimirAssetManager
@@ -66,3 +67,4 @@ def get_asset_model_values(self, connector: ConnectorProto, model_type: SourceMo
asset_manager_facade.register(Source.AZURE, AzureAssetManager())
asset_manager_facade.register(Source.GKE, GkeAssetManager())
asset_manager_facade.register(Source.ELASTIC_SEARCH, ElasticSearchAssetManager())
asset_manager_facade.register(Source.GCM, GcmAssetManager())
116 changes: 116 additions & 0 deletions connectors/assets/manager/gcm_assets_manager.py
@@ -0,0 +1,116 @@
from datetime import timezone
from google.protobuf.wrappers_pb2 import UInt64Value, StringValue

from connectors.assets.manager.asset_manager import ConnectorAssetManager
from protos.connectors.assets.asset_pb2 import \
    AccountConnectorAssetsModelFilters as AccountConnectorAssetsModelFiltersProto, AccountConnectorAssets, \
    ConnectorModelTypeOptions
from protos.connectors.assets.gcm_asset_pb2 import GcmLogSinkAssetOptions, GcmMetricAssetOptions, \
    GcmAssets, GcmMetricAssetModel as GcmMetricAssetProto, \
    GcmAssetModel as GcmAssetModelProto, GcmLogSinkAssetModel as GcmLogSinkAssetModelProto
from protos.base_pb2 import Source, SourceModelType
from protos.connectors.connector_pb2 import Connector as ConnectorProto


class GcmAssetManager(ConnectorAssetManager):
    def __init__(self):
        self.source = Source.GCM
        self.asset_type_callable_map = {
            SourceModelType.GCM_LOG_SINK: {
                'options': self.get_gcm_log_sink_options,
                'values': self.get_gcm_log_sink_values,
            },
            SourceModelType.GCM_METRIC: {
                'options': self.get_gcm_metric_options,
                'values': self.get_gcm_metric_values,
            }
        }

    @staticmethod
    def get_gcm_log_sink_options(gcm_log_sink_assets):
        all_project_ids = []
        for asset in gcm_log_sink_assets:
            all_project_ids.append(asset.model_uid)
        options = GcmLogSinkAssetOptions(project_ids=all_project_ids)
        return ConnectorModelTypeOptions(model_type=SourceModelType.GCM_LOG_SINK,
                                         gcm_log_sink_model_options=options)

    @staticmethod
    def get_gcm_log_sink_values(connector: ConnectorProto, filters: AccountConnectorAssetsModelFiltersProto,
                                gcm_log_sink_assets):

        which_one_of = filters.WhichOneof('filters')
        if which_one_of and which_one_of != 'gcm_log_sink_model_filters':
            raise ValueError(f"Invalid filter: {which_one_of}")

        options: GcmLogSinkAssetOptions = filters.gcm_log_sink_model_filters
        if options.project_ids:
            gcm_log_sink_assets = gcm_log_sink_assets.filter(model_uid__in=options.project_ids)

        gcm_log_sink_protos = []
        for asset in gcm_log_sink_assets:
            gcm_log_sink_protos.append(GcmAssetModelProto(
                id=UInt64Value(value=asset.id), connector_type=asset.connector_type,
                type=asset.model_type,
                last_updated=int(asset.updated_at.replace(tzinfo=timezone.utc).timestamp()) if (
                    asset.updated_at) else None,
                gcm_log_sink=GcmLogSinkAssetModelProto(
                    project_id=StringValue(value=asset.model_uid),
                    log_sinks=asset.metadata.get('log_sinks', []))
            ))

        return AccountConnectorAssets(gcm=GcmAssets(assets=gcm_log_sink_protos))

    @staticmethod
    def get_gcm_metric_options(gcm_metric_assets):
        all_namespaces = []
        for asset in gcm_metric_assets:
            all_namespaces.append(asset.model_uid)
        options = GcmMetricAssetOptions(namespaces=all_namespaces)
        return ConnectorModelTypeOptions(model_type=SourceModelType.GCM_METRIC,
                                         gcm_metric_model_options=options)

    @staticmethod
    def get_gcm_metric_values(connector: ConnectorProto, filters: AccountConnectorAssetsModelFiltersProto,
                              gcm_metric_assets):
        which_one_of = filters.WhichOneof('filters')
Contributor comment: There is change expected in GCM Metric asset. Update this function accordingly.
        if which_one_of and which_one_of != 'gcm_metric_model_filters':
            raise ValueError(f"Invalid filter: {which_one_of}")

        options: GcmMetricAssetOptions = filters.gcm_metric_model_filters
        if options.namespaces:
            gcm_metric_assets = gcm_metric_assets.filter(model_uid__in=options.namespaces)

        gcm_metric_asset_protos = []
        for asset in gcm_metric_assets:
            all_metrics = []
            all_label_value_metric_map = {}
            for metric_type, labels in asset.metadata.items():
                for label in labels:
                    label_name = label['key']
                    label_values = label['values']
                    all_label_value_metric_dict = all_label_value_metric_map.get(label_name, {})
                    all_label_value_metric_dict_values = all_label_value_metric_dict.get('values', [])
                    all_label_value_metric_dict_values.extend(label_values)
                    all_label_value_metric_dict['values'] = list(set(all_label_value_metric_dict_values))

                    all_label_value_metric_dict_metrics = all_label_value_metric_dict.get('metrics', [])
                    all_label_value_metric_dict_metrics.append(metric_type)
                    all_label_value_metric_dict['metrics'] = list(set(all_label_value_metric_dict_metrics))

                    all_label_value_metric_map[label_name] = all_label_value_metric_dict

            for label_name, label_values_metrics in all_label_value_metric_map.items():
                all_metrics.append(GcmMetricAssetProto.MetricLabel(name=StringValue(value=label_name),
                                                                   values=label_values_metrics['values'],
                                                                   metrics=label_values_metrics['metrics']))
            gcm_metric_proto = GcmMetricAssetProto(namespace=StringValue(value=asset.model_uid),
                                                   label_value_metric_map=all_metrics)
            gcm_metric_asset_protos.append(GcmAssetModelProto(
                id=UInt64Value(value=asset.id), connector_type=asset.connector_type,
                type=asset.model_type,
                last_updated=int(asset.updated_at.replace(tzinfo=timezone.utc).timestamp()) if (
                    asset.updated_at) else None,
                gcm_metric=gcm_metric_proto))

        return AccountConnectorAssets(gcm=GcmAssets(assets=gcm_metric_asset_protos))
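
A hedged sketch of how these filter protos feed the log-sink lookup above. AccountConnectorAssetsModelFilters, GcmLogSinkAssetOptions and the oneof field name come from this file; the connector proto and the Django queryset of stored GCM_LOG_SINK rows are assumed inputs.

from connectors.assets.manager.gcm_assets_manager import GcmAssetManager
from protos.connectors.assets.asset_pb2 import \
    AccountConnectorAssetsModelFilters as AccountConnectorAssetsModelFiltersProto
from protos.connectors.assets.gcm_asset_pb2 import GcmLogSinkAssetOptions


def list_gcm_log_sinks_for_project(connector, log_sink_assets, project_id):
    # `connector` is a Connector proto and `log_sink_assets` a queryset of stored
    # GCM_LOG_SINK metadata rows (assumed shapes, mirroring the .filter(...) call above).
    filters = AccountConnectorAssetsModelFiltersProto(
        gcm_log_sink_model_filters=GcmLogSinkAssetOptions(project_ids=[project_id]))
    return GcmAssetManager.get_gcm_log_sink_values(connector, filters, log_sink_assets)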
23 changes: 23 additions & 0 deletions connectors/migrations/0022_alter_connectorkey_key_type_and_more.py
@@ -0,0 +1,23 @@
# Generated by Django 4.1.13 on 2024-07-10 07:39

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('connectors', '0021_alter_connectorkey_key_type'),
]

operations = [
migrations.AlterField(
model_name='connectorkey',
name='key_type',
field=models.IntegerField(blank=True, choices=[(0, 'UNKNOWN_SKT'), (1, 'SENTRY_API_KEY'), (6, 'SENTRY_ORG_SLUG'), (2, 'DATADOG_APP_KEY'), (3, 'DATADOG_API_KEY'), (15, 'DATADOG_AUTH_TOKEN'), (18, 'DATADOG_API_DOMAIN'), (4, 'NEWRELIC_API_KEY'), (5, 'NEWRELIC_APP_ID'), (7, 'NEWRELIC_QUERY_KEY'), (19, 'NEWRELIC_API_DOMAIN'), (8, 'SLACK_BOT_AUTH_TOKEN'), (9, 'SLACK_CHANNEL_ID'), (46, 'SLACK_APP_ID'), (10, 'HONEYBADGER_USERNAME'), (11, 'HONEYBADGER_PASSWORD'), (12, 'HONEYBADGER_PROJECT_ID'), (13, 'AWS_ACCESS_KEY'), (14, 'AWS_SECRET_KEY'), (20, 'AWS_REGION'), (23, 'AWS_ASSUMED_ROLE_ARN'), (40, 'EKS_ROLE_ARN'), (16, 'GOOGLE_CHAT_BOT_OAUTH_TOKEN'), (17, 'GOOGLE_CHAT_BOT_SPACES'), (21, 'GRAFANA_HOST'), (22, 'GRAFANA_API_KEY'), (24, 'CLICKHOUSE_INTERFACE'), (25, 'CLICKHOUSE_HOST'), (26, 'CLICKHOUSE_PORT'), (27, 'CLICKHOUSE_USER'), (28, 'CLICKHOUSE_PASSWORD'), (29, 'GCM_PROJECT_ID'), (30, 'GCM_SERVICE_ACCOUNT_JSON'), (31, 'GCM_CLIENT_EMAIL'), (32, 'GCM_TOKEN_URI'), (33, 'POSTGRES_HOST'), (34, 'POSTGRES_USER'), (35, 'POSTGRES_PASSWORD'), (36, 'POSTGRES_PORT'), (37, 'POSTGRES_DATABASE'), (38, 'POSTGRES_OPTIONS'), (39, 'SQL_DATABASE_CONNECTION_STRING_URI'), (41, 'PAGER_DUTY_API_KEY'), (63, 'PAGER_DUTY_CONFIGURED_EMAIL'), (42, 'OPS_GENIE_API_KEY'), (43, 'AGENT_PROXY_HOST'), (44, 'AGENT_PROXY_API_KEY'), (45, 'GITHUB_ACTIONS_TOKEN'), (47, 'OPEN_AI_API_KEY'), (49, 'REMOTE_SERVER_PEM'), (50, 'REMOTE_SERVER_USER'), (51, 'REMOTE_SERVER_HOST'), (52, 'REMOTE_SERVER_PASSWORD'), (53, 'MIMIR_HOST'), (54, 'X_SCOPE_ORG_ID'), (55, 'SSL_VERIFY'), (56, 'AZURE_SUBSCRIPTION_ID'), (57, 'AZURE_TENANT_ID'), (58, 'AZURE_CLIENT_ID'), (59, 'AZURE_CLIENT_SECRET'), (60, 'GKE_PROJECT_ID'), (61, 'GKE_SERVICE_ACCOUNT_JSON'), (62, 'MS_TEAMS_CONNECTOR_WEBHOOK_URL'), (64, 'ELASTIC_SEARCH_PROTOCOL'), (65, 'ELASTIC_SEARCH_HOST'), (66, 'ELASTIC_SEARCH_PORT'), (67, 'ELASTIC_SEARCH_API_KEY_ID'), (68, 'ELASTIC_SEARCH_API_KEY'), (69, 'GRAFANA_LOKI_HOST'), (70, 'GRAFANA_LOKI_API_KEY')], default=0, null=True),
),
migrations.AlterField(
model_name='connectormetadatamodelstore',
name='model_type',
field=models.IntegerField(choices=[(0, 'UNKNOWN_MT'), (1, 'NEW_RELIC_POLICY'), (2, 'NEW_RELIC_CONDITION'), (3, 'NEW_RELIC_ENTITY'), (4, 'NEW_RELIC_ENTITY_DASHBOARD'), (5, 'NEW_RELIC_ENTITY_APPLICATION'), (6, 'NEW_RELIC_NRQL'), (101, 'DATADOG_MONITOR'), (102, 'DATADOG_DASHBOARD'), (103, 'DATADOG_LIVE_INTEGRATION_AWS'), (104, 'DATADOG_LIVE_INTEGRATION_AWS_LOG'), (105, 'DATADOG_LIVE_INTEGRATION_AZURE'), (106, 'DATADOG_LIVE_INTEGRATION_CLOUDFLARE'), (107, 'DATADOG_LIVE_INTEGRATION_FASTLY'), (108, 'DATADOG_LIVE_INTEGRATION_GCP'), (109, 'DATADOG_LIVE_INTEGRATION_CONFLUENT'), (110, 'DATADOG_SERVICE'), (111, 'DATADOG_METRIC'), (112, 'DATADOG_QUERY'), (201, 'CLOUDWATCH_METRIC'), (202, 'CLOUDWATCH_LOG_GROUP'), (301, 'GRAFANA_DATASOURCE'), (302, 'GRAFANA_DASHBOARD'), (303, 'GRAFANA_TARGET_METRIC_PROMQL'), (304, 'GRAFANA_PROMETHEUS_DATASOURCE'), (401, 'CLICKHOUSE_DATABASE'), (501, 'SLACK_CHANNEL'), (601, 'MARKDOWN'), (602, 'IFRAME'), (701, 'POSTGRES_QUERY'), (801, 'EKS_CLUSTER'), (901, 'SQL_DATABASE_CONNECTION_RAW_QUERY'), (1001, 'AZURE_WORKSPACE'), (1100, 'SSH_SERVER'), (1201, 'GRAFANA_MIMIR_PROMQL'), (1301, 'GKE_CLUSTER'), (1401, 'MS_TEAMS_CHANNEL'), (1501, 'PAGERDUTY_INCIDENT'), (1601, 'ELASTIC_SEARCH_INDEX'), (1701, 'GCM_METRIC'), (1702, 'GCM_LOG_SINK')], db_index=True, default=0),
),
]
10 changes: 8 additions & 2 deletions connectors/models.py
@@ -258,6 +258,12 @@
SourceKeyType.X_SCOPE_ORG_ID
]
],
Source.GCM: [
[
SourceKeyType.GCM_PROJECT_ID,
SourceKeyType.GCM_SERVICE_ACCOUNT_JSON,
]
]
}

integrations_connector_key_display_name_map = {
@@ -281,7 +287,7 @@
SourceKeyType.AWS_SECRET_KEY: 'AWS Secret Key',
SourceKeyType.AWS_REGION: 'AWS Region',
SourceKeyType.GCM_PROJECT_ID: 'Project ID',
-SourceKeyType.GCM_PRIVATE_KEY: 'Private Key',
+SourceKeyType.GCM_SERVICE_ACCOUNT_JSON: 'Service Account Json',
SourceKeyType.GCM_CLIENT_EMAIL: 'Client Email',
SourceKeyType.GCM_TOKEN_URI: 'Token URI',
SourceKeyType.CLICKHOUSE_INTERFACE: 'Interface',
@@ -443,7 +449,7 @@ def proto(self):
SourceKeyType.AGENT_PROXY_HOST,
SourceKeyType.AWS_ASSUMED_ROLE_ARN,
SourceKeyType.CLICKHOUSE_USER, SourceKeyType.CLICKHOUSE_PASSWORD,
-SourceKeyType.GCM_PROJECT_ID, SourceKeyType.GCM_PRIVATE_KEY,
+SourceKeyType.GCM_PROJECT_ID, SourceKeyType.GCM_SERVICE_ACCOUNT_JSON,
SourceKeyType.GCM_CLIENT_EMAIL, SourceKeyType.PAGER_DUTY_API_KEY,
SourceKeyType.POSTGRES_PASSWORD, SourceKeyType.POSTGRES_USER,
SourceKeyType.OPS_GENIE_API_KEY,
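
The new Source.GCM entry means a GCM connector is only complete when both keys are present. A small illustrative check under that assumption; the key-set shape mirrors the map above, while the helper itself and the key_type attribute on connector keys are assumptions.

from protos.base_pb2 import SourceKeyType

GCM_REQUIRED_KEY_SETS = [
    [SourceKeyType.GCM_PROJECT_ID, SourceKeyType.GCM_SERVICE_ACCOUNT_JSON],
]


def gcm_keys_complete(connector_keys):
    # `connector_keys` is assumed to be an iterable of objects exposing `key_type`,
    # matching how generate_credentials_dict iterates keys later in this PR.
    present = {key.key_type for key in connector_keys}
    return any(set(required).issubset(present) for required in GCM_REQUIRED_KEY_SETS)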
10 changes: 9 additions & 1 deletion connectors/utils.py
@@ -24,6 +24,7 @@
from executor.source_processors.slack_api_processor import SlackApiProcessor
from executor.source_processors.vpc_api_processor import VpcApiProcessor
from executor.source_processors.ms_teams_api_processor import MSTeamsApiProcessor
from executor.source_processors.gcm_api_processor import GcmApiProcessor
from management.crud.task_crud import get_or_create_task, check_scheduled_or_running_task_run_for_task
from management.models import TaskRun, PeriodicTaskStatus
from protos.base_pb2 import SourceKeyType, Source
@@ -50,7 +51,8 @@
Source.MS_TEAMS: MSTeamsApiProcessor,
Source.PAGER_DUTY: PdApiProcessor,
Source.ELASTIC_SEARCH: ElasticSearchApiProcessor,
-Source.GRAFANA_LOKI: GrafanaLokiApiProcessor
+Source.GRAFANA_LOKI: GrafanaLokiApiProcessor,
+Source.GCM: GcmApiProcessor
}


@@ -250,6 +252,12 @@ def generate_credentials_dict(connector_type, connector_keys):
credentials_dict['ssl_verify'] = 'true'
if conn_key.key.value.lower() == 'false':
credentials_dict['ssl_verify'] = 'false'
elif connector_type == Source.GCM:
for conn_key in connector_keys:
if conn_key.key_type == SourceKeyType.GCM_PROJECT_ID:
credentials_dict['project_id'] = conn_key.key.value
elif conn_key.key_type == SourceKeyType.GCM_SERVICE_ACCOUNT_JSON:
credentials_dict['service_account_json'] = conn_key.key.value
else:
return None
return credentials_dict
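
To make the new branch concrete, a hedged sketch of how the resulting credentials could feed the GCM processor. The key objects below are stand-ins exposing only the attributes the loop above reads, and the two-argument GcmApiProcessor call mirrors its use in gcm_metadata_extractor.py.

from types import SimpleNamespace

from connectors.utils import generate_credentials_dict
from executor.source_processors.gcm_api_processor import GcmApiProcessor
from protos.base_pb2 import Source, SourceKeyType


def _key(key_type, value):
    # Stand-in for a stored connector key: only key_type and key.value are read above.
    return SimpleNamespace(key_type=key_type, key=SimpleNamespace(value=value))


keys = [
    _key(SourceKeyType.GCM_PROJECT_ID, "my-gcp-project"),
    _key(SourceKeyType.GCM_SERVICE_ACCOUNT_JSON, "{...service account json...}"),
]

creds = generate_credentials_dict(Source.GCM, keys)
# creds -> {'project_id': 'my-gcp-project', 'service_account_json': '{...service account json...}'}
gcm_api_processor = GcmApiProcessor(creds['project_id'], creds['service_account_json'])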
2 changes: 2 additions & 0 deletions executor/playbook_source_facade.py
@@ -9,6 +9,7 @@
from executor.source_task_executors.bash_task_executor import BashSourceManager
from executor.source_task_executors.documentation_task_executor import DocumentationSourceManager
from executor.source_task_executors.elastic_search_task_executor import ElasticSearchSourceManager
from executor.source_task_executors.gcm_task_executor import GcmSourceManager
from executor.source_task_executors.gke_task_executor import GkeSourceManager
from executor.source_task_executors.grafana_loki_task_executor import GrafanaLokiSourceManager
from executor.source_task_executors.grafana_task_executor import GrafanaSourceManager
@@ -107,6 +108,7 @@ def execute_task(self, account_id, time_range, global_variable_set: Dict, task:
playbook_source_facade.register(Source.GRAFANA_MIMIR, MimirSourceManager())
playbook_source_facade.register(Source.AZURE, AzureSourceManager())
playbook_source_facade.register(Source.GKE, GkeSourceManager())
playbook_source_facade.register(Source.GCM, GcmSourceManager())
playbook_source_facade.register(Source.GRAFANA_LOKI, GrafanaLokiSourceManager())

playbook_source_facade.register(Source.POSTGRES, PostgresSourceManager())
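
Finally, a rough sketch of how a GCM playbook task would be routed after this registration. execute_task's full signature is truncated in the hunk above, so the task argument is treated as an opaque playbook-task proto and the wrapper itself is illustrative.

from executor.playbook_source_facade import playbook_source_facade


def run_gcm_task(account_id, time_range, task, global_variable_set=None):
    # The facade dispatches on the task's source, so a GCM task ends up in the
    # GcmSourceManager registered above (assumed dispatch behaviour).
    return playbook_source_facade.execute_task(account_id, time_range,
                                               global_variable_set or {}, task)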