Compare commits

5 Commits

Author SHA1 Message Date
carnal0wnage
05276732f1 update ec2 lib to do an userdata instance attribute check 2018-11-29 19:38:58 -05:00
carnal0wnage
5f27bcdfe6 gcp 2018-09-26 16:54:49 -04:00
carnal0wnage
2779af7787 iam checks + storage checks 2018-09-26 15:58:15 -04:00
carnal0wnage
b49df03312 basic gcp iam check 2018-09-26 14:39:20 -04:00
carnal0wnage
9a961bd48c gcp stuff 2018-09-26 14:11:30 -04:00
8 changed files with 281 additions and 17 deletions

View File

@@ -354,6 +354,56 @@ def get_instance_volume_details():
print("CTRL-C received, exiting...")
def get_instance_userdata():
    '''
    Show the userData instance attribute for every EC2 instance in each region.

    For each region in the module-level `regions` list, list instances, then
    fetch each instance's userData attribute and print the base64-decoded
    value. Relies on module globals: regions, AWS_ACCESS_KEY_ID, pp (pprint),
    boto3, botocore, base64.
    (BUG FIX: previous docstring was copy-pasted from the volumes function.)
    '''
    try:
        for region in regions:
            try:
                client = boto3.client('ec2', region_name=region)
                instances = client.describe_instances()
            except botocore.exceptions.ClientError as e:
                # Per-region failures are non-fatal: report and move to the next region.
                if e.response['Error']['Code'] == 'UnauthorizedOperation':
                    print('{} : (UnauthorizedOperation) when calling the DescribeInstances in ({}) -- sure you have ec2 permissions?' .format(AWS_ACCESS_KEY_ID, region))
                    continue
                elif e.response['Error']['Code'] == 'AuthFailure':
                    print('{} : (AuthFailure) when calling the DescribeInstances in ({}) -- key is invalid or no permissions.' .format(AWS_ACCESS_KEY_ID, region))
                    continue
                elif e.response['Error']['Code'] == 'OptInRequired':
                    print('{} : (OptInRequired) Has permissions but isnt signed up for service in ({})- ' .format(AWS_ACCESS_KEY_ID, region))
                    continue
                else:
                    print(e)
                    continue
            if len(instances['Reservations']) <= 0:
                print("[-] List instances allowed for {} but no results [-]" .format(region))
            else:
                for r in instances['Reservations']:
                    for i in r['Instances']:
                        try:
                            userData = client.describe_instance_attribute(InstanceId=i['InstanceId'], Attribute='userData')
                            print("Instance ID: {} \n" .format(i['InstanceId']))
                            # BUG FIX: was `len(...) >= 0`, which is always true and made
                            # the "no Userdata" branch unreachable. Also use .get() so a
                            # missing 'Value' key falls through to the message instead of
                            # raising KeyError.
                            if len(userData['UserData'].get('Value', '')) > 0:
                                print("Decoded Userdata values:")
                                pp.pprint(base64.b64decode(userData['UserData']['Value']).decode("utf-8"))
                                print("\n")
                            else:
                                print("no Userdata for: {}\n".format(i['InstanceId']))
                        except KeyError:
                            # BUG FIX: was a bare `next` expression (a no-op); report the
                            # missing attribute instead of silently doing nothing.
                            print("no Userdata for: {}\n".format(i['InstanceId']))
    except botocore.exceptions.ClientError as e:
        # BUG FIX: error text said "DescribeVolumes" (copy-paste from the volumes
        # function); this path is reached via DescribeInstanceAttribute.
        if e.response['Error']['Code'] == 'UnauthorizedOperation':
            print('{} : (UnauthorizedOperation) when calling DescribeInstanceAttribute -- sure you have required ec2 permissions?' .format(AWS_ACCESS_KEY_ID))
        elif e.response['Error']['Code'] == 'SubscriptionRequiredException':
            print('{} : Has permissions but isnt signed up for service - usually means you have a root account' .format(AWS_ACCESS_KEY_ID))
        else:
            print(e)
    except KeyboardInterrupt:
        print("CTRL-C received, exiting...")
def get_instance_volume_details2():
'''
show volumes by instanceId but instanceID->volume1 of ID, instanceID->volume2 of ID but more details.

28
libs/gcp/gcp_bigquery.py Normal file
View File

@@ -0,0 +1,28 @@
'''
GCP BigQuery functions for WeirdAAL
'''
import google.auth
import googleapiclient.discovery
import os
import sys
from google.oauth2 import service_account
from googleapiclient.errors import HttpError
from google.cloud import bigquery, exceptions
from google.cloud.exceptions import *
def gcp_bigquery_list_datasets(project_id, credentials):
    '''
    List the BigQuery datasets visible in the given project.

    project_id  -- GCP project to inspect.
    credentials -- service-account credentials for the BigQuery client.

    BUG FIX: project_id was accepted but ignored (credentials.project_id was
    used instead), and credentials were never handed to the client. The only
    visible caller passes credentials.project_id, so this is behavior-compatible.
    '''
    bigquery_client = bigquery.Client(project=project_id, credentials=credentials)
    datasets = list(bigquery_client.list_datasets())
    project = bigquery_client.project
    if datasets:
        print('Datasets in project {}:'.format(project))
        for dataset in datasets:  # API request(s)
            print('\t{}'.format(dataset.dataset_id))
    else:
        print('{} project does not contain any datasets.'.format(project))

43
libs/gcp/gcp_iam.py Normal file
View File

@@ -0,0 +1,43 @@
'''
GCP IAM functions for WeirdAAL
'''
import google.auth
import googleapiclient.discovery
import os
import sys
from google.oauth2 import service_account
from googleapiclient.errors import HttpError
# [START iam_list_keys]
def gcp_iam_list_keys(service_account_email, service):
    """Lists all keys for a service account."""
    # pylint: disable=no-member
    resource_name = 'projects/-/serviceAccounts/' + service_account_email
    response = service.projects().serviceAccounts().keys().list(name=resource_name).execute()
    for entry in response['keys']:
        print('Key: ' + entry['name'])
# [END iam_list_keys]
# [START iam_list_service_accounts]
def gcp_iam_list_service_accounts(project_id, service):
    """Lists all service accounts for the current project."""
    # pylint: disable=no-member
    response = service.projects().serviceAccounts().list(
        name='projects/' + project_id).execute()
    for acct in response['accounts']:
        print('Name: ' + acct['name'])
        print('Email: ' + acct['email'])
        print(' ')
    return response
# [END iam_list_service_accounts]

39
libs/gcp/gcp_storage.py Normal file
View File

@@ -0,0 +1,39 @@
'''
GCP Storage functions for WeirdAAL
'''
import google.auth
import googleapiclient.discovery
import os
import sys
from google.oauth2 import service_account
from googleapiclient.errors import HttpError
from google.cloud import storage, exceptions
from google.cloud.exceptions import *
def gcp_storage_list_buckets(credentials):
    '''
    List Google Cloud Storage buckets for the account and return their names.

    credentials -- accepted for interface consistency with the other gcp_*
    helpers. NOTE(review): currently unused -- storage.Client() picks up
    GOOGLE_APPLICATION_CREDENTIALS from the environment instead; confirm
    whether it should be passed through explicitly.

    Returns a list of bucket name strings (also printed, one per line).
    '''
    # BUG FIX: the docstring was placed after the first statement, so it was a
    # bare string expression rather than the function's docstring.
    storage_client = storage.Client()
    bucket_names = []
    for bucket in storage_client.list_buckets():
        print(bucket.name)
        bucket_names.append(bucket.name)
    return bucket_names
def gcp_storage_list_blobs(credentials, bucket_name):
    '''Lists all the blobs in the bucket.'''
    client = storage.Client()
    target_bucket = client.get_bucket(bucket_name)
    for item in target_bucket.list_blobs():
        print('\t{}'.format(item.name))
    print('\n')

View File

@@ -40,6 +40,14 @@ def module_ec2_get_instance_volume_details():
get_instance_volume_details()
def module_ec2_get_instance_userdata():
    '''
    Show userData sorted by instanceId
    python3 weirdAAL.py -m ec2_get_instance_userdata -t demo
    '''
    # Thin CLI wrapper: delegates to the ec2 lib function. The docstring above
    # is surfaced in the module listing, so keep its wording stable.
    get_instance_userdata()
def module_ec2_get_instance_volume_details2():
'''
Show volumes by instanceId but instanceID->volume1 of ID, instanceID->volume2 of ID but more details.

89
modules/gcp/gcp_recon.py Normal file
View File

@@ -0,0 +1,89 @@
'''
This module handles the core GCP recon functionality by asking all the services
that have functions that don't have arguments, if we can access them :-)
'''
from libs.gcp.gcp_iam import *
from libs.gcp.gcp_storage import *
from libs.gcp.gcp_bigquery import *
# Module-level setup: build service-account credentials from the key file
# pointed to by GOOGLE_APPLICATION_CREDENTIALS (set elsewhere in this
# changeset in weirdAAL.py) and an IAM v1 API client.
# NOTE(review): this runs at import time and will raise KeyError/IOError if
# the env var is unset or the key file is missing -- confirm that is intended.
credentials = service_account.Credentials.from_service_account_file(
    filename=os.environ['GOOGLE_APPLICATION_CREDENTIALS'],
    scopes=['https://www.googleapis.com/auth/cloud-platform'])
service = googleapiclient.discovery.build(
    'iam', 'v1', credentials=credentials)
def module_gcp_recon_all():
    '''
    Main gcp_recon_all module - attempt to connect to each of the services to see if we have some privs
    python3 weirdAAL.py -m gcp_recon_all -t demo
    '''
    # --- IAM check 1: list keys for our own service account ---
    try:
        print("GCP IAM List Keys check")
        # print(credentials)
        gcp_iam_list_keys(credentials.service_account_email, service)
    except HttpError as e:
        # print(e)
        # 403 = denied; 500/503 are treated the same (best-effort probing).
        if e.resp.status in [403, 500, 503]:
            print("\tGCP IAM access denied for {}\n".format(credentials.service_account_email))
        else:
            print('{}\n'.format(e))
    except google.auth.exceptions.RefreshError as f:
        # A refresh failure means the key itself is bad; nothing else will work.
        print(f)
        print("Service key is invalid exiting")
        sys.exit()
    # --- IAM check 2: enumerate service accounts in the project ---
    try:
        print("GCP IAM list service accounts for the current project: {}.".format(credentials.project_id))
        # print(credentials)
        gcp_iam_list_service_accounts(credentials.project_id, service)
    except HttpError as e:
        # print(e)
        if e.resp.status in [403, 500, 503]:
            print("\tIAM access denied for {}\n".format(credentials.service_account_email))
        else:
            print('{}\n'.format(e))
    except google.auth.exceptions.RefreshError as f:
        print(f)
        print("Service key is invalid exiting")
        sys.exit()
    '''
    Storage bucket access checks
    '''
    # List buckets; if any are visible, try to enumerate each bucket's blobs.
    try:
        print("GCP Storage check")
        buckets = gcp_storage_list_buckets(credentials)
        if buckets:
            print("\nAttempting to list bucket contents:")
            for a in buckets:
                print('Bucket: {}'.format(a))
                gcp_storage_list_blobs(credentials, a)
    except googleapiclient.errors.HttpError as e:
        print('{}\n'.format(e))
    except exceptions.Forbidden as e:
        print("\t Forbidden")
        print('{}\n'.format(e))
    # NOTE(review): exceptions.PermissionDenied may not be exported by
    # google.cloud.exceptions (it lives in google.api_core.exceptions in
    # current releases) -- verify against the pinned library versions.
    except exceptions.PermissionDenied as e:
        print("\t PermissionDenied")
    except google.auth.exceptions.RefreshError as f:
        print(f)
    '''
    BigQuery access checks
    '''
    try:
        print("GCP BigQuery check")
        gcp_bigquery_list_datasets(credentials.project_id, credentials)
    except googleapiclient.errors.HttpError as e:
        print('{}\n'.format(e))
    except exceptions.Forbidden as e:
        print("\t Forbidden")
        print('{}\n'.format(e))
    except exceptions.PermissionDenied as e:
        print("\t PermissionDenied")
    except google.auth.exceptions.RefreshError as f:
        print(f)

View File

@@ -7,3 +7,8 @@ s3transfer==0.1.11
six==1.11.0
virtualenv==15.1.0
tabulate==0.8.2
google-api-python-client==1.7.4
google.cloud==0.34.0
google-cloud-storage==1.12.0
google-cloud-bigquery==1.5.1

View File

@@ -17,8 +17,20 @@ import re
from tabulate import tabulate
import textwrap
# not pythonic but keeping google imports separate for now
import google.auth
import googleapiclient.discovery
from google.oauth2 import service_account
from googleapiclient.errors import HttpError
from google.cloud import storage, exceptions
from google.cloud.exceptions import *
os.environ['AWS_SHARED_CREDENTIALS_FILE'] = '.env'
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = 'gcp_keys/34.json'
# If you want to use a transparent + supports SSL proxy you can put it here
# os.environ['HTTPS_PROXY'] = 'https://127.0.0.1:3128'
@@ -42,17 +54,8 @@ builtins.db_name = "weirdAAL.db"
def perform_credential_check():
    '''
    Dispatch to the credential check matching the selected module's cloud
    provider. Placeholder: not yet implemented, intentionally a no-op.
    '''
def aws_cred_check():
'''
Check that the AWS keys work before we go any further.
It picks the keys up from the local .env file
We are letting boto3 do all the work that way we can
handle session tokens natively
Check that the AWS keys work before we go any further. It picks the keys up from the local .env file
We are letting boto3 do all the work that way we can handle session tokens natively
'''
try:
@@ -64,9 +67,9 @@ def aws_cred_check():
except ClientError as e:
print("The AWS Access Keys are not valid/active")
sys.exit(1)
def gcp_cred_check():
    '''Placeholder for a GCP credential validity check; currently a no-op.'''
# exception to catch the lack of aws creds here - temp fix
except Exception as e:
print('\t -')
def method_create():
try:
@@ -122,7 +125,6 @@ def make_tabulate_rows(hash, cloud_provider):
for item in hash[key]:
for (k,v) in item.items():
normalized_comment = normalize_comments(v)
k = re.sub("module_", "", k)
entire_contents.append([cloud_provider, key, k, normalized_comment])
return entire_contents