3 Commits

Author | SHA1 | Message | Date
cktricky | de7acc8acb | in the process of trying to do both an aws and gcp module check | 2018-09-28 11:35:56 -07:00
Ken Johnson | dcc62019bc | Merge pull request #66 from carnal0wnage/list_modules_change (Normalized module names when listing) | 2018-09-28 12:40:36 -04:00
cktricky | 104118b280 | just changed the printout so that it looks like how you would call it from the command line | 2018-09-28 09:38:30 -07:00
8 changed files with 17 additions and 281 deletions

View File

@@ -354,56 +354,6 @@ def get_instance_volume_details():
print("CTRL-C received, exiting...") print("CTRL-C received, exiting...")
def get_instance_userdata():
    '''
    show userData (base64 decoded) sorted by instanceId across all regions
    '''
    try:
        for region in regions:
            try:
                client = boto3.client('ec2', region_name=region)
                instances = client.describe_instances()
            except botocore.exceptions.ClientError as e:
                if e.response['Error']['Code'] == 'UnauthorizedOperation':
                    print('{} : (UnauthorizedOperation) when calling the DescribeInstances in ({}) -- sure you have ec2 permissions?' .format(AWS_ACCESS_KEY_ID, region))
                    continue
                elif e.response['Error']['Code'] == 'AuthFailure':
                    print('{} : (AuthFailure) when calling the DescribeInstances in ({}) -- key is invalid or no permissions.' .format(AWS_ACCESS_KEY_ID, region))
                    continue
                elif e.response['Error']['Code'] == 'OptInRequired':
                    print('{} : (OptInRequired) Has permissions but isnt signed up for service in ({})' .format(AWS_ACCESS_KEY_ID, region))
                    continue
                else:
                    print(e)
                    continue
            if len(instances['Reservations']) <= 0:
                print("[-] List instances allowed for {} but no results [-]" .format(region))
            else:
                for r in instances['Reservations']:
                    for i in r['Instances']:
                        try:
                            userData = client.describe_instance_attribute(InstanceId=i['InstanceId'], Attribute='userData')
                            print("Instance ID: {} \n" .format(i['InstanceId']))
                            if len(userData['UserData']['Value']) > 0:
                                print("Decoded Userdata values:")
                                pp.pprint(base64.b64decode(userData['UserData']['Value']).decode("utf-8"))
                                print("\n")
                            else:
                                print("no Userdata for: {}\n".format(i['InstanceId']))
                        except KeyError:
                            # no 'Value' key means no userData for this instance
                            continue
    except botocore.exceptions.ClientError as e:
        if e.response['Error']['Code'] == 'UnauthorizedOperation':
            print('{} : (UnauthorizedOperation) when calling the DescribeVolumes -- sure you have required ec2 permissions?' .format(AWS_ACCESS_KEY_ID))
        elif e.response['Error']['Code'] == 'SubscriptionRequiredException':
            print('{} : Has permissions but isnt signed up for service - usually means you have a root account' .format(AWS_ACCESS_KEY_ID))
        else:
            print(e)
    except KeyboardInterrupt:
        print("CTRL-C received, exiting...")
def get_instance_volume_details2():
    '''
    show volumes by instanceId but instanceID->volume1 of ID, instanceID->volume2 of ID but more details.
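For reference, the helper removed above boils down to one DescribeInstanceAttribute call per instance followed by a base64 decode. A minimal standalone sketch of that core step, assuming a placeholder region and instance ID and credentials picked up from the environment (none of these values come from the diff):

# Minimal sketch of what get_instance_userdata() did per instance; the
# region and instance ID below are placeholders, not values from the diff.
import base64
import boto3

client = boto3.client('ec2', region_name='us-east-1')
resp = client.describe_instance_attribute(InstanceId='i-0123456789abcdef0',
                                          Attribute='userData')
value = resp.get('UserData', {}).get('Value')
if value:
    # userData comes back base64 encoded
    print(base64.b64decode(value).decode('utf-8'))
else:
    print('no userData for this instance')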

View File

@@ -1,28 +0,0 @@
'''
GCP BigQuery functions for WeirdAAL
'''

import google.auth
import googleapiclient.discovery
import os
import sys

from google.oauth2 import service_account
from googleapiclient.errors import HttpError
from google.cloud import bigquery, exceptions
from google.cloud.exceptions import *


def gcp_bigquery_list_datasets(project_id, credentials):
    '''List the BigQuery datasets visible to the supplied service-account credentials.'''
    bigquery_client = bigquery.Client(project=credentials.project_id)
    datasets = list(bigquery_client.list_datasets())
    project = bigquery_client.project

    if datasets:
        print('Datasets in project {}:'.format(project))
        for dataset in datasets:  # API request(s)
            print('\t{}'.format(dataset.dataset_id))
    else:
        print('{} project does not contain any datasets.'.format(project))

View File

@@ -1,43 +0,0 @@
'''
GCP IAM functions for WeirdAAL
'''

import google.auth
import googleapiclient.discovery
import os
import sys

from google.oauth2 import service_account
from googleapiclient.errors import HttpError


# [START iam_list_keys]
def gcp_iam_list_keys(service_account_email, service):
    """Lists all keys for a service account."""
    # pylint: disable=no-member
    keys = service.projects().serviceAccounts().keys().list(
        name='projects/-/serviceAccounts/' + service_account_email).execute()

    for key in keys['keys']:
        print('Key: ' + key['name'])
# [END iam_list_keys]


# [START iam_list_service_accounts]
def gcp_iam_list_service_accounts(project_id, service):
    """Lists all service accounts for the current project."""
    # pylint: disable=no-member
    service_accounts = service.projects().serviceAccounts().list(
        name='projects/' + project_id).execute()

    for account in service_accounts['accounts']:
        print('Name: ' + account['name'])
        print('Email: ' + account['email'])
        print(' ')
    return service_accounts
# [END iam_list_service_accounts]

View File

@@ -1,39 +0,0 @@
'''
GCP Storage functions for WeirdAAL
'''

import google.auth
import googleapiclient.discovery
import os
import sys

from google.oauth2 import service_account
from googleapiclient.errors import HttpError
from google.cloud import storage, exceptions
from google.cloud.exceptions import *


def gcp_storage_list_buckets(credentials):
    '''list Google storage buckets for account'''
    list_of_buckets = []
    storage_client = storage.Client()
    buckets = storage_client.list_buckets()

    for buck in buckets:
        print(buck.name)
        list_of_buckets.append(buck.name)
    return list_of_buckets


def gcp_storage_list_blobs(credentials, bucket_name):
    '''Lists all the blobs in the bucket.'''
    storage_client = storage.Client()
    bucket = storage_client.get_bucket(bucket_name)

    blobs = bucket.list_blobs()

    for blob in blobs:
        print('\t{}'.format(blob.name))
    print('\n')

View File

@@ -40,14 +40,6 @@ def module_ec2_get_instance_volume_details():
    get_instance_volume_details()


def module_ec2_get_instance_userdata():
    '''
    Show userData sorted by instanceId
    python3 weirdAAL.py -m ec2_get_instance_userdata -t demo
    '''
    get_instance_userdata()


def module_ec2_get_instance_volume_details2():
    '''
    Show volumes by instanceId but instanceID->volume1 of ID, instanceID->volume2 of ID but more details.

View File

@@ -1,89 +0,0 @@
'''
This module handles the core GCP recon functionality by asking all the services
that have functions that don't have arguments if we can access them :-)
'''
from libs.gcp.gcp_iam import *
from libs.gcp.gcp_storage import *
from libs.gcp.gcp_bigquery import *

credentials = service_account.Credentials.from_service_account_file(
    filename=os.environ['GOOGLE_APPLICATION_CREDENTIALS'],
    scopes=['https://www.googleapis.com/auth/cloud-platform'])

service = googleapiclient.discovery.build(
    'iam', 'v1', credentials=credentials)
def module_gcp_recon_all():
    '''
    Main gcp_recon_all module - attempt to connect to each of the services to see if we have some privs
    python3 weirdAAL.py -m gcp_recon_all -t demo
    '''
    try:
        print("GCP IAM List Keys check")
        # print(credentials)
        gcp_iam_list_keys(credentials.service_account_email, service)
    except HttpError as e:
        # print(e)
        if e.resp.status in [403, 500, 503]:
            print("\tGCP IAM access denied for {}\n".format(credentials.service_account_email))
        else:
            print('{}\n'.format(e))
    except google.auth.exceptions.RefreshError as f:
        print(f)
        print("Service key is invalid, exiting")
        sys.exit()

    try:
        print("GCP IAM list service accounts for the current project: {}.".format(credentials.project_id))
        # print(credentials)
        gcp_iam_list_service_accounts(credentials.project_id, service)
    except HttpError as e:
        # print(e)
        if e.resp.status in [403, 500, 503]:
            print("\tIAM access denied for {}\n".format(credentials.service_account_email))
        else:
            print('{}\n'.format(e))
    except google.auth.exceptions.RefreshError as f:
        print(f)
        print("Service key is invalid, exiting")
        sys.exit()
    '''
    Storage bucket access checks
    '''
    try:
        print("GCP Storage check")
        buckets = gcp_storage_list_buckets(credentials)
        if buckets:
            print("\nAttempting to list bucket contents:")
            for a in buckets:
                print('Bucket: {}'.format(a))
                gcp_storage_list_blobs(credentials, a)
    except googleapiclient.errors.HttpError as e:
        print('{}\n'.format(e))
    except exceptions.Forbidden as e:
        print("\t Forbidden")
        print('{}\n'.format(e))
    except exceptions.PermissionDenied as e:
        print("\t PermissionDenied")
    except google.auth.exceptions.RefreshError as f:
        print(f)

    '''
    BigQuery access checks
    '''
    try:
        print("GCP BigQuery check")
        gcp_bigquery_list_datasets(credentials.project_id, credentials)
    except googleapiclient.errors.HttpError as e:
        print('{}\n'.format(e))
    except exceptions.Forbidden as e:
        print("\t Forbidden")
        print('{}\n'.format(e))
    except exceptions.PermissionDenied as e:
        print("\t PermissionDenied")
    except google.auth.exceptions.RefreshError as f:
        print(f)

View File

@@ -7,8 +7,3 @@ s3transfer==0.1.11
six==1.11.0
virtualenv==15.1.0
tabulate==0.8.2
google-api-python-client==1.7.4
google.cloud==0.34.0
google-cloud-storage==1.12.0
google-cloud-bigquery==1.5.1

View File

@@ -17,20 +17,8 @@ import re
from tabulate import tabulate
import textwrap

# not pythonic but keeping google imports separate for now
import google.auth
import googleapiclient.discovery
from google.oauth2 import service_account
from googleapiclient.errors import HttpError
from google.cloud import storage, exceptions
from google.cloud.exceptions import *

os.environ['AWS_SHARED_CREDENTIALS_FILE'] = '.env'
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = 'gcp_keys/34.json'

# If you want to use a transparent + supports SSL proxy you can put it here
# os.environ['HTTPS_PROXY'] = 'https://127.0.0.1:3128'
@@ -54,8 +42,17 @@ builtins.db_name = "weirdAAL.db"
def perform_credential_check():
    '''
    Depending on the module, we determine which type of
    credential check we perform.
    '''
    pass


def aws_cred_check():
    '''
    Check that the AWS keys work before we go any further.
    It picks the keys up from the local .env file
    We are letting boto3 do all the work that way we can
    handle session tokens natively
    '''
    try:
@@ -67,9 +64,9 @@ def perform_credential_check():
    except ClientError as e:
        print("The AWS Access Keys are not valid/active")
        sys.exit(1)
    # exception to catch the lack of aws creds here - temp fix
    except Exception as e:
        print('\t -')


def gcp_cred_check():
    pass


def method_create():
    try:
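The commit message ("in the process of trying to do both an aws and gcp module check") together with the new pass stubs suggests perform_credential_check() will eventually route to aws_cred_check() or gcp_cred_check() depending on the module being run. A minimal sketch of that routing, assuming a module_name parameter and a 'gcp_' prefix convention (both are assumptions, not part of this diff):

# Hypothetical routing sketch -- module_name and the 'gcp_' prefix check are
# assumptions; in this commit both stubs are still just 'pass'.
def perform_credential_check(module_name):
    '''
    Depending on the module, we determine which type of
    credential check we perform.
    '''
    if module_name.startswith('gcp_'):
        gcp_cred_check()
    else:
        aws_cred_check()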
@@ -125,6 +122,7 @@ def make_tabulate_rows(hash, cloud_provider):
        for item in hash[key]:
            for (k,v) in item.items():
                normalized_comment = normalize_comments(v)
                k = re.sub("module_", "", k)
                entire_contents.append([cloud_provider, key, k, normalized_comment])
    return entire_contents
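The added re.sub line is what implements the "Normalized module names when listing" commit: wrapper functions are named module_<service>_<action>(), but on the command line you invoke them without the prefix, so the listing strips it to match "how you would call it from the command line". A tiny self-contained illustration, reusing a module name that appears elsewhere in this diff:

# Illustration of the normalization above, using a module name from this diff.
import re

name = "module_ec2_get_instance_volume_details2"
print(re.sub("module_", "", name))
# -> ec2_get_instance_volume_details2, i.e. what you would pass to:
#    python3 weirdAAL.py -m ec2_get_instance_volume_details2 -t demo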