We use the boto3 Python library to access AWS S3.
If you are running in Databricks, replace the credentialed client creation below with s3 = boto3.client('s3'); boto3 will then resolve credentials from the environment (e.g., the cluster's instance profile).
import boto3

# Create an S3 client with explicit credentials.
# AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, and AWS_SESSION_TOKEN
# must be defined earlier (e.g., loaded from a secret store).
s3 = boto3.client(
    's3',
    region_name='us-east-1',
    aws_access_key_id=AWS_ACCESS_KEY_ID,
    aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
    aws_session_token=AWS_SESSION_TOKEN,
)
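For reference, a minimal sketch of the Databricks replacement mentioned above, assuming the cluster has an instance profile (or other default credentials) attached:

# No explicit keys: boto3's default credential chain
# finds the cluster's instance profile credentials.
s3 = boto3.client('s3')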
# location_path is assumed to be a full S3 URI, e.g. 's3://bucket/some/prefix'
s3_path = location_path
bucket_name = s3_path.split('/')[2]  # third segment of 's3://bucket/...'
if not s3_path.endswith('/'):
    s3_path += '/'
# The Prefix argument must be the key portion only, without 's3://bucket/'
prefix = '/'.join(s3_path.split('/')[3:])

folders = []
response = s3.list_objects_v2(Bucket=bucket_name, Prefix=prefix, Delimiter='/')
# With Delimiter='/', each "folder" comes back as a CommonPrefixes entry.
for common_prefix in response.get('CommonPrefixes', []):
    folder = common_prefix['Prefix'].rstrip('/')
    folders.append(folder)
print(f"Folders: {folders}")
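Note that list_objects_v2 returns at most 1,000 entries per response. If the prefix may contain more, boto3's paginator handles the continuation tokens automatically; a minimal sketch reusing the bucket_name and prefix variables from above:

# Paginate so listings beyond 1,000 entries are not silently truncated.
paginator = s3.get_paginator('list_objects_v2')
folders = []
for page in paginator.paginate(Bucket=bucket_name, Prefix=prefix, Delimiter='/'):
    for common_prefix in page.get('CommonPrefixes', []):
        folders.append(common_prefix['Prefix'].rstrip('/'))
print(f"Folders: {folders}")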