(AccessDenied) when calling the DeleteObject operation: Access Denied


Hi,

**Question 1:** I have attached the following policy (ecotechh2gambuckets) to my IAM user:

{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "s3:GetObject",
                "s3:PutObject",
                "s3:DeleteObject"
            ],
            "Resource": "arn:aws:s3:::eco-tech-h2gam",
            "Condition": {
                "StringEquals": {
                    "s3:prefix": [
                        "cams/fr/forecast/"
                    ]
                }
            }
        },
        {
            "Effect": "Allow",
            "Action": "s3:ListBucket",
            "Resource": "arn:aws:s3:::eco-tech-h2gam",
            "Condition": {
                "StringEquals": {
                    "s3:prefix": [
                        "cams/fr/forecast/"
                    ]
                }
            }
        }
    ]
}

But the function delete_previous_file_from_aws_and_save_new_file_to_aws(self, outputfile) in the Python script below gives me the following error:

(AccessDenied) when calling the DeleteObject operation: Access Denied

import os, shutil
from datetime import date
import datetime as dt
import cdsapi
import yaml
import numpy as np
import pandas as pd
import xarray as xr
from tqdm import tqdm
from os import listdir
from os.path import isfile, join
import urllib3
urllib3.disable_warnings()
import platform
import boto3


class download_cams_forecast:
    
    def __init__(self):
        self.work_dir = None
        self.save_to = None
        self.bucket_name = 'eco-tech-h2gam'
        self.bucket_prefix = 'cams/fr/forecast/'
        self.s3 = boto3.client('s3')
        self.object_key_init_forecast = self.list_all_files_in_aws_s3_bucket()[1].split("/")[3]
        print(self.object_key_init_forecast)

    def list_all_files_in_aws_s3_bucket(self):
        file_list = []
        # Retrieve the list of files
        response = self.s3.list_objects_v2(Bucket=self.bucket_name, Prefix=self.bucket_prefix)
        
        if 'Contents' in response:
            for obj in response['Contents']:
                file_list.append(obj['Key'])
        print(file_list)
        return file_list

    
    def delete_previous_file_from_aws_and_save_new_file_to_aws(self, outputfile):
        # Delete the previous file from the S3 bucket
        if self.object_key_init_forecast:
            key = self.bucket_prefix + self.object_key_init_forecast
            print("Debug",key)
            self.s3.delete_object(Bucket=self.bucket_name, Key=key)
            print(f"Deleted {self.object_key_init_forecast} from S3 bucket.")

        # Upload the new file to the S3 bucket
        new_object_key = f"{self.bucket_prefix}{os.path.basename(outputfile)}"
        self.s3.upload_file(outputfile, self.bucket_name, new_object_key)
        print(f"Uploaded {outputfile} to S3 bucket as {new_object_key}.")

    
    def download(self):
        print("Downloading CAMS data...")
        sys = platform.system()
        self.work_dir = os.path.dirname(os.path.abspath(__file__))
        
        print("sys:", sys)
        if sys == "Windows":
            self.save_to = os.path.join(self.work_dir, "cams", "fr", "forecast")
        else:
            self.save_to = os.path.join(self.work_dir, "cams", "fr", "forecast")

        folder = self.save_to
        for filename in os.listdir(folder):
            file_path = os.path.join(folder, filename)
            try:
                if os.path.isfile(file_path) or os.path.islink(file_path):
                    os.unlink(file_path)
                elif os.path.isdir(file_path):
                    shutil.rmtree(file_path)
            except Exception as e:
                print('Failed to delete %s. Reason: %s' % (file_path, e))
        
        print("System:", sys) 

        if not os.path.exists(self.save_to):
            os.makedirs(self.save_to)

        # get personal directory of cdsapi
        try:
            if sys == "Windows":
                with open(os.path.join(self.work_dir, ".cdsapirc_cams_windows"), 'r') as file:
                    cams_api = os.path.join(self.work_dir, ".cdsapirc")
            else:
                with open(os.path.join(self.work_dir, ".cdsapirc_cams"), "r") as file:
                    cams_api = os.path.join(self.work_dir, ".cdsapirc")
        except FileNotFoundError:
            raise FileNotFoundError("""cdsapirc file cannot be found. Write the
                directory of your personal .cdsapirc file in a local file called
                `.cdsapirc_cams` and place it in the directory where this script lies.""")

        # Download CAMS
        # -----------------------------------------------------------------------------
        print('Download data from CAMS ...', flush=True)

        with open(cams_api, 'r') as f:
            credentials = yaml.safe_load(f)

        mypath = os.path.join(self.work_dir, "cams")

        def findlatestdateofcamsdata(mypath):
            dates = []
            onlyfiles = [f for f in listdir(mypath) if isfile(join(mypath, f))]
            for filename in onlyfiles:
                dates.append(pd.to_datetime(filename[14:24]))
            
            if dates:
                return (dates, max(dates))
            else:
                return (dates, dt.date.today() - pd.Timedelta(1, unit="days"))

        prevday = dt.date.today() - pd.Timedelta("1 days")
        startdate = findlatestdateofcamsdata(mypath)[1]
        datesnotclean = pd.date_range(start=startdate, end=prevday).strftime("%Y-%m-%d").tolist()
        
        dates = []

        for date in datesnotclean:
            if date not in pd.to_datetime(findlatestdateofcamsdata(mypath)[0]):
                dates.append(date)

        print(dates)

        area = [51.75, -5.83, 41.67, 11.03]

        for date in tqdm(dates):
            print(date)
            file_name = f'cams-forecast-{date}.nc'
            output_file = os.path.join(self.save_to, file_name)
            if not os.path.exists(output_file):
                c = cdsapi.Client(url=credentials['url'], key=credentials['key'])
                c.retrieve(
                    'cams-europe-air-quality-forecasts',
                    {
                        'variable': [
                            'carbon_monoxide', 'nitrogen_dioxide', 'ozone',
                            'particulate_matter_10um', 'particulate_matter_2.5um', 'sulphur_dioxide',
                        ],
                        'model': 'ensemble',
                        'level': '0',
                        'date': date,
                        'type': 'forecast',
                        'time': '00:00',
                        'leadtime_hour': [
                            '0', '24', '48',
                            '72', '96'
                        ],
                        'area': area,
                        'format': 'netcdf',
                    },
                    output_file)
        self.delete_previous_file_from_aws_and_save_new_file_to_aws(output_file)
        print('Download finished.', flush=True)

if __name__ == '__main__':
    CamsHistForecasts = download_cams_forecast()
    CamsHistForecasts.download()

Question 2

The connection to AWS S3 buckets works because I have a credentials file in C:\Users\<username>\.aws\credentials

Although I am using Heroku to deploy my application, I am forced to use AWS S3 for data-initialization purposes. So where should the .aws\credentials file go in the Heroku app directory, and is it good practice to hash this file before pushing it to git (if so, which Python library should I use?), since the Heroku app directory is initialized from the git repo?

Current Site/App Output https://www.eco-tech-h2gam.com/

Ludo
asked 8 days ago · 205 views
7 Answers
Accepted Answer

"AWSCompromisedKeyQuarantineV2" policy is blocking "s3:DeleteObject".
In other words, unless you detach "AWSCompromisedKeyQuarantineV2" from the IAM user, that IAM user will not be able to delete the S3 object.
"s3:DeleteObject" is included in the "Deny" statement.
https://docs.aws.amazon.com/aws-managed-policy/latest/reference/AWSCompromisedKeyQuarantineV2.html

{
  "Version" : "2012-10-17",
  "Statement" : [
    {
      "Effect" : "Deny",
      "Action" : [
        "cloudtrail:LookupEvents",
        "ec2:RequestSpotInstances",
        "ec2:RunInstances",
        "ec2:StartInstances",
        "iam:AddUserToGroup",
        "iam:AttachGroupPolicy",
        "iam:AttachRolePolicy",
        "iam:AttachUserPolicy",
        "iam:ChangePassword",
        "iam:CreateAccessKey",
        "iam:CreateInstanceProfile",
        "iam:CreateLoginProfile",
        "iam:CreatePolicyVersion",
        "iam:CreateRole",
        "iam:CreateUser",
        "iam:DetachUserPolicy",
        "iam:PassRole",
        "iam:PutGroupPolicy",
        "iam:PutRolePolicy",
        "iam:PutUserPermissionsBoundary",
        "iam:PutUserPolicy",
        "iam:SetDefaultPolicyVersion",
        "iam:UpdateAccessKey",
        "iam:UpdateAccountPasswordPolicy",
        "iam:UpdateAssumeRolePolicy",
        "iam:UpdateLoginProfile",
        "iam:UpdateUser",
        "lambda:AddLayerVersionPermission",
        "lambda:AddPermission",
        "lambda:CreateFunction",
        "lambda:GetPolicy",
        "lambda:ListTags",
        "lambda:PutProvisionedConcurrencyConfig",
        "lambda:TagResource",
        "lambda:UntagResource",
        "lambda:UpdateFunctionCode",
        "lightsail:Create*",
        "lightsail:Delete*",
        "lightsail:DownloadDefaultKeyPair",
        "lightsail:GetInstanceAccessDetails",
        "lightsail:Start*",
        "lightsail:Update*",
        "organizations:CreateAccount",
        "organizations:CreateOrganization",
        "organizations:InviteAccountToOrganization",
        "s3:DeleteBucket",
        "s3:DeleteObject",
        "s3:DeleteObjectVersion",
        "s3:PutLifecycleConfiguration",
        "s3:PutBucketAcl",
        "s3:PutBucketOwnershipControls",
        "s3:DeleteBucketPolicy",
        "s3:ObjectOwnerOverrideToBucketOwner",
        "s3:PutAccountPublicAccessBlock",
        "s3:PutBucketPolicy",
        "s3:ListAllMyBuckets",
        "ec2:PurchaseReservedInstancesOffering",
        "ec2:AcceptReservedInstancesExchangeQuote",
        "ec2:CreateReservedInstancesListing",
        "savingsplans:CreateSavingsPlan"
      ],
      "Resource" : [
        "*"
      ]
    }
  ]
}
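
Once the exposed key has been deactivated or rotated and the account reviewed, the quarantine policy can be detached from the user. A minimal sketch with boto3 (the user name below is a placeholder; this assumes the caller has iam:DetachUserPolicy permission):

import boto3

iam = boto3.client('iam')

# ARN of the AWS managed quarantine policy
QUARANTINE_POLICY_ARN = 'arn:aws:iam::aws:policy/AWSCompromisedKeyQuarantineV2'

# Placeholder -- replace with the affected IAM user's name
user_name = 'your-iam-user'

# Detach the quarantine policy so the user regains the quarantined actions
iam.detach_user_policy(UserName=user_name, PolicyArn=QUARANTINE_POLICY_ARN)
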
EXPERT
answered 7 days ago
  • Thank you all for the nice help, everything is working now. I have removed the AWSCompromisedKeyQuarantineV2 policy.


Hello.

**Question 1:** I have attached the following policy (ecotechh2gambuckets) to my IAM user:

"GetObject", "PutObject", and "DeleteObject" cannot set "s3:prefix" in the condition key.
Therefore, to operate within a specific folder, you need to control it with "Resource" as shown below.
https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazons3.html

{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "s3:GetObject",
                "s3:PutObject",
                "s3:DeleteObject"
            ],
            "Resource": "arn:aws:s3:::eco-tech-h2gam/cams/fr/forecast/*"
        },
        {
            "Effect": "Allow",
            "Action": "s3:ListBucket",
            "Resource": "arn:aws:s3:::eco-tech-h2gam",
            "Condition": {
                "StringEquals": {
                    "s3:prefix": [
                        "cams/fr/forecast/"
                    ]
                }
            }
        }
    ]
}

I am forced to use AWS S3 for data-initialization purposes, so where should the .aws\credentials file go in the Heroku app directory, and is it good practice to hash this file before pushing it to git (if so, which Python library should I use?), since the Heroku app directory is initialized from the git repo?

I'm not very familiar with Heroku, but instead of using ".aws\credentials", why not create a ".env" file or something similar and read the credentials as environment variables from the Python program?
If you use "python-dotenv", you can read the access key and secret access key from the ".env" file as environment variables.
Also, even if you have hashed your access keys, it is not good practice to upload them to a GitHub repository.
https://pypi.org/project/python-dotenv/
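
As a minimal sketch of that approach (assuming the ".env" file defines AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY, the standard variable names that boto3 resolves automatically):

import os

import boto3
from dotenv import load_dotenv  # pip install python-dotenv

# Load variables from a local .env file (keep that file out of git,
# e.g. by listing it in .gitignore)
load_dotenv()

# boto3 picks up AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY from the
# environment automatically...
s3 = boto3.client('s3')

# ...or the values can be passed explicitly:
s3 = boto3.client(
    's3',
    aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'],
    aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY'],
)

On Heroku, the same variable names can be set as config vars instead of using a .env file, and the same code works unchanged.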

EXPERT
answered 8 days ago
  • I also did a little research on Heroku, and was told to set the AWS access key and secret access key as environment variables. https://devcenter.heroku.com/articles/s3

  • Hi, I modified the policy as you proposed but still get the same access denied error. Any ideas?

  • I tried the IAM policy in my environment and confirmed that files in the "cams/fr/forecast/" folder of the S3 bucket can be deleted. This means the problem may not be the IAM policy but the Python code, or something like the S3 bucket policy may be blocking it. Could you please share the exact error message? By the way, what does "print(self.object_key_init_forecast)" on line 27 of the code display as the object key?

  • Exact error message:

    Debug cams/fr/forecast/cams-forecast-2024-06-30.nc
      File "C:\Users\ludov\Desktop\eco-tech-h2gam\DownloadCAMSforecast.py", line 63, in delete_previous_file_from_aws_and_save_new_file_to_aws
        self.s3.delete_object(Bucket=self.bucket_name, Key=key)
      File "C:\Users\ludov\anaconda3\envs\eco-tech-h2gam\Lib\site-packages\botocore\client.py", line 553, in _api_call
        return self._make_api_call(operation_name, kwargs)
      File "C:\Users\ludov\anaconda3\envs\eco-tech-h2gam\Lib\site-packages\botocore\client.py", line 1009, in _make_api_call
        raise error_class(parsed_response, operation_name)
    botocore.exceptions.ClientError: An error occurred (AccessDenied) when calling the DeleteObject operation: Access Denied

    As for the "by the way" question: the object key is cams-forecast-2024-06-30.nc

  • Thank you for sharing the error message. By the way, does the code work properly if you attach the "AmazonS3FullAccess" policy? If it does not work even with "AmazonS3FullAccess" attached, it may be blocked by the S3 bucket policy or an SCP, or the IAM policy may be set on the wrong IAM user. https://docs.aws.amazon.com/aws-managed-policy/latest/reference/AmazonS3FullAccess.html
    Also, can you confirm that objects can be deleted with a simple sample like the code below?

    import boto3
    
    bucket_name = 'eco-tech-h2gam'
    bucket_prefix = 'cams/fr/forecast/'
    object_key_init_forecast = 'cams-forecast-2024-06-30.nc'
    
    s3 = boto3.client('s3')
    
    key = bucket_prefix + object_key_init_forecast
    
    s3.delete_object(Bucket=bucket_name, Key=key)
    

You need to check whether the access key loaded by the Python code is the correct one.
As @Leo K says, add the following code to your Python script and make sure the access key of the IAM user "ludovic.giraud@essec.edu" is the one being used.
By the way, is the IAM policy that I initially provided correctly attached to the IAM user "ludovic.giraud@essec.edu"?

import boto3

sts = boto3.client('sts')

print(sts.get_caller_identity())
EXPERT
answered 7 days ago
  • I believe we are going around in circles: when I print sts.get_caller_identity() I get the correct user, the key I am using was created in January 2024, and the policy attached to the user is the following. I still get the access denied error:

    {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Action": [
                    "s3:GetObject",
                    "s3:PutObject",
                    "s3:DeleteObject"
                ],
                "Resource": "arn:aws:s3:::eco-tech-h2gam/cams/fr/forecast/*"
            },
            {
                "Effect": "Allow",
                "Action": "s3:ListBucket",
                "Resource": "arn:aws:s3:::eco-tech-h2gam",
                "Condition": {
                    "StringEquals": {
                        "s3:prefix": [
                            "cams/fr/forecast/"
                        ]
                    }
                }
            }
        ]
    }

  • Just to be sure, is the S3 bucket name correct? What is strange is that the sample code below did not work even though this IAM policy was set.

    import boto3
    
    bucket_name = 'eco-tech-h2gam'
    bucket_prefix = 'cams/fr/forecast/'
    object_key_init_forecast = 'cams-forecast-2024-06-30.nc'
    
    s3 = boto3.client('s3')
    
    key = bucket_prefix + object_key_init_forecast
    
    s3.delete_object(Bucket=bucket_name, Key=key)
    
  • Riku_kobayashi: I think what is wrong is that I have a policy under IAM policies but no policy under the bucket permissions. I am not sure what the correct syntax for the bucket-permissions policy should be; can you please advise?

  • If no bucket policy is set, access is implicitly denied unless an IAM policy allows it. However, if the IAM policy allows access to S3, you should be able to delete the object. By the way, is the S3 bucket in the same AWS account as the IAM user? If they are not in the same AWS account, the following S3 bucket policy is required.

    {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Principal": {
                    "AWS": "arn:aws:iam::000000000000:user/ludovic.giraud@essec.edu"
                },
                "Action": [
                    "s3:GetObject",
                    "s3:PutObject",
                    "s3:DeleteObject"
                ],
                "Resource": "arn:aws:s3:::eco-tech-h2gam/cams/fr/forecast/*"
            },
            {
                "Effect": "Allow",
                "Principal": {
                    "AWS": "arn:aws:iam::000000000000:user/ludovic.giraud@essec.edu"
                },
                "Action": "s3:ListBucket",
                "Resource": "arn:aws:s3:::eco-tech-h2gam",
                "Condition": {
                    "StringEquals": {
                        "s3:prefix": [
                            "cams/fr/forecast/"
                        ]
                    }
                }
            }
        ]
    }
    

Open the S3 console, open the bucket, and copy the bucket policy shown on the Permissions tab here.

On the Properties tab of the bucket, copy the ARN of the bucket and paste it here.
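
If the console is inconvenient, the bucket policy can also be dumped programmatically; a sketch with boto3 (get_bucket_policy raises an error with code "NoSuchBucketPolicy" when no policy is set):

import boto3
from botocore.exceptions import ClientError

s3 = boto3.client('s3')

try:
    # The policy document is returned as a JSON string
    print(s3.get_bucket_policy(Bucket='eco-tech-h2gam')['Policy'])
except ClientError as e:
    # 'NoSuchBucketPolicy' means the bucket has no policy at all
    print(e.response['Error']['Code'])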

Also, make sure to use the full IAM policy Riku Kobayashi gave you earlier. It must contain both the statement I quoted, with s3:DeleteObject as one of the actions, and also the second statement with the s3:ListBucket action. The error you were receiving about DeleteObject failing is specific to the first statement, but your overall Python program requires both policy statements to function.

If it doesn't work with both policy statements configured, then in addition to extracting the bucket policy and bucket ARN as explained above, include this as debug output in your code. Make sure not to include the output in this chat, because it reveals your account ID and username, but do make sure that your code is executing under the exact same IAM user ARN that you configured the policy statements for:

print(boto3.client('sts').get_caller_identity())
EXPERT
Leo K
answered 7 days ago
  • Ok I think we've nailed things down: there is nothing in the bucket policy. What should I write in there? The tab does not accept the same policy as the one in the IAM user policy tab.

  • When the IAM user and S3 bucket are in the same AWS account (identified by the 12-digit ID number), the permissions only need to be granted by either the IAM policy attached to the user or the S3 bucket policy -- not both. The reason we suggested checking the S3 bucket policy is that it would be possible for an explicit "Deny" statement to be present there to block object deletions, but if there's no policy set at all, no Deny statement is present, either.
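
    (As a sketch, this evaluation can also be tested directly against the user's attached policies with the IAM policy simulator API; the account ID below is a placeholder, as above:)

    import boto3

    iam = boto3.client('iam')

    response = iam.simulate_principal_policy(
        PolicySourceArn='arn:aws:iam::000000000000:user/ludovic.giraud@essec.edu',
        ActionNames=['s3:DeleteObject'],
        ResourceArns=['arn:aws:s3:::eco-tech-h2gam/cams/fr/forecast/cams-forecast-2024-06-30.nc'],
    )

    for result in response['EvaluationResults']:
        # EvalDecision is 'allowed', 'explicitDeny', or 'implicitDeny'
        print(result['EvalActionName'], result['EvalDecision'])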


The initial answer from Riku Kobayashi is correct. You most likely have a typo in the IAM policy attached to your IAM user. Could you just paste the whole ecotechh2gambuckets policy here as it is after implementing the suggested change? Make sure that the policy statement Riku Kobayashi gave you is in the policy exactly as advised, particularly with the /* at the end of the Resource specification:

{
    "Effect": "Allow",
    "Action": [
        "s3:GetObject",
        "s3:PutObject",
        "s3:DeleteObject"
    ],
    "Resource": "arn:aws:s3:::eco-tech-h2gam/cams/fr/forecast/*"
}

If it still isn't working: your code already prints the key variable, so what did it show?

            key = self.bucket_prefix + self.object_key_init_forecast
            print("Debug",key)

Also, what you pasted above as the bucket policy isn't a bucket policy: it's your IAM policy. Open the S3 console, switch to the "Permissions" tab, and copy the bucket policy shown there into this discussion. Please also use code block formatting for it, so we can read it clearly.

EXPERT
Leo K
answered 7 days ago
  • Here is the new policy I entered following your guidance:

    {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Action": [
                    "s3:GetObject",
                    "s3:PutObject",
                    "s3:DeleteObject"
                ],
                "Resource": "arn:aws:s3:::eco-tech-h2gam/cams/fr/forecast/*"
            }
        ]
    }

    ... Still not working. This is what the key variable outputs: Debug cams/fr/forecast/cams-forecast-2024-06-30.nc


The reason we're stuck at this point is that the attempted operation is very, very simple -- literally deleting one single object -- and it's clearly permitted by the identity-based policy we've been discussing, whose contents you just confirmed moments ago.

The other obvious applicable policy that could block the operation is the S3 bucket policy. It isn't the one you've attached to the IAM user. It's the one attached to the S3 bucket. You'll need to open the S3 console, open the bucket, and copy the bucket policy shown on the Permissions tab here.

EXPERT
Leo K
answered 7 days ago
  • Leo K: Right, but there is no bucket policy; the Permissions tab shows "No policy to display". What's the correct syntax, please?


Since there is an explicit "Allow" statement in the policy attached to the IAM user and there isn't a "Deny" statement in the S3 bucket policy (because one isn't set at all), and since both the user and the bucket are likely in the same AWS account, that only leaves a set of generally unlikely possible explanations:

  • The object could be protected against deletion or replacement by S3 Object Lock. => Not likely in your own bucket without your knowledge.
  • A Service Control Policy set in your AWS Organizations organization could block s3:DeleteObject operations. => Not likely for an everyday operation like deleting an object.
  • A VPC endpoint policy could be blocking the access, if your Python program is accessing the bucket via an endpoint. => Not likely to block routine delete operations.
  • The IAM user could have an IAM permissions boundary attached to it, and the delete operation could be missing from its allowed actions, or it could be explicitly blocked by a "Deny" statement. => Also not likely for your own user.

The least unlikely of those unlikely explanations would be the last one. When you open your IAM user in the IAM console, underneath the policies attached, you should see a reference to a "Permissions boundary." Does it say that one is "not set"? If so, then it's not causing the issue, but if a permissions boundary is attached, then the s3:DeleteObject operation might be missing from its "Allow" statements or even be explicitly blocked by a "Deny".
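
As a quick programmatic cross-check (a sketch using standard IAM APIs; the user name is a placeholder), you can print the permissions boundary, if any, and every managed policy actually attached to the user -- an unexpected entry in that list would explain the denial:

import boto3

iam = boto3.client('iam')
user_name = 'your-iam-user'  # placeholder -- the affected IAM user

# 'PermissionsBoundary' only appears in the response when one is attached
user = iam.get_user(UserName=user_name)['User']
print(user.get('PermissionsBoundary', 'No permissions boundary set'))

# Every managed policy attached to the user, including AWS managed ones
for policy in iam.list_attached_user_policies(UserName=user_name)['AttachedPolicies']:
    print(policy['PolicyArn'])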

EXPERT
Leo K
answered 7 days ago
  • Yes it says not set... Also, I have a policy called AWSCompromisedKeyQuarantineV2 attached because the key was hardcoded in the Python script. Could this be blocking it?