Authenticate against R2 API using auth tokens
The following examples show how to authenticate against R2 using the S3 API and an API token.
Ensure you have set the following environment variables prior to running any of the examples:
export R2_ACCOUNT_ID=your_account_id
export R2_ACCESS_KEY_ID=your_access_key_id
export R2_SECRET_ACCESS_KEY=your_secret_access_key
export R2_BUCKET_NAME=your_bucket_name
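The examples below derive the S3 secret access key by hashing the value of R2_SECRET_ACCESS_KEY with SHA-256. If you want to check that derived value independently, a minimal shell sketch looks like this (assuming the sha256sum utility is available; on macOS, shasum -a 256 is an equivalent):

# Print the SHA-256 hex digest of the token value; the scripts below compute the same digest in code
printf '%s' "$R2_SECRET_ACCESS_KEY" | sha256sum | cut -d' ' -f1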
Install the aws-sdk package for the S3 API:
npm install aws-sdk
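Save the following script as get_r2_object.js (the filename is only a suggestion). Ensure you change objectKey to point to an existing file in your R2 bucket. After setting the environment variables above, run the script with:

node get_r2_object.js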
const AWS = require('aws-sdk');
const crypto = require('crypto');

const ACCOUNT_ID = process.env.R2_ACCOUNT_ID;
const ACCESS_KEY_ID = process.env.R2_ACCESS_KEY_ID;
const SECRET_ACCESS_KEY = process.env.R2_SECRET_ACCESS_KEY;
const BUCKET_NAME = process.env.R2_BUCKET_NAME;

// Hash the secret access key
const hashedSecretKey = crypto.createHash('sha256').update(SECRET_ACCESS_KEY).digest('hex');

// Configure the S3 client for Cloudflare R2
const s3Client = new AWS.S3({
  endpoint: `https://${ACCOUNT_ID}.r2.cloudflarestorage.com`,
  accessKeyId: ACCESS_KEY_ID,
  secretAccessKey: hashedSecretKey,
  signatureVersion: 'v4',
  region: 'auto' // Cloudflare R2 doesn't use regions, but this is required by the SDK
});

// Specify the object key
const objectKey = '2024/08/02/ingested_0001.parquet';

// Function to fetch the object
async function fetchObject() {
  try {
    const params = {
      Bucket: BUCKET_NAME,
      Key: objectKey
    };

    const data = await s3Client.getObject(params).promise();
    console.log('Successfully fetched the object');

    // Process the data as needed
    // For example, to get the content as a Buffer:
    // const content = data.Body;

    // Or to save the file (requires 'fs' module):
    // const fs = require('fs').promises;
    // await fs.writeFile('ingested_0001.parquet', data.Body);
  } catch (error) {
    console.error('Failed to fetch the object:', error);
  }
}
fetchObject();
Install the boto3 S3 API client:
pip install boto3
Run the following Python script with python3 get_r2_object.py. Ensure you change object_key to point to an existing file in your R2 bucket.
import os
import hashlib
import boto3
from botocore.client import Config

ACCOUNT_ID = os.environ.get('R2_ACCOUNT_ID')
ACCESS_KEY_ID = os.environ.get('R2_ACCESS_KEY_ID')
SECRET_ACCESS_KEY = os.environ.get('R2_SECRET_ACCESS_KEY')
BUCKET_NAME = os.environ.get('R2_BUCKET_NAME')

# Hash the secret access key using SHA-256
hashed_secret_key = hashlib.sha256(SECRET_ACCESS_KEY.encode()).hexdigest()

# Configure the S3 client for Cloudflare R2
s3_client = boto3.client('s3',
    endpoint_url=f'https://{ACCOUNT_ID}.r2.cloudflarestorage.com',
    aws_access_key_id=ACCESS_KEY_ID,
    aws_secret_access_key=hashed_secret_key,
    config=Config(signature_version='s3v4')
)

# Specify the object key
object_key = '2024/08/02/ingested_0001.parquet'

try:
    # Fetch the object
    response = s3_client.get_object(Bucket=BUCKET_NAME, Key=object_key)

    print('Successfully fetched the object')

    # Process the response content as needed
    # For example, to read the content:
    # object_content = response['Body'].read()

    # Or to save the file:
    # with open('ingested_0001.parquet', 'wb') as f:
    #     f.write(response['Body'].read())

except Exception as e:
    print(f'Failed to fetch the object. Error: {str(e)}')
Use go get to add the aws-sdk-go-v2 packages to your Go project:
go get github.com/aws/aws-sdk-go-v2
go get github.com/aws/aws-sdk-go-v2/config
go get github.com/aws/aws-sdk-go-v2/credentials
go get github.com/aws/aws-sdk-go-v2/service/s3
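These go get commands assume your project already has a go.mod file. If it does not, initialize a Go module first; the module path below is only an example:

go mod init example.com/r2-get-object
go mod tidy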
Run the following Go application as a script with go run main.go. Ensure you change objectKey to point to an existing file in your R2 bucket.
package main
import (
    "context"
    "crypto/sha256"
    "encoding/hex"
    "fmt"
    "io"
    "log"
    "os"

    "github.com/aws/aws-sdk-go-v2/aws"
    "github.com/aws/aws-sdk-go-v2/config"
    "github.com/aws/aws-sdk-go-v2/credentials"
    "github.com/aws/aws-sdk-go-v2/service/s3"
)

func main() {
    // Load environment variables
    accountID := os.Getenv("R2_ACCOUNT_ID")
    accessKeyID := os.Getenv("R2_ACCESS_KEY_ID")
    secretAccessKey := os.Getenv("R2_SECRET_ACCESS_KEY")
    bucketName := os.Getenv("R2_BUCKET_NAME")

    // Hash the secret access key
    hasher := sha256.New()
    hasher.Write([]byte(secretAccessKey))
    hashedSecretKey := hex.EncodeToString(hasher.Sum(nil))

    // Configure the S3 client for Cloudflare R2
    r2Resolver := aws.EndpointResolverWithOptionsFunc(func(service, region string, options ...interface{}) (aws.Endpoint, error) {
        return aws.Endpoint{
            URL: fmt.Sprintf("https://%s.r2.cloudflarestorage.com", accountID),
        }, nil
    })

    cfg, err := config.LoadDefaultConfig(context.TODO(),
        config.WithEndpointResolverWithOptions(r2Resolver),
        config.WithCredentialsProvider(credentials.NewStaticCredentialsProvider(accessKeyID, hashedSecretKey, "")),
        config.WithRegion("auto"), // Cloudflare R2 doesn't use regions, but this is required by the SDK
    )
    if err != nil {
        log.Fatalf("Unable to load SDK config, %v", err)
    }

    // Create an S3 client
    client := s3.NewFromConfig(cfg)

    // Specify the object key
    objectKey := "2024/08/02/ingested_0001.parquet"

    // Fetch the object
    output, err := client.GetObject(context.TODO(), &s3.GetObjectInput{
        Bucket: aws.String(bucketName),
        Key:    aws.String(objectKey),
    })
    if err != nil {
        log.Fatalf("Unable to fetch object, %v", err)
    }
    defer output.Body.Close()

    fmt.Println("Successfully fetched the object")

    // Process the object content as needed
    // For example, to save the file:
    // file, err := os.Create("ingested_0001.parquet")
    // if err != nil {
    //     log.Fatalf("Unable to create file, %v", err)
    // }
    // defer file.Close()
    // _, err = io.Copy(file, output.Body)
    // if err != nil {
    //     log.Fatalf("Unable to write file, %v", err)
    // }

    // Or to read the content:
    content, err := io.ReadAll(output.Body)
    if err != nil {
        log.Fatalf("Unable to read object content, %v", err)
    }
    fmt.Printf("Object content length: %d bytes\n", len(content))
}
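The same token-derived credentials work with any S3-compatible client. As a quick way to sanity-check them outside of application code, the sketch below uses the AWS CLI, assuming it is installed and that sha256sum is available to derive the hashed secret; the object key is the same sample key used in the examples above:

# Export the credentials in the form the AWS CLI expects (the secret is the SHA-256 hash of the token value)
export AWS_ACCESS_KEY_ID=$R2_ACCESS_KEY_ID
export AWS_SECRET_ACCESS_KEY=$(printf '%s' "$R2_SECRET_ACCESS_KEY" | sha256sum | cut -d' ' -f1)

# Fetch the sample object from the R2 S3-compatible endpoint and save it locally
aws s3api get-object \
  --endpoint-url "https://$R2_ACCOUNT_ID.r2.cloudflarestorage.com" \
  --region auto \
  --bucket "$R2_BUCKET_NAME" \
  --key "2024/08/02/ingested_0001.parquet" \
  ingested_0001.parquet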