Install the AWS SDK for PHP
Ensure that PHP 5.5 or later is installed.
Other recommended configurations:
- Use cURL 7.16.2 or later
- Use OPcache
- Do not use Xdebug
- Use Composer for automatic dependency loading
- For more information, refer to: https://docs.aws.amazon.com/sdk-for-php/v3/developer-guide/getting-started_requirements.html
Add AWS SDK for PHP
composer require aws/aws-sdk-php=3.281.15
For each of the following code examples, save the code as main.php, then run:
php main.php
UploadObject
Client-side upload
Create main.php
<?php
require 'vendor/autoload.php';
use Aws\S3\S3Client;
use Aws\Credentials\Credentials;
$s3Client = new S3Client([
"region" => "ap-southeast-2", // Asia Pacific (Hanoi) RegionID
"endpoint" => "https://mos.ap-southeast-2.sufybkt.com", // Asia Pacific (Hanoi) Endpoint
"credentials" => new Credentials("<AccessKey>", "<SecretKey>"),
]);
$request = $s3Client->createPresignedRequest($s3Client->getCommand("PutObject", ["Bucket" => "<Bucket>", "Key" => "<Key>"]), "+1 hours");
echo $request->getUri() . "\n";
This code generates a pre-signed client-side upload URL, valid for 1 hour, which the client can use to send a PUT request and upload a file within the expiration time.
The following is an example of uploading a file using curl:
curl -X PUT --upload-file "<path/to/file>" "<presigned url>"
Server-side upload
PutObject(file)
Create main.php
<?php
require 'vendor/autoload.php';
use Aws\S3\S3Client;
use Aws\Credentials\Credentials;
use Aws\Exception\AwsException;
$s3Client = new S3Client([
"region" => "ap-southeast-2", // Asia Pacific (Hanoi) RegionID
"endpoint" => "https://mos.ap-southeast-2.sufybkt.com", // Asia Pacific (Hanoi) Endpoint
"credentials" => new Credentials("<AccessKey>", "<SecretKey>"),
]);
try {
$putObjectResponse = $s3Client->putObject(["Bucket" => "<Bucket>", "Key" => "<Key>", "SourceFile" => "<path/to/upload>"]);
echo "ETag: " . $putObjectResponse["ETag"] . "\n";
} catch (AwsException $e) {
echo "Error: " . $e . "\n";
}
PutObject(stream)
Create main.php
<?php
require 'vendor/autoload.php';
use Aws\S3\S3Client;
use Aws\Credentials\Credentials;
use Aws\Exception\AwsException;
$s3Client = new S3Client([
"region" => "ap-southeast-2", // Asia Pacific (Hanoi) RegionID
"endpoint" => "https://mos.ap-southeast-2.sufybkt.com", // Asia Pacific (Hanoi) Endpoint
"credentials" => new Credentials("<AccessKey>", "<SecretKey>"),
]);
try {
$putObjectResponse = $s3Client->putObject(["Bucket" => "<Bucket>", "Key" => "<Key>", "Body" => "Hello, SUFY S3!"]);
echo "ETag: " . $putObjectResponse["ETag"] . "\n";
} catch (AwsException $e) {
echo "Error: " . $e . "\n";
}
MultipartUpload(file)
Create main.php
<?php
require 'vendor/autoload.php';
use Aws\S3\S3Client;
use Aws\Credentials\Credentials;
use Aws\Exception\AwsException;
$s3Client = new S3Client([
"region" => "ap-southeast-2", // Asia Pacific (Hanoi) RegionID
"endpoint" => "https://mos.ap-southeast-2.sufybkt.com", // Asia Pacific (Hanoi) Endpoint
"credentials" => new Credentials("<AccessKey>", "<SecretKey>"),
]);
const PART_SIZE = 5 * 1024 * 1024; // part size is 5 MB
try {
$createMultipartUploadResponse = $s3Client->createMultipartUpload(["Bucket" => "<Bucket>", "Key" => "<Key>"]);
$file = fopen("<path/to/upload>", "rb");
if (!$file) {
return;
}
$parts = array();
try {
// The example given here is a serial multipart upload. You can modify it to perform a parallel multipart upload to further improve the upload speed (see the sketch after this example).
for ($partNumber = 1;; $partNumber++) {
$partBody = fread($file, PART_SIZE);
if (!$partBody) {
break;
}
$uploadPartResponse = $s3Client->uploadPart([
"Bucket" => $createMultipartUploadResponse["Bucket"],
"Key" => $createMultipartUploadResponse["Key"],
"UploadId" => $createMultipartUploadResponse["UploadId"],
"PartNumber" => $partNumber,
"Body" => $partBody,
]);
array_push($parts, ["ETag" => $uploadPartResponse["ETag"], "PartNumber" => $partNumber]);
}
} finally {
if ($file) {
fclose($file);
}
}
$completeMultipartUploadResponse = $s3Client->completeMultipartUpload([
"Bucket" => $createMultipartUploadResponse["Bucket"],
"Key" => $createMultipartUploadResponse["Key"],
"UploadId" => $createMultipartUploadResponse["UploadId"],
"MultipartUpload" => [
"Parts" => $parts,
]
]);
echo 'ETag: ' . $completeMultipartUploadResponse["ETag"] . "\n";
} catch (AwsException $e) {
echo "Error: " . $e . "\n";
}
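As an alternative to writing the part loop by hand, the SDK also provides a MultipartUploader helper that splits the file and can upload parts in parallel. The following is a minimal sketch using the same client configuration as above; the part_size and concurrency values are illustrative assumptions, not required settings.
<?php
require 'vendor/autoload.php';
use Aws\S3\S3Client;
use Aws\S3\MultipartUploader;
use Aws\Credentials\Credentials;
use Aws\Exception\MultipartUploadException;
$s3Client = new S3Client([
    "region" => "ap-southeast-2", // Asia Pacific (Hanoi) RegionID
    "endpoint" => "https://mos.ap-southeast-2.sufybkt.com", // Asia Pacific (Hanoi) Endpoint
    "credentials" => new Credentials("<AccessKey>", "<SecretKey>"),
]);
$uploader = new MultipartUploader($s3Client, "<path/to/upload>", [
    "bucket" => "<Bucket>",
    "key" => "<Key>",
    "part_size" => 5 * 1024 * 1024, // illustrative part size; must be at least 5 MB
    "concurrency" => 5,             // number of parts uploaded in parallel (illustrative)
]);
try {
    $result = $uploader->upload();
    echo "ETag: " . $result["ETag"] . "\n";
} catch (MultipartUploadException $e) {
    echo "Error: " . $e->getMessage() . "\n";
}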
UploadObject
Create main.php
<?php
require 'vendor/autoload.php';
use Aws\S3\S3Client;
use Aws\Credentials\Credentials;
use Aws\Exception\AwsException;
$s3Client = new S3Client([
"region" => "ap-southeast-2", // Asia Pacific (Hanoi) RegionID
"endpoint" => "https://mos.ap-southeast-2.sufybkt.com", // Asia Pacific (Hanoi) Endpoint
"credentials" => new Credentials("<AccessKey>", "<SecretKey>"),
]);
$file = fopen("<path/to/upload>", "rb") or die("Unable to open file");
try {
$resp = $s3Client->upload("<Bucket>", "<Key>", $file);
echo 'ETag: ' . $resp["ETag"] . "\n";
} catch (AwsException $e) {
echo "Error: " . $e . "\n";
} finally {
fclose($file);
}
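The upload() helper above issues a single PutObject for small bodies and switches to a multipart upload for larger ones. If needed, the threshold and parallelism can be tuned via the options argument; the following is a minimal sketch reusing the $s3Client and $file from the example above. The option names come from the SDK's ObjectUploader, and the values shown are illustrative assumptions.
$resp = $s3Client->upload("<Bucket>", "<Key>", $file, "private", [
    "mup_threshold" => 16 * 1024 * 1024, // switch to multipart upload above this size (illustrative)
    "part_size" => 5 * 1024 * 1024,      // part size used when multipart upload kicks in
    "concurrency" => 3,                  // parts uploaded in parallel (illustrative)
]);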
GetObject
Client-side get object
Create main.php
<?php
require 'vendor/autoload.php';
use Aws\S3\S3Client;
use Aws\Credentials\Credentials;
$s3Client = new S3Client([
"region" => "ap-southeast-2", // Asia Pacific (Hanoi) RegionID
"endpoint" => "https://mos.ap-southeast-2.sufybkt.com", // Asia Pacific (Hanoi) Endpoint
"credentials" => new Credentials("<AccessKey>", "<SecretKey>"),
]);
$request = $s3Client->createPresignedRequest($s3Client->getCommand("GetObject", ["Bucket" => "<Bucket>", "Key" => "<Key>"]), "+1 hours");
echo $request->getUri() . "\n";
This code generates a pre-signed client-side download URL, valid for 1 hour, which the client can use to send a GET request and download the file within the expiration time.
The following is an example of downloading a file using curl:
curl -o "<path/to/download>" "<presigned url>"
Server-side get object
Create main.php
<?php
require 'vendor/autoload.php';
use Aws\S3\S3Client;
use Aws\Credentials\Credentials;
use Aws\Exception\AwsException;
$s3Client = new S3Client([
"region" => "ap-southeast-2", // Asia Pacific (Hanoi) RegionID
"endpoint" => "https://mos.ap-southeast-2.sufybkt.com", // Asia Pacific (Hanoi) Endpoint
"credentials" => new Credentials("<AccessKey>", "<SecretKey>"),
]);
try {
$getObjectResponse = $s3Client->getObject(["Bucket" => "<Bucket>", "Key" => "<Key>", "SaveAs" => "<path/to/download>"]);
echo "ETag: " . $getObjectResponse["ETag"] . "\n";
} catch (AwsException $e) {
echo "Error: " . $e . "\n";
}
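Instead of saving the object to a local file with SaveAs, the response body can also be consumed as a stream, which avoids loading the whole object into memory. The following is a minimal sketch using the same client configuration as above.
<?php
require 'vendor/autoload.php';
use Aws\S3\S3Client;
use Aws\Credentials\Credentials;
use Aws\Exception\AwsException;
$s3Client = new S3Client([
    "region" => "ap-southeast-2", // Asia Pacific (Hanoi) RegionID
    "endpoint" => "https://mos.ap-southeast-2.sufybkt.com", // Asia Pacific (Hanoi) Endpoint
    "credentials" => new Credentials("<AccessKey>", "<SecretKey>"),
]);
try {
    $getObjectResponse = $s3Client->getObject(["Bucket" => "<Bucket>", "Key" => "<Key>"]);
    $body = $getObjectResponse["Body"]; // a PSR-7 stream
    while (!$body->eof()) {
        echo $body->read(8192); // process the object in chunks
    }
} catch (AwsException $e) {
    echo "Error: " . $e . "\n";
}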
ObjectOperations
HeadObject
Create main.php
<?php
require 'vendor/autoload.php';
use Aws\S3\S3Client;
use Aws\Credentials\Credentials;
use Aws\Exception\AwsException;
$s3Client = new S3Client([
"region" => "ap-southeast-2", // Asia Pacific (Hanoi) RegionID
"endpoint" => "https://mos.ap-southeast-2.sufybkt.com", // Asia Pacific (Hanoi) Endpoint
"credentials" => new Credentials("<AccessKey>", "<SecretKey>"),
]);
try {
$headObjectResponse = $s3Client->headObject(["Bucket" => "<Bucket>", "Key" => "<Key>"]);
echo "ETag: " . $headObjectResponse["ETag"] . "\n";
} catch (AwsException $e) {
echo "Error: " . $e . "\n";
}
ChangeStorageClass
Create main.php
<?php
require 'vendor/autoload.php';
use Aws\S3\S3Client;
use Aws\Credentials\Credentials;
use Aws\Exception\AwsException;
$s3Client = new S3Client([
"region" => "ap-southeast-2", // Asia Pacific (Hanoi) RegionID
"endpoint" => "https://mos.ap-southeast-2.sufybkt.com", // Asia Pacific (Hanoi) Endpoint
"credentials" => new Credentials("<AccessKey>", "<SecretKey>"),
]);
try {
$s3Client->copyObject([
"Bucket" => "<Bucket>",
"Key" => "<Key>",
"MetadataDirective" => "REPLACE",
"CopySource" => "/<Bucket>/<Key>",
"StorageClass" => "GLACIER",
]);
echo "Done\n";
} catch (AwsException $e) {
echo "Error: " . $e . "\n";
}
CopyObject
Create main.php
<?php
require 'vendor/autoload.php';
use Aws\S3\S3Client;
use Aws\Credentials\Credentials;
use Aws\Exception\AwsException;
$s3Client = new S3Client([
"region" => "ap-southeast-2", // Asia Pacific (Hanoi) RegionID
"endpoint" => "https://mos.ap-southeast-2.sufybkt.com", // Asia Pacific (Hanoi) Endpoint
"credentials" => new Credentials("<AccessKey>", "<SecretKey>"),
]);
try {
$s3Client->copyObject([
"Bucket" => "<ToBucket>",
"Key" => "<ToKey>",
"MetadataDirective" => "COPY",
"CopySource" => "/<FromBucket>/<FromKey>",
]);
echo "Done\n";
} catch (AwsException $e) {
echo "Error: " . $e . "\n";
}
CopyObject(> 5GB)
Create main.php
<?php
require 'vendor/autoload.php';
use Aws\S3\S3Client;
use Aws\Credentials\Credentials;
use Aws\Exception\AwsException;
$s3Client = new S3Client([
"region" => "ap-southeast-2", // Asia Pacific (Hanoi) RegionID
"endpoint" => "https://mos.ap-southeast-2.sufybkt.com", // Asia Pacific (Hanoi) Endpoint
"credentials" => new Credentials("<AccessKey>", "<SecretKey>"),
]);
const PART_SIZE = 5 * 1024 * 1024; // part size is 5 MB
try {
$headObjectRequest = ["Bucket" => "<FromBucket>", "Key" => "<FromKey>"];
$headObjectResponse = $s3Client->headObject($headObjectRequest);
$createMultipartUploadResponse = $s3Client->createMultipartUpload(["Bucket" => "<ToBucket>", "Key" => "<ToKey>"]);
$parts = array();
$copied = 0;
// The example given here is a serial multipart copy. You can modify it to perform a parallel multipart copy to further improve the copy speed (see the sketch after this example).
for ($partNumber = 1; $headObjectResponse["ContentLength"] > $copied; $partNumber++) {
$partSize = min(PART_SIZE, $headObjectResponse["ContentLength"] - $copied);
$uploadPartCopyResponse = $s3Client->uploadPartCopy([
"Bucket" => $createMultipartUploadResponse["Bucket"],
"Key" => $createMultipartUploadResponse["Key"],
"UploadId" => $createMultipartUploadResponse["UploadId"],
"PartNumber" => $partNumber,
"CopySource" => "/" . $headObjectRequest["Bucket"] . "/" . $headObjectRequest["Key"],
"CopySourceRange" => "bytes=" . $copied . "-" . ($copied + $partSize),
]);
array_push($parts, ["ETag" => $uploadPartCopyResponse["CopyPartResult"]["ETag"], "PartNumber" => $partNumber]);
$copied += $partSize;
}
$completeMultipartUploadResponse = $s3Client->completeMultipartUpload([
"Bucket" => $createMultipartUploadResponse["Bucket"],
"Key" => $createMultipartUploadResponse["Key"],
"UploadId" => $createMultipartUploadResponse["UploadId"],
"MultipartUpload" => [
"Parts" => $parts,
]
]);
echo 'ETag: ' . $completeMultipartUploadResponse["ETag"] . "\n";
} catch (AwsException $e) {
echo "Error: " . $e . "\n";
}
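The following is a minimal sketch of the parallel variant referenced in the comment above. It reuses $s3Client, $headObjectRequest, $headObjectResponse, and $createMultipartUploadResponse from the example and relies on the SDK's async command variants together with Guzzle promises. For very large objects you may want to bound concurrency (for example with Aws\CommandPool) rather than issuing every part at once.
use GuzzleHttp\Promise\Utils; // place with the other use statements at the top of main.php
$promises = [];
$copied = 0;
for ($partNumber = 1; $headObjectResponse["ContentLength"] > $copied; $partNumber++) {
    $partSize = min(PART_SIZE, $headObjectResponse["ContentLength"] - $copied);
    // uploadPartCopyAsync returns a promise instead of blocking on each part
    $promises[$partNumber] = $s3Client->uploadPartCopyAsync([
        "Bucket" => $createMultipartUploadResponse["Bucket"],
        "Key" => $createMultipartUploadResponse["Key"],
        "UploadId" => $createMultipartUploadResponse["UploadId"],
        "PartNumber" => $partNumber,
        "CopySource" => "/" . $headObjectRequest["Bucket"] . "/" . $headObjectRequest["Key"],
        "CopySourceRange" => "bytes=" . $copied . "-" . ($copied + $partSize - 1),
    ]);
    $copied += $partSize;
}
// Wait for every part copy to finish; this throws if any part fails
$results = Utils::all($promises)->wait();
$parts = array();
foreach ($results as $partNumber => $result) {
    array_push($parts, ["ETag" => $result["CopyPartResult"]["ETag"], "PartNumber" => $partNumber]);
}
// Then call completeMultipartUpload with $parts exactly as in the example above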
DeleteObject
Create main.php
<?php
require 'vendor/autoload.php';
use Aws\S3\S3Client;
use Aws\Credentials\Credentials;
use Aws\Exception\AwsException;
$s3Client = new S3Client([
"region" => "ap-southeast-2", // Asia Pacific (Hanoi) RegionID
"endpoint" => "https://mos.ap-southeast-2.sufybkt.com", // Asia Pacific (Hanoi) Endpoint
"credentials" => new Credentials("<AccessKey>", "<SecretKey>"),
]);
try {
$s3Client->deleteObject(["Bucket" => "<Bucket>", "Key" => "<Key>"]);
echo "Done\n";
} catch (AwsException $e) {
echo "Error: " . $e . "\n";
}
ListObjects
Create main.php
<?php
require 'vendor/autoload.php';
use Aws\S3\S3Client;
use Aws\Credentials\Credentials;
use Aws\Exception\AwsException;
$s3Client = new S3Client([
"region" => "ap-southeast-2", // Asia Pacific (Hanoi) RegionID
"endpoint" => "https://mos.ap-southeast-2.sufybkt.com", // Asia Pacific (Hanoi) Endpoint
"credentials" => new Credentials("<AccessKey>", "<SecretKey>"),
]);
try {
$listObjectsV2Response = $s3Client->listObjectsV2(["Bucket" => "<Bucket>", "Prefix" => "<KeyPrefix>"]);
foreach ($listObjectsV2Response["Contents"] as $content) {
echo "Key: " . $content["Key"] . "\n";
echo "ETag: " . $content["ETag"] . "\n";
}
} catch (AwsException $e) {
echo "Error: " . $e . "\n";
}
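listObjectsV2 returns at most 1,000 keys per call. To walk a larger bucket, the SDK's paginator can follow the continuation token automatically; the following is a minimal sketch using the same client configuration as above.
<?php
require 'vendor/autoload.php';
use Aws\S3\S3Client;
use Aws\Credentials\Credentials;
use Aws\Exception\AwsException;
$s3Client = new S3Client([
    "region" => "ap-southeast-2", // Asia Pacific (Hanoi) RegionID
    "endpoint" => "https://mos.ap-southeast-2.sufybkt.com", // Asia Pacific (Hanoi) Endpoint
    "credentials" => new Credentials("<AccessKey>", "<SecretKey>"),
]);
try {
    $paginator = $s3Client->getPaginator("ListObjectsV2", ["Bucket" => "<Bucket>", "Prefix" => "<KeyPrefix>"]);
    foreach ($paginator as $page) {
        foreach ($page["Contents"] ?? [] as $content) { // Contents is absent when a page has no matching keys
            echo "Key: " . $content["Key"] . "\n";
            echo "ETag: " . $content["ETag"] . "\n";
        }
    }
} catch (AwsException $e) {
    echo "Error: " . $e . "\n";
}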
DeleteObjects
Create main.php
<?php
require 'vendor/autoload.php';
use Aws\S3\S3Client;
use Aws\Credentials\Credentials;
use Aws\Exception\AwsException;
$s3Client = new S3Client([
"region" => "ap-southeast-2", // Asia Pacific (Hanoi) RegionID
"endpoint" => "https://mos.ap-southeast-2.sufybkt.com", // Asia Pacific (Hanoi) Endpoint
"credentials" => new Credentials("<AccessKey>", "<SecretKey>"),
]);
try {
$s3Client->deleteObjects([
"Bucket" => "<Bucket>",
"Delete" => [
"Objects" => [
["Key" => "<Key1>"],
["Key" => "<Key2>"],
["Key" => "<Key3>"],
]
],
]);
echo "Done\n";
} catch (AwsException $e) {
echo "Error: " . $e . "\n";
}