I have a PHP script where I store a file uploaded by the user in a tmp folder and then move it to S3. I also check whether the emails they entered are valid in the database. I am using chunking so that large files can be uploaded to the server quickly. My post_max_size is 8M; however, I am only able to upload files that are very small. If I upload a 7M file, it doesn't work. Am I doing something incorrectly? I'm fairly new to PHP, so I'd appreciate the help!
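In case it's relevant, here is a minimal sketch of a standalone check I could run separately from the upload flow to print the limits PHP is actually enforcing and the per-file error code (the field name 'file' matches the code below; the script itself is just a throwaway diagnostic):
<?php
// quick diagnostic: print the limits PHP is actually enforcing
echo "post_max_size: " . ini_get('post_max_size') . "\n";
echo "upload_max_filesize: " . ini_get('upload_max_filesize') . "\n";
// when a file is posted, report its built-in error code
// (0 = UPLOAD_ERR_OK, 1 = UPLOAD_ERR_INI_SIZE, 2 = UPLOAD_ERR_FORM_SIZE, ...)
if (isset($_FILES['file'])) {
    echo "error code: " . $_FILES['file']['error'] . "\n";
    echo "size (bytes): " . $_FILES['file']['size'] . "\n";
}
?>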
This is my ajax call to the php file:
$.ajax({
    type: "POST",
    url: "../FileDrop/dbSystem.php",
    cache: false,
    processData: false,
    contentType: false,
    data: formData,
    success: function(result) {
        result = JSON.parse(result);
        if (result.validity === "valid emails") {
            resetInputs();
            location.reload();
            $(".outputDiv").show();
        }
        else if (result.validity === "invalid emails") {
            var tagsBrackets = result.emails.toString().replace(/[\[\]']+/g, '');
            var tagsQuotes = tagsBrackets.replace(/['"]+/g, '');
            var tagsInvalid = tagsQuotes.replace(/,/g, ", ");
            $('#alertModal').modal({show:true});
            document.getElementById('invalid').textContent = tagsInvalid;
        }
        else {
            $('#alertModalFile').modal({show:true});
            document.getElementById('userFile').value = null;
        }
    }
});
This is my dbSystem.php file:
<?php
//ini_set('post_max_size', '1024M');
//ini_set('upload_max_filesize', '1024M');
$thisPage = "dbSystem.php";
include_once 'validateAccess.php';
include_once "../Resources/s3Functions.php";
$output = [];
$fileName = $_FILES['file']['name'];
$currentUserID = $userID;
$currentUserEmail = $userEmail;
mysqli_report(MYSQLI_REPORT_ERROR | MYSQLI_REPORT_STRICT);
ini_set('display_errors', 1);
ini_set('display_startup_errors', 1);
error_reporting(E_ALL);
// create connection
$conn = new mysqli($servername, $username, $password, $db);
$tags = json_decode($_POST['tags']);
$result = 'SELECT distinct email_address FROM nva_users where email_address in (?' . str_repeat(',?', count($tags) - 1) . ') and is_active = "Y"';
$rows = $conn->prepare($result);
// bind values from $tags to the query
$rows->bind_param(str_repeat('s', count($tags)), ...$tags);
// execute
$rows->execute();
// bind a variable for the result
$rows->bind_result($email);
// retrieve the emails in a new array
$dbMails = [];
while ($rows->fetch()) {
    $dbMails[] = $email;
}
// retrieve ids of users in a new array
$receiverIds = [];
foreach ($dbMails as $mails) {
    $dbIds = "SELECT id FROM nva_users where email_address='$mails'";
    $queryResult = mysqli_query($conn, $dbIds);
    while ($row = $queryResult->fetch_row()) {
        $receiverIds[] = $row;
    }
}
// anything that's in $tags but not in $dbMails is invalid
$invalidEmails = array_diff($tags, $dbMails);
if ($invalidEmails) {
    $output["validity"] = "invalid emails";
    $output["emails"] = array_values($invalidEmails);
} else {
    $output["validity"] = "valid emails";
    $receiverIds = str_replace(array('[', ']'), '', str_replace(array('"', '"'), '', str_replace(array(','), ', ', json_encode($receiverIds))));
    $mailInsert = str_replace(array('[', ']'), '', str_replace(array('"', '"'), '', str_replace(array(','), ', ', json_encode($dbMails))));
    $insertQuery = "INSERT INTO filedrop_logs (sender_id, sender_email, receiver_ids, receiver_emails, file_name, s3_upload_status) VALUES ('$currentUserID', '$userEmail', '$receiverIds', '$mailInsert', '$fileName', 'N')";
    $sql = "DELETE FROM filedrop_logs WHERE sender_id='$currentUserID'";
    if (mysqli_query($conn, $insertQuery)) {
        getAffectedRows($conn);
    }
}
if (array_key_exists('file', $_FILES)) {
    if ($_FILES['file']['error'] === UPLOAD_ERR_OK) {
        if ($_FILES['file']['size'] <= ini_get('post_max_size')) {
            if (isset($_POST) && $output["validity"] === "valid emails") {
                $fileName = basename($_FILES["file"]["name"]);
                $error = $_FILES["file"]["error"];
                $tmpName = $_FILES["file"]["tmp_name"];
                if (isset($_FILES["fileName"]) && $fileName != '' && $tmpName != '' && file_exists(sys_get_temp_dir())) {
                    $separator = DIRECTORY_SEPARATOR;
                    $newDir = sys_get_temp_dir() . $separator . "FileDrop" . microtime(true);
                    if (!file_exists($newDir)) {
                        mkdir($newDir, 0777, true); // creates temp FileDrop directory
                        $tempFilePath = $newDir . $separator . $fileName; // creates temp file inside FileDrop directory
                        // dealing with the chunks
                        $chunk = isset($_REQUEST["chunk"]) ? intval($_REQUEST["chunk"]) : 0;
                        $chunks = isset($_REQUEST["chunks"]) ? intval($_REQUEST["chunks"]) : 0;
                        $out = @fopen("{$tempFilePath}.part", $chunk == 0 ? "wb" : "ab");
                        if ($out) {
                            $in = @fopen($tmpName, "rb");
                            if ($in) {
                                while ($buff = fread($in, 4096)) {
                                    fwrite($out, $buff);
                                }
                            } else {
                                verbose(0, "Failed to open input stream");
                            }
                            @fclose($in);
                            @fclose($out);
                            //deleteDir($newDir);
                        } else {
                            verbose(0, "Failed to open output stream");
                        }
                        // check if file was uploaded
                        if (!$chunks || $chunk == $chunks - 1) {
                            rename("{$tempFilePath}.part", $tempFilePath);
                            $s3FileName = "FileDrop" . substr($newDir, 4) . $separator . $fileName;
                            $result = putFileToS3($s3FileName, $tempFilePath, 'public-read');
                            deleteDir($newDir);
                        }
                        // verbose(1, "Upload OK");
                    }
                }
            }
        } else {
            $output = "Upload failed";
        }
    } else {
        $output = "Upload failed";
    }
} else {
    $output = "Upload failed";
}
// the response function
function verbose($ok = 1, $info = "")
{
// failure to upload throws 400 error
if ($ok == 0) {
http_response_code(400);
}
die(json_encode(["ok" => $ok, "info" => $info]));
}
function deleteDir($dirPath)
{
if (!is_dir($dirPath)) {
if (file_exists($dirPath) !== false) {
unlink($dirPath);
}
return;
}
if ($dirPath[strlen($dirPath) - 1] != '/') {
$dirPath .= '/';
}
$files = glob($dirPath . '*', GLOB_MARK);
foreach ($files as $file) {
if (is_dir($file)) {
deleteDir($file);
} else {
unlink($file);
}
}
rmdir($dirPath);
}
echo json_encode($output);
?>
Source: https://stackoverflow.com/questions/67942300/uploading-chunked-files-to-s3-with-php