
Using Resumable.js with server-side PHP to implement large-file uploads with resume support

For a PHP developer, file upload is a routine task. Small files are easy enough to handle, but what about files of several hundred MB, or even a few GB? If you treat them the same way as small files, what happens when the upload times out halfway through, or the power goes out? That is exactly when you need resumable uploads. So, fellow PHP developers, how would you implement this? In this post I (宅鸟) share a practical, workable solution based on a real project, for your reference. Enough talk; on to the good stuff.


Resumable.js is a JavaScript library that uses the HTML5 File API to give an application simultaneous multi-file upload, stable transfers, and resumable uploads. It adds fault tolerance to large-file uploads over HTTP by splitting each file into small chunks: when a chunk fails, only that failed part is re-uploaded, and after a dropped network connection is restored the upload resumes automatically. The library also lets users pause, resume, and re-upload files.

Apart from the HTML5 File API (used to split files into chunks), Resumable.js does not depend on any other library.
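To make the chunking concrete, here is a quick sketch of the arithmetic behind it. The 1 MB chunk size and the 3.5 MB file are just example numbers; the logic mirrors the bootstrap()/chunk-boundary code in the listing below.

    // Sketch: how a file is cut into chunks (example numbers, default 1 MB chunkSize)
    var chunkSize  = 1 * 1024 * 1024;                                // 1 MB per chunk
    var fileSize   = 3.5 * 1024 * 1024;                              // e.g. a 3.5 MB file
    var chunkCount = Math.max(Math.floor(fileSize / chunkSize), 1);  // => 3 chunks
    for (var offset = 0; offset < chunkCount; offset++) {
      var startByte = offset * chunkSize;
      var endByte   = Math.min(fileSize, (offset + 1) * chunkSize);
      // The last chunk absorbs the remainder, so its size stays between
      // chunkSize and 2*chunkSize (here chunk 3 covers bytes 2 MB .. 3.5 MB)
      if (fileSize - endByte < chunkSize) endByte = fileSize;
      console.log('chunk ' + (offset + 1) + ': bytes ' + startByte + ' - ' + endByte);
    }
    // When an interrupted upload is retried, chunks the server already has are
    // skipped and only the missing chunks are sent again (see testChunks below).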

For a detailed introduction to Resumable.js, see the official documentation.

You can also download the library and read through its source.

Below is the file list after I streamlined the library and implemented the features above:

(screenshot: project file list)

The code of resumable.js:

/*
* MIT Licensed
* http://www.23developer.com/opensource
* http://github.com/23/resumable.js
* Steffen Tiedemann Christensen, steffen@23company.com
*/
var Resumable = function(opts){
  if ( !(this instanceof Resumable ) ) {
    return new Resumable( opts );
  }
  // SUPPORTED BY BROWSER?
  // Check if these features are support by the browser:
  // - File object type
  // - Blob object type
  // - FileList object type
  // - slicing files
  this.support = (
                 (typeof(File)!=='undefined')
                 &&
                 (typeof(Blob)!=='undefined')
                 &&
                 (typeof(FileList)!=='undefined')
                 &&
                 (!!Blob.prototype.webkitSlice||!!Blob.prototype.mozSlice||Blob.prototype.slice||false)
                 );
  if(!this.support) return(false);
  // PROPERTIES
  var $ = this;
  $.files = [];
  $.defaults = {
    chunkSize:1*1024*1024,
    forceChunkSize:false,
    simultaneousUploads:3,
    fileParameterName:'file',
    throttleProgressCallbacks:0.5,
    query:{},
    headers:{},
    preprocess:null,
    method:'multipart',
    prioritizeFirstAndLastChunk:false,
    target:'/',
    testChunks:true,
    generateUniqueIdentifier:null,
    maxChunkRetries:undefined,
    chunkRetryInterval:undefined,
    permanentErrors:[415, 500, 501],
    maxFiles:undefined,
    maxFilesErrorCallback:function (files, errorCount) {
      var maxFiles = $.getOpt('maxFiles');
      alert('Please upload ' + maxFiles + ' file' + (maxFiles === 1 ? '' : 's') + ' at a time.');
    },
    minFileSize:undefined,
    minFileSizeErrorCallback:function(file, errorCount) {
      alert(file.fileName +' is too small, please upload files larger than ' + $h.formatSize($.getOpt('minFileSize')) + '.');
    },
    maxFileSize:undefined,
    maxFileSizeErrorCallback:function(file, errorCount) {
      alert(file.fileName +' is too large, please upload files less than ' + $h.formatSize($.getOpt('maxFileSize')) + '.');
    },
    fileType: [],
    fileTypeErrorCallback: function(file, errorCount) {
      alert(file.fileName +' has type not allowed, please upload files of type ' + $.getOpt('fileType') + '.');
    }
  };
  $.opts = opts||{};
  $.getOpt = function(o) {
    var $this = this;
    // Get multiple option if passed an array
    if(o instanceof Array) {
      var options = {};
      $h.each(o, function(option){
        options[option] = $this.getOpt(option);
      });
      return options;
    }
    // Otherwise, just return a simple option
    if ($this instanceof ResumableChunk) {
      if (typeof $this.opts[o] !== 'undefined') { return $this.opts[o]; }
      else { $this = $this.fileObj; }
    }
    if ($this instanceof ResumableFile) {
      if (typeof $this.opts[o] !== 'undefined') { return $this.opts[o]; }
      else { $this = $this.resumableObj; }
    }
    if ($this instanceof Resumable) {
      if (typeof $this.opts[o] !== 'undefined') { return $this.opts[o]; }
      else { return $this.defaults[o]; }
    }
  };
  // EVENTS
  // catchAll(event, ...)
  // fileSuccess(file), fileProgress(file), fileAdded(file, event), fileRetry(file), fileError(file, message),
  // complete(), progress(), error(message, file), pause()
  $.events = [];
  $.on = function(event,callback){
    $.events.push(event.toLowerCase(), callback);
  };
  $.fire = function(){
    // `arguments` is an object, not array, in FF, so:
    var args = [];
    for (var i=0; i<arguments.length; i++) args.push(arguments[i]);
    // Find event listeners, and support pseudo-event `catchAll`
    var event = args[0].toLowerCase();
    for (var i=0; i<=$.events.length; i+=2) {
      if($.events[i]==event) $.events[i+1].apply($,args.slice(1));
      if($.events[i]=='catchall') $.events[i+1].apply(null,args);
    }
    if(event=='fileerror') $.fire('error', args[2], args[1]);
    if(event=='fileprogress') $.fire('progress');
  };
  // INTERNAL HELPER METHODS: the $h utility object ($h.each, $h.contains,
  // $h.formatSize, $h.generateUniqueIdentifier, $h.stopEvent) and the
  // onDragOver/onDrop handlers used by assignDrop() are omitted from this
  // streamlined listing -- see the full resumable.js source for them.
  // INTERNAL METHODS
  var appendFilesFromFileList = function(fileList, event){
    // Check for uploading too many files
    var errorCount = 0;
    var o = $.getOpt(['maxFiles', 'minFileSize', 'maxFileSize', 'maxFilesErrorCallback', 'minFileSizeErrorCallback', 'maxFileSizeErrorCallback', 'fileType', 'fileTypeErrorCallback']);
    if (typeof(o.maxFiles)!=='undefined' && o.maxFiles<(fileList.length+$.files.length)) {
      o.maxFilesErrorCallback(fileList, errorCount++);
      return false;
    }
    var files = [];
    $h.each(fileList, function(file){
        if (o.fileType.length > 0 && !$h.contains(o.fileType, file.type.split('/')[1])) {
            o.fileTypeErrorCallback(file, errorCount++);
            return false;
        }
        if (typeof(o.minFileSize)!=='undefined' && file.size<o.minFileSize) {
            o.minFileSizeErrorCallback(file, errorCount++);
            return false;
        }
        if (typeof(o.maxFileSize)!=='undefined' && file.size>o.maxFileSize) {
            o.maxFileSizeErrorCallback(file, errorCount++);
            return false;
        }
        // directories have size == 0
        if (file.size > 0 && !$.getFromUniqueIdentifier($h.generateUniqueIdentifier(file))) {
          var f = new ResumableFile($, file);
          $.files.push(f);
          files.push(f);
          $.fire('fileAdded', f, event);
        }
      });
    $.fire('filesAdded', files);
  };
  // INTERNAL OBJECT TYPES
  function ResumableFile(resumableObj, file){
    var $ = this;
    $.opts = {};
    $.getOpt = resumableObj.getOpt;
    $._prevProgress = 0;
    $.resumableObj = resumableObj;
    $.file = file;
    $.fileName = file.fileName||file.name; // Some confusion in different versions of Firefox
    $.size = file.size;
    $.relativePath = file.webkitRelativePath || $.fileName;
    $.uniqueIdentifier = $h.generateUniqueIdentifier(file);
    var _error = false;
    // Callback when something happens within the chunk
    var chunkEvent = function(event, message){
      // event can be 'progress', 'success', 'error' or 'retry'
      switch(event){
      case 'progress':
        $.resumableObj.fire('fileProgress', $);
        break;
      case 'error':
        $.abort();
        _error = true;
        $.chunks = [];
        $.resumableObj.fire('fileError', $, message);
        break;
      case 'success':
        if(_error) return;
        $.resumableObj.fire('fileProgress', $); // it's at least progress
        if($.progress()==1) {
          $.resumableObj.fire('fileSuccess', $, message);
        }
        break;
      case 'retry':
        $.resumableObj.fire('fileRetry', $);
        break;
      }
    }
    // Main code to set up a file object with chunks,
    // packaged to be able to handle retries if needed.
    $.chunks = [];
    $.abort = function(){
      // Stop current uploads
      $h.each($.chunks, function(c){
          if(c.status()=='uploading') c.abort();
        });
      $.resumableObj.fire('fileProgress', $);
    }
    $.cancel = function(){
      // Reset this file to be void
      var _chunks = $.chunks;
      $.chunks = [];
      // Stop current uploads
      $h.each(_chunks, function(c){
          if(c.status()=='uploading')  {
            c.abort();
            $.resumableObj.uploadNextChunk();
          }
        });
      $.resumableObj.removeFile($);
      $.resumableObj.fire('fileProgress', $);
    },
    $.retry = function(){
      $.bootstrap();
      $.resumableObj.upload();
    }
    $.bootstrap = function(){
      $.abort();
        _error = false;
      // Rebuild stack of chunks from file
      $.chunks = [];
      $._prevProgress = 0;
      var round = $.getOpt('forceChunkSize') ? Math.ceil : Math.floor;
      for (var offset=0; offset<Math.max(round(file.size/$.getOpt('chunkSize')),1); offset++) {
        $.chunks.push(new ResumableChunk($.resumableObj, $, offset, chunkEvent));
      }
    }
    $.progress = function(){
      if(_error) return(1);
      // Sum up progress across everything
      var ret = 0;
      var error = false;
      $h.each($.chunks, function(c){
          if(c.status()=='error') error = true;
          ret += c.progress(true); // get chunk progress relative to entire file
        });
      ret = (error ? 1 : (ret>0.999 ? 1 : ret));
      ret = Math.max($._prevProgress, ret); // We don't want to lose percentages when an upload is paused
      $._prevProgress = ret;
      return(ret);
    }
    // Bootstrap and return
    $.bootstrap();
    return(this);
  }
  function ResumableChunk(resumableObj, fileObj, offset, callback){
    var $ = this;
    $.opts = {};
    $.getOpt = resumableObj.getOpt;
    $.resumableObj = resumableObj;
    $.fileObj = fileObj;
    $.fileObjSize = fileObj.size;
    $.offset = offset;
    $.callback = callback;
    $.lastProgressCallback = (new Date);
    $.tested = false;
    $.retries = 0;
    $.preprocessState = 0; // 0 = unprocessed, 1 = processing, 2 = finished
    // Computed properties
    var chunkSize = $.getOpt('chunkSize');
    $.loaded = 0;
    $.startByte = $.offset*chunkSize;
    $.endByte = Math.min($.fileObjSize, ($.offset+1)*chunkSize);
    if ($.fileObjSize-$.endByte < chunkSize && !$.getOpt('forceChunkSize')) {
      // The last chunk will be bigger than the chunk size, but less than 2*chunkSize
      $.endByte = $.fileObjSize;
    }
    $.xhr = null;
    // test() makes a GET request without any data to see if the chunk has already been uploaded in a previous session
    $.test = function(){
      // Set up request and listen for event
      $.xhr = new XMLHttpRequest();
      var testHandler = function(e){
        $.tested = true;
        var status = $.status();
        if(status=='success') {
          $.callback(status, $.message());
          $.resumableObj.uploadNextChunk();
        } else {
          $.send();
        }
      }
      $.xhr.addEventListener("load", testHandler, false);
      $.xhr.addEventListener("error", testHandler, false);
      // Add data from the query options
      var url = ""
      var params = [];
      var customQuery = $.getOpt('query');
      if(typeof customQuery == "function") customQuery = customQuery($.fileObj, $);
      $h.each(customQuery, function(k,v){
          params.push([encodeURIComponent(k), encodeURIComponent(v)].join('='));
        });
      // Add extra data to identify chunk
      params.push(['resumableChunkNumber', encodeURIComponent($.offset+1)].join('='));
      params.push(['resumableChunkSize', encodeURIComponent($.getOpt('chunkSize'))].join('='));
      params.push(['resumableCurrentChunkSize', encodeURIComponent($.endByte - $.startByte)].join('='));
      params.push(['resumableTotalSize', encodeURIComponent($.fileObjSize)].join('='));
      params.push(['resumableIdentifier', encodeURIComponent($.fileObj.uniqueIdentifier)].join('='));
      params.push(['resumableFilename', encodeURIComponent($.fileObj.fileName)].join('='));
      params.push(['resumableRelativePath', encodeURIComponent($.fileObj.relativePath)].join('='));
      // Append the relevant chunk and send it
      $.xhr.open("GET", $.getOpt('target') + '?' + params.join('&'));
      // Add data from header options
      $h.each($.getOpt('headers'), function(k,v) {
        $.xhr.setRequestHeader(k, v);
      });
      $.xhr.send(null);
    }
    $.preprocessFinished = function(){
      $.preprocessState = 2;
      $.send();
    }
    // send() uploads the actual data in a POST call
    $.send = function(){
      var preprocess = $.getOpt('preprocess');
      if(typeof preprocess === 'function') {
        switch($.preprocessState) {
          case 0: preprocess($); $.preprocessState = 1; return;
          case 1: return;
          case 2: break;
        }
      }
      if($.getOpt('testChunks') && !$.tested) {
        $.test();
        return;
      }
      // Set up request and listen for event
      $.xhr = new XMLHttpRequest();
      // Progress
      $.xhr.upload.addEventListener("progress", function(e){
          if( (new Date) - $.lastProgressCallback > $.getOpt('throttleProgressCallbacks') * 1000 ) {
            $.callback('progress');
            $.lastProgressCallback = (new Date);
          }
          $.loaded=e.loaded||0;
        }, false);
      $.loaded = 0;
      $.callback('progress');
      // Done (either done, failed or retry)
      var doneHandler = function(e){
        var status = $.status();
        if(status=='success'||status=='error') {
          $.callback(status, $.message());
          $.resumableObj.uploadNextChunk();
        } else {
          $.callback('retry', $.message());
          $.abort();
          $.retries++;
          var retryInterval = $.getOpt('chunkRetryInterval');
          if(retryInterval !== undefined) {
              setTimeout($.send, retryInterval);
          } else {
            $.send();
          }
        }
      };
      $.xhr.addEventListener("load", doneHandler, false);
      $.xhr.addEventListener("error", doneHandler, false);
      // Set up the basic query data from Resumable
      var query = {
        resumableChunkNumber: $.offset+1,
        resumableChunkSize: $.getOpt('chunkSize'),
        resumableCurrentChunkSize: $.endByte - $.startByte,
        resumableTotalSize: $.fileObjSize,
        resumableIdentifier: $.fileObj.uniqueIdentifier,
        resumableFilename: $.fileObj.fileName,
        resumableRelativePath: $.fileObj.relativePath
      }
      // Mix in custom data
      var customQuery = $.getOpt('query');
      if(typeof customQuery == "function") customQuery = customQuery($.fileObj, $);
      $h.each(customQuery, function(k,v){
        query[k] = v;
      });
      // Add data from header options
      $h.each($.getOpt('headers'), function(k,v) {
        $.xhr.setRequestHeader(k, v);
      });
      var func   = ($.fileObj.file.slice ? 'slice' : ($.fileObj.file.mozSlice ? 'mozSlice' : ($.fileObj.file.webkitSlice ? 'webkitSlice' : 'slice'))),
          bytes  = $.fileObj.file[func]($.startByte,$.endByte),
          data   = null,
          target = $.getOpt('target');
      if ($.getOpt('method') === 'octet') {
        // Add data from the query options
        data = bytes;
        var params = [];
        $h.each(query, function(k,v){
          params.push([encodeURIComponent(k), encodeURIComponent(v)].join('='));
        });
        target += '?' + params.join('&');
      } else {
        // Add data from the query options
        data = new FormData();
        $h.each(query, function(k,v){
          data.append(k,v);
        });
        data.append($.getOpt('fileParameterName'), bytes);
      }
      $.xhr.open('POST', target);
      $.xhr.send(data);
    }
    $.abort = function(){
      // Abort and reset
      if($.xhr) $.xhr.abort();
      $.xhr = null;
    }
    $.status = function(){
      // Returns: 'pending', 'uploading', 'success', 'error'
      if(!$.xhr) {
        return('pending');
      } else if($.xhr.readyState<4) {
        // Status is really 'OPENED', 'HEADERS_RECEIVED' or 'LOADING' - meaning that stuff is happening
        return('uploading');
      } else {
        if($.xhr.status==200) {
          // HTTP 200, perfect
          return('success');
        } else if($h.contains($.getOpt('permanentErrors'), $.xhr.status) || $.retries >= $.getOpt('maxChunkRetries')) {
          // HTTP 415/500/501, permanent error
          return('error');
        } else {
          // this should never happen, but we'll reset and queue a retry
          // a likely case for this would be 503 service unavailable
          $.abort();
          return('pending');
        }
      }
    }
    $.message = function(){
      return($.xhr ? $.xhr.responseText : '');
    }
    $.progress = function(relative){
      if(typeof(relative)==='undefined') relative = false;
      var factor = (relative ? ($.endByte-$.startByte)/$.fileObjSize : 1);
      var s = $.status();
      switch(s){
      case 'success':
      case 'error':
        return(1*factor);
      case 'pending':
        return(0*factor);
      default:
        return($.loaded/($.endByte-$.startByte)*factor);
      }
    }
    return(this);
  }
  // QUEUE
  $.uploadNextChunk = function(){
    var found = false;
    // In some cases (such as videos) it's really handy to upload the first
    // and last chunk of a file quickly; this let's the server check the file's
    // metadata and determine if there's even a point in continuing.
    if ($.getOpt('prioritizeFirstAndLastChunk')) {
      $h.each($.files, function(file){
          if(file.chunks.length && file.chunks[0].status()=='pending' && file.chunks[0].preprocessState === 0) {
            file.chunks[0].send();
            found = true;
            return(false);
          }
          if(file.chunks.length>1 && file.chunks[file.chunks.length-1].status()=='pending' && file.chunks[0].preprocessState === 0) {
            file.chunks[file.chunks.length-1].send();
            found = true;
            return(false);
          }
        });
      if(found) return(true);
    }
    // Now, simply look for the next, best thing to upload
    $h.each($.files, function(file){
        $h.each(file.chunks, function(chunk){
            if(chunk.status()=='pending' && chunk.preprocessState === 0) {
              chunk.send();
              found = true;
              return(false);
            }
          });
        if(found) return(false);
      });
    if(found) return(true);
    // There are no more outstanding chunks to upload; check if everything is done
    var outstanding = false;
    $h.each($.files, function(file){
        outstanding = false;
        $h.each(file.chunks, function(chunk){
            var status = chunk.status();
            if(status=='pending' || status=='uploading' || chunk.preprocessState === 1) {
              outstanding = true;
              return(false);
            }
          });
        if(outstanding) return(false);
      });
    if(!outstanding) {
      // All chunks have been uploaded, complete
      $.fire('complete');
    }
    return(false);
  };
  // PUBLIC METHODS FOR RESUMABLE.JS
  $.assignBrowse = function(domNodes, isDirectory){
    if(typeof(domNodes.length)=='undefined') domNodes = [domNodes];
    // We will create an <input type="file"> and overlay it on the domNode
    // (crappy, but since HTML5 doesn't have a cross-browser .browse() method we don't have a choice.
    //  FF4+ allows click() for this though: https://developer.mozilla.org/en/using_files_from_web_applications)
    $h.each(domNodes, function(domNode) {
        var input;
        if(domNode.tagName==='INPUT' && domNode.type==='file'){
            input = domNode;
        } else {
            input = document.createElement('input');
            input.setAttribute('type', 'file');
            // Place the <input> within the dom node and position it to fill the entire space
            domNode.style.display = 'inline-block';
            domNode.style.position = 'relative';
            input.style.position = 'absolute';
            input.style.top = input.style.left = input.style.bottom = input.style.right = 0;
            input.style.opacity = 0;
            input.style.cursor = 'pointer';
            domNode.appendChild(input);
        }
        var maxFiles = $.getOpt('maxFiles');
        if (typeof(maxFiles)==='undefined'||maxFiles!=1){
          input.setAttribute('multiple', 'multiple');
        } else {
          input.removeAttribute('multiple');
        }
        if(isDirectory){
          input.setAttribute('webkitdirectory', 'webkitdirectory');
        } else {
          input.removeAttribute('webkitdirectory');
        }
        // When new files are added, simply append them to the overall list
        input.addEventListener('change', function(e){
            appendFilesFromFileList(e.target.files);
            e.target.value = '';
        }, false);
    });
  };
  $.assignDrop = function(domNodes){
    if(typeof(domNodes.length)=='undefined') domNodes = [domNodes];
    $h.each(domNodes, function(domNode) {
        domNode.addEventListener('dragover', onDragOver, false);
        domNode.addEventListener('drop', onDrop, false);
      });
  };
  $.unAssignDrop = function(domNodes) {
    if (typeof(domNodes.length) == 'undefined') domNodes = [domNodes];
    $h.each(domNodes, function(domNode) {
        domNode.removeEventListener('dragover', onDragOver);
        domNode.removeEventListener('drop', onDrop);
      });
  };
  $.isUploading = function(){
    var uploading = false;
    $h.each($.files, function(file){
        $h.each(file.chunks, function(chunk){
            if(chunk.status()=='uploading') {
              uploading = true;
              return(false);
            }
          });
        if(uploading) return(false);
      });
    return(uploading);
  }
  $.upload = function(){
    // Make sure we don't start too many uploads at once
    if($.isUploading()) return;
    // Kick off the queue
    $.fire('uploadStart');
    for (var num=1; num<=$.getOpt('simultaneousUploads'); num++) {
      $.uploadNextChunk();
    }
  };
  $.pause = function(){
    // Resume all chunks currently being uploaded
    $h.each($.files, function(file){
        file.abort();
      });
    $.fire('pause');
  };
  $.cancel = function(){
    $h.each($.files, function(file){
        file.cancel();
      });
    $.fire('cancel');
  };
  $.progress = function(){
    var totalDone = 0;
    var totalSize = 0;
    // Resume all chunks currently being uploaded
    $h.each($.files, function(file){
        totalDone += file.progress()*file.size;
        totalSize += file.size;
      });
    return(totalSize>0 ? totalDone/totalSize : 0);
  };
  $.addFile = function(file){
    appendFilesFromFileList([file]);
  };
  $.removeFile = function(file){
    var files = [];
    $h.each($.files, function(f,i){
        if(f!==file) files.push(f);
      });
    $.files = files;
  };
  $.getFromUniqueIdentifier = function(uniqueIdentifier){
    var ret = false;
    $h.each($.files, function(f){
        if(f.uniqueIdentifier==uniqueIdentifier) ret = f;
      });
    return(ret);
  };
  $.getSize = function(){
    var totalSize = 0;
    $h.each($.files, function(file){
        totalSize += file.size;
      });
    return(totalSize);
  };
  return(this);
}

The index.html demo page:

Resumable.js

It's a JavaScript library providing multiple simultaneous, stable and resumable uploads via the HTML5 File API.

The library is designed to introduce fault-tolerance into the upload of large files through HTTP. This is done by splitting each file into small chunks; whenever the upload of a chunk fails, uploading is retried until the procedure completes. This allows uploads to automatically resume after a network connection is lost, either locally or to the server. Additionally, it allows users to pause and resume uploads without losing state.

Resumable.js relies on the HTML5 File API and the ability to chunk files into smaller pieces. Currently, this means that support is limited to Firefox 4+ and Chrome 11+.

Demo

Your browser, unfortunately, is not supported by Resumable.js. The library requires support for the HTML5 File API along with file slicing.

Drop video files here to upload or select from your computer

Note the following JavaScript in index.html:

    var r = new Resumable({
        target:'upload.php',
        chunkSize:1*1024*1024,
        simultaneousUploads:4,
        testChunks:true,
        throttleProgressCallbacks:1
    });

target: the server-side script that handles the upload (upload.php here)

chunkSize: the size of each chunk the file is split into; 1 MB in this case

testChunks: whether, before uploading each chunk, to first ask the server if that chunk already exists
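
The configuration above only creates the uploader object; the page still has to attach it to the DOM and start the queue. Here is a minimal sketch of that wiring -- the element ids (dropTarget, browseButton) are illustrative placeholders, not names taken from the original demo page:

    // Sketch: hook the configured `r` up to the page and start uploads
    if (!r.support) {
      // No File API / file slicing in this browser: hide the drop area,
      // fall back to a plain form upload
      document.getElementById('dropTarget').style.display = 'none';
    } else {
      r.assignDrop(document.getElementById('dropTarget'));     // drag & drop area
      r.assignBrowse(document.getElementById('browseButton')); // click to pick files
      // Start (or resume) the queue whenever a file is added
      r.on('fileAdded', function (file, event) {
        r.upload();
      });
      // Overall progress, throttled by throttleProgressCallbacks above
      r.on('progress', function () {
        console.log('total progress: ' + Math.floor(r.progress() * 100) + '%');
      });
      r.on('fileSuccess', function (file, message) {
        console.log(file.fileName + ' uploaded, server said: ' + message);
      });
      r.on('fileError', function (file, message) {
        console.log(file.fileName + ' failed: ' + message);
      });
      // r.pause() aborts the chunks in flight; calling r.upload() again resumes
      // from the first chunk the server does not yet have
    }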

Below is the server-side PHP code I implemented:

The PHP side mainly does the following: it receives each chunk, saves it under a temporary directory, and once all chunks have been uploaded, merges them into a single complete file.

    <?php
    // upload.php: receives Resumable.js chunks and merges them into the final file
    $uploads_dir = 'uploads';                        // NOTE: assumed upload directory; adjust to your environment
    $REQUEST_METHOD = $_SERVER['REQUEST_METHOD'];
    // Handle the testChunks GET probe: tell the client whether a chunk already exists on the server
    if($REQUEST_METHOD == "GET"){
        if(count($_GET)>0)
        {
            $chunkNumber = $_GET['resumableChunkNumber'];
            $chunkSize = $_GET['resumableChunkSize'];
            $totalSize = $_GET['resumableTotalSize'];
            $identifier = $_GET['resumableIdentifier'];
            $filename = iconv ( 'UTF-8', 'GB2312', $_GET ['resumableFilename'] );
            if(validateRequest($chunkNumber, $chunkSize, $totalSize, $identifier, $filename)=='valid')
            {
                $chunkFilename = getChunkFilename($chunkNumber, $identifier, $filename);
                if(file_exists($chunkFilename)){
                    echo "found";
                } else {
                    header("HTTP/1.0 404 Not Found");
                    echo "not_found";
                }
            }
            else
            {
                header("HTTP/1.0 404 Not Found");
                echo "not_found";
            }
        }
    }
    function getChunkFilename ($chunkNumber, $identifier,$filename){
        global $uploads_dir;
        $temp_dir = $uploads_dir.'/'.$identifier;
        return  $temp_dir.'/'.$filename.'.part'.$chunkNumber;
    }
    function cleanIdentifier ($identifier){
        return $identifier;
        //return  preg_replace('/^0-9A-Za-z_-/', '', $identifier);
    }
    //$maxFileSize = 2*1024*1024*1024;
    function validateRequest ($chunkNumber, $chunkSize, $totalSize, $identifier, $filename, $fileSize=''){
        // Clean up the identifier
        //$identifier = cleanIdentifier($identifier);
        // Check if the request is sane
        if ($chunkNumber==0 || $chunkSize==0 || $totalSize==0 || $identifier==0 || $filename=="") {
            return 'non_resumable_request';
        }
        $numberOfChunks = max(floor($totalSize/($chunkSize*1.0)), 1);
        if ($chunkNumber>$numberOfChunks) {
            return 'invalid_resumable_request1';
        }
        // Is the file too big?
    //      if($maxFileSize && $totalSize>$maxFileSize) {
    //          return 'invalid_resumable_request2';
    //      }
        if($fileSize!="") {
            if($chunkNumber<$numberOfChunks && $fileSize!=$chunkSize) {
                // The chunk in the POST request isn't the correct size
                return 'invalid_resumable_request3';
            }
            if($numberOfChunks>1 && $chunkNumber==$numberOfChunks && $fileSize!=(($totalSize%$chunkSize)+$chunkSize)) {
                // The chunk in the POST request is the last one, and the file is not the correct size
                return 'invalid_resumable_request4';
            }
            if($numberOfChunks==1 && $fileSize!=$totalSize) {
                // The file is only a single chunk, and the data size does not fit
                return 'invalid_resumable_request5';
            }
        }
        return 'valid';
    }
    // loop through files and move the chunks to a temporarily created directory
    if($REQUEST_METHOD == "POST"){
        if(count($_POST)>0)
        {
            $resumableFilename = iconv ( 'UTF-8', 'GB2312', $_POST ['resumableFilename'] );
            $resumableIdentifier=$_POST['resumableIdentifier'];
            $resumableChunkNumber=$_POST['resumableChunkNumber'];
            $resumableTotalSize=$_POST['resumableTotalSize'];
            $resumableChunkSize=$_POST['resumableChunkSize'];
            if (!empty($_FILES)) foreach ($_FILES as $file) {
                // check the error status
                if ($file['error'] != 0) {
                    _log('error '.$file['error'].' in file '.$resumableFilename);
                    continue;
                }
                // init the destination file (format: <filename.ext>.part<#chunk>);
                // the file is stored in a temporary directory
                global $uploads_dir;
                $temp_dir = $uploads_dir.'/'.$resumableIdentifier;
                $dest_file = $temp_dir.'/'.$resumableFilename.'.part'.$resumableChunkNumber;
                // create the temporary directory
                if (!is_dir($temp_dir)) {
                    mkdir($temp_dir, 0777, true);
                }
                // move the temporary file
                if (!move_uploaded_file($file['tmp_name'], $dest_file)) {
                    _log('Error saving (move_uploaded_file) chunk '.$resumableChunkNumber.' for file '.$resumableFilename);
                } else {
                    // check if all the parts present, and create the final destination file
                    createFileFromChunks($temp_dir, $resumableFilename,$resumableChunkSize, $resumableTotalSize);
                }
            }
        }
    }
    /**
     *
     * Logging operation - to a file (upload_log.txt) and to the stdout
     * @param string $str - the logging string
     */
    function _log($str) {
        // log to the output
        $log_str = date('d.m.Y').": {$str}\r\n";
        echo $log_str;
        // log to file
        if (($fp = fopen('upload_log.txt', 'a+')) !== false) {
            fputs($fp, $log_str);
            fclose($fp);
        }
    }
    /**
     *
     * Delete a directory RECURSIVELY
     * @param string $dir - directory path
     * @link http://php.net/manual/en/function.rmdir.php
     */
    function rrmdir($dir) {
        if (is_dir($dir)) {
            $objects = scandir($dir);
            foreach ($objects as $object) {
                if ($object != "." && $object != "..") {
                    if (filetype($dir . "/" . $object) == "dir") {
                        rrmdir($dir . "/" . $object);
                    } else {
                        unlink($dir . "/" . $object);
                    }
                }
            }
            reset($objects);
            rmdir($dir);
        }
    }
    /**
     *
     * Check if all the parts exist, and
     * gather all the parts of the file together
     * @param string $dir - the temporary directory holding all the parts of the file
     * @param string $fileName - the original file name
     * @param string $chunkSize - each chunk size (in bytes)
     * @param string $totalSize - original file size (in bytes)
     */
    function createFileFromChunks($temp_dir, $fileName, $chunkSize, $totalSize) {
        // count all the parts of this file
        $total_files = 0;
        foreach(scandir($temp_dir) as $file) {
            if (stripos($file, $fileName) !== false) {
                $total_files++;
            }
        }
        // check that all the parts are present
        // the size of the last part is between chunkSize and 2*$chunkSize
        if ($total_files * $chunkSize >=  ($totalSize - $chunkSize + 1)) {
            global $uploads_dir;
            // create the final destination file
            if (($fp = fopen($uploads_dir.'/'.$fileName, 'w')) !== false) {
                for ($i=1; $i<=$total_files; $i++) {
                    fwrite($fp, file_get_contents($temp_dir.'/'.$fileName.'.part'.$i));
                    //_log('writing chunk '.$i);
                }
                fclose($fp);
            } else {
                _log('cannot create the destination file');
                return false;
            }
            // rename the temporary directory (to avoid access from other
            // concurrent chunk uploads) and then delete it
            if (rename($temp_dir, $temp_dir.'_UNUSED')) {
                rrmdir($temp_dir.'_UNUSED');
            } else {
                rrmdir($temp_dir);
            }
        }
    }
    ?>

With the scripts above you get multi-file upload, large-file upload, and resumable upload. Fellow PHP developers can grab them from the attachment and adapt them to their own production environment.

Below is a demonstration of starting an upload in Chrome, closing the browser partway through, and then continuing the upload in Firefox.

At the end, the uploaded chunks are merged into one complete file.

(screenshot)

Continuing the upload in Firefox:

(screenshot)

Upload complete:

(screenshot)

The chunk files of the large upload as seen on the server:

(screenshot)

After the upload finishes, the chunks are merged into the complete file:

(screenshot)

That wraps up the hands-on part; corrections and suggestions are welcome.

Attachment: http://down.51cto.com/data/2363910
