ASP.Net Upload Handler using SSH.Net and Plupload

Posted by Kae Travis in ASP.Net

OK, I won't drag this out too much, since there's far too much to comment on. My aim was to create a web-based upload facility that is compatible with old browsers (such as IE8) and can upload over SFTP (which ASP.Net doesn't natively support via the FtpWebRequest object).

So in summary I’ve used:

Plupload – This is the upload widget, which degrades nicely from HTML5 down to Flash, Silverlight and HTML4. (note that Flash doesn’t support chunking of uploads)

SSH.Net – This is the free library I used to upload via SFTP. It really is brilliant.
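
To give a flavour of the SSH.Net API before we get to the handler, here is a minimal, self-contained upload sketch. The host name, port, credentials and paths are placeholders of my own (in my case the real values come from SharePoint), so substitute your own:

using System.IO;
using Renci.SshNet;

class SftpUploadExample
{
    static void Main()
    {
        //placeholder connection details - substitute your own
        using (var sftp = new SftpClient("sftp.example.com", 22, "username", "password"))
        {
            sftp.Connect();

            //ensure the remote directory exists before writing to it
            if (!sftp.Exists("/SOURCE/demo/"))
            {
                sftp.CreateDirectory("/SOURCE/demo/");
            }

            //stream a local file up to the server
            using (var localFile = File.OpenRead(@"C:\Temp\package.zip"))
            {
                sftp.UploadFile(localFile, "/SOURCE/demo/package.zip");
            }

            sftp.Disconnect();
        }
    }
}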

Here is an excerpt of the ASPX page containing Plupload. Things to note:

  • It supports uploading of zip, rar, 7z and iso file types only
  • It reports upload speed
  • It uses a default chunk size of 10MB (where chunking is supported)
  • It points to an upload handler called FileUpload.ashx
  • It passes 3 parameters to the upload handler – client, applicationID and uploadType
  • It has a button to reset the uploader
  • It informs the user to use a newer browser for better performance (based on the Plupload render type)
<!--ASPX Code....-->
<div id="uploader">
<p>Your browser doesn't have Flash, Silverlight or HTML5 support.</p>
</div>
<asp:LinkButton ID="btnReset" runat="server">Reset Uploader</asp:LinkButton>
Upload Speed: <asp:Label ID="speedLbl" runat="server" Text="0 bytes/sec (0 kilobytes/sec)"></asp:Label>
<!--ASPX Code....-->
<script type="text/javascript">
    // Initialise the widget when the DOM is ready
    $(function () {

        // Set up the uploader
        $("#uploader").pluploadQueue({
            // General settings
            runtimes: 'html5,flash,silverlight,html4',
            url: "FileUpload.ashx",
            chunk_size: '10mb',
            max_retries: 3,
            rename: true,
            dragdrop: true,
            // Specify what files to browse for
            filters: {
                mime_types: [
                    { title: "Compressed Files", extensions: "zip,rar,7z,iso" }
                ]
            },
            init: attachCallbacks,
            // Flash settings
            flash_swf_url: 'plugins/plupload/js/Moxie.swf',
            // Silverlight settings
            silverlight_xap_url: 'plugins/plupload/js/Moxie.xap',
            // Parameters posted to the upload handler with every chunk
            multipart_params: {
                'client': '<%= clientLbl.Text %>',
                'applicationID': '<%= uamIDLbl.Text %>',
                'uploadType': '<%= uploadTypeLbl.Text %>'
            }
        });

        // Attach callbacks for FileUploaded, Error etc.
        function attachCallbacks(uploader) {

            uploader.bind('Error', function (up, error) {
                alert("An error occurred. Please try later or contact Support. " + error.message);
            });

            uploader.bind('UploadComplete', function (up, files) {
                // All files are uploaded - report overall success or failure
                if (up.total.failed > 0) {
                    alert("Upload Error");
                }
                else {
                    alert("Upload Completed Successfully");
                }
            });

            uploader.bind('StateChanged', function (up) {
                if (up.state == plupload.STARTED) {
                    // Started upload
                    alert("Started Upload.....Please Wait");
                }
                else {
                    // Stopped upload
                }
            });

            uploader.bind('FileUploaded', function (up, file, response) {
                // Display an error if the file upload failed
                var jsonResponse = jQuery.parseJSON(response.response);
                if (jsonResponse.result !== null && jsonResponse.result != 'Success') {
                    file.status = plupload.FAILED;
                    alert(file.name + ': ' + jsonResponse.result);
                }
                else {
                    alert(file.name + ' uploaded successfully!');
                }
            });

            uploader.bind('UploadProgress', function (up, file) {
                var speedb = up.total.bytesPerSec;
                var speedkb = Math.round((speedb / 1024) * 100) / 100;
                $("#speedLbl").text(speedb + " bytes/sec (" + speedkb + " kilobytes/sec)");
            });

            uploader.bind('PostInit', function (up) {
                var currentRuntime = up.runtime;
                if (currentRuntime == "flash") {
                    alert("We recommend using a more recent browser such as Internet Explorer 10 or Google Chrome to upload larger files (over 2GB) and to improve upload performance.");
                }
            });
        }
    });

    // Tweak to reset the interface for a new file upload - the core API didn't provide this functionality
    $('#btnReset').click(function () {
        var uploader = $('#uploader').pluploadQueue();
        // Clear the files object
        uploader.files.length = 0;
        $('div.plupload_buttons').css('display', 'block');
        $('span.plupload_upload_status').html('');
        $('span.plupload_upload_status').css('display', 'none');
        $('a.plupload_start').addClass('plupload_disabled');
        // Reset the flash container css properties
        $('.flash').css({
            position: 'absolute', top: '292px',
            background: 'none repeat scroll 0% 0% transparent',
            width: '77px',
            height: '22px',
            left: '16px'
        });
        // Clear the upload list
        $('#uploader_filelist li').each(function (idx, val) {
            $(val).remove();
        });
    });
</script>

Next is the upload handler, FileUpload.ashx. I've stripped out some of the logic where I obtain values such as ftpPassword (in reality I obtain these by running a CAML query against a SharePoint list). Once I've retrieved the encrypted password from SharePoint (based on the 'client' parameter passed in from Plupload), I decrypt it using the Rijndael decryption method here. AlkaneLogging is just a logging class I wrote which writes debug messages to a text file - comment those lines out if need be.
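
Neither of those helpers is included in the excerpt below, so here are minimal stand-ins if you want it to compile as-is. Both are my own sketches rather than the original implementations: the logger simply appends to a text file at a path I've made up, and the Decrypt method (which you can paste straight into the FileUpload class, since the handler calls it unqualified) assumes a Base64-encoded cipher text, a 16-byte UTF-8 initialisation vector and a PBKDF2-derived 256-bit key, so adjust it to match however the password was originally encrypted:

using System;
using System.IO;
using System.Security.Cryptography;
using System.Text;

public static class AlkaneLogging
{
    private static readonly object padlock = new object();

    //append a timestamped message to a text file (the log path here is an assumption)
    public static void WriteToLog(string message)
    {
        lock (padlock)
        {
            File.AppendAllText(@"C:\Logs\AlkaneUpload.log", DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss") + " " + message + Environment.NewLine);
        }
    }
}

//hypothetical Rijndael decryption helper matching the Decrypt(...) call in the handler - add it to the FileUpload class
private static string Decrypt(string cipherText, string password, string salt, string initialisationVector)
{
    byte[] cipherBytes = Convert.FromBase64String(cipherText);

    using (var keyDerivation = new Rfc2898DeriveBytes(password, Encoding.UTF8.GetBytes(salt)))   //salt must be at least 8 bytes
    using (var rijndael = new RijndaelManaged())
    {
        rijndael.Key = keyDerivation.GetBytes(32);                                   //256-bit key
        rijndael.IV = Encoding.UTF8.GetBytes(initialisationVector);                  //must be exactly 16 bytes

        using (var decryptor = rijndael.CreateDecryptor())
        using (var memoryStream = new MemoryStream(cipherBytes))
        using (var cryptoStream = new CryptoStream(memoryStream, decryptor, CryptoStreamMode.Read))
        using (var reader = new StreamReader(cryptoStream, Encoding.UTF8))
        {
            return reader.ReadToEnd();
        }
    }
}

With stand-ins like those in place, here is the handler itself: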

using System;
using System.Web;
using System.Net;
using System.IO;
using System.Text;
using Microsoft.SharePoint;
using System.Configuration;
using System.Web.SessionState;
using System.Web.Script.Serialization;
using Renci.SshNet;
using System.Security.Cryptography;
namespace AlkaneSolutions.Layouts.AlkaneSolutions
{
    public class FileUpload : IHttpHandler, IRequiresSessionState
    {
        //important to implement IRequiresSessionState
        //IReadOnlySessionState (for read-only access) or IRequiresSessionState (for read-write access)
        public bool IsReusable
        {
            get
            {
                return false;
            }
        }

        int chunk = 0;
        int chunks = 0;
        string fileName = "";
        string fileExt = "";
        string client = "";
        string applicationID = "";
        string uploadType = "";

        //unique session names based on applicationID and UploadType
        string ftpUsernameUniqueSessionName = "";
        string ftpPasswordUniqueSessionName = "";
        string ftpUploadProtocolUniqueSessionName = "";
        string ftpUploadPortUniqueSessionName = "";
        string ftpServerUniqueSessionName = "";
        string ftpDownloadProtocolUniqueSessionName = "";
        string ftpDownloadPortUniqueSessionName = "";

        string ftpUsername = "";
        string ftpPassword = "";
        string ftpSalt = "";
        string ftpUploadProtocol = "";
        int ftpUploadPort = 0;
        string ftpServer = "";
        string ftpDownloadProtocol = "";
        string ftpDownloadPort = "";
        public void ProcessRequest(HttpContext context)
        {
            try
            {
                if (context.Request.Files.Count > 0)
                {
                    for (int a = 0; a <= context.Request.Files.Count - 1; a++)
                    {
                        //note: "chunks" is converted to the zero-based index of the final chunk, so the "last chunk" test later on is simply chunk == chunks
                        chunk = context.Request["chunk"] != null ? int.Parse(context.Request["chunk"]) : 0;
                        chunks = context.Request["chunks"] != null ? int.Parse(context.Request["chunks"]) - 1 : 0;
                        fileName = context.Request["name"] ?? string.Empty;
                        fileExt = Path.GetExtension(fileName).ToLower();
                        client = context.Request["client"] ?? string.Empty;
                        applicationID = context.Request["applicationID"] ?? string.Empty;
                        uploadType = context.Request["uploadType"] ?? string.Empty;

                        //set up unique session names per application (useful for concurrent application uploads where applicationID is unique). These sessions are used
                        //when we upload using chunks (so we don't keep having to read from a data source for every chunk!)
                        ftpUsernameUniqueSessionName = applicationID + "_" + uploadType + "_ftpUsername";
                        ftpPasswordUniqueSessionName = applicationID + "_" + uploadType + "_ftpPassword";
                        ftpUploadProtocolUniqueSessionName = applicationID + "_" + uploadType + "_ftpUploadProtocol";
                        ftpUploadPortUniqueSessionName = applicationID + "_" + uploadType + "_ftpUploadPort";
                        ftpServerUniqueSessionName = applicationID + "_" + uploadType + "_ftpServer";
                        ftpDownloadPortUniqueSessionName = applicationID + "_" + uploadType + "_ftpDownloadPort";
                        ftpDownloadProtocolUniqueSessionName = applicationID + "_" + uploadType + "_ftpDownloadProtocol";

                        if (fileExt != ".zip" && fileExt != ".7z" && fileExt != ".rar" && fileExt != ".iso")
                        {
                            AlkaneLogging.WriteToLog("File Upload Error: " + applicationID + " - illegal extension of " + fileExt);
                            context.Response.ContentType = "application/json";
                            context.Response.ContentEncoding = Encoding.UTF8;
                            context.Response.StatusCode = 601;
                            context.Response.Write("{\"jsonrpc\" : \"2.0\", \"error\" : {\"code\": \"601\", \"message\": " + JsonEncode("Invalid File Type") + "}, \"id\" : \"id\"}");
                            context.ApplicationInstance.CompleteRequest();
                            //stop processing - CompleteRequest does not halt execution by itself
                            return;
                        }

                        if (chunk == 0)
                        {
                            //if it's the first chunk get FTP credentials (I've removed lots of code here - I obtain them from a SharePoint query but you may use any other data source)
                            ftpUsername = "ftpUsername";
                            ftpSalt = "ftpSalt";
                            //I retrieve the encrypted password stored in SharePoint and decrypt it here.
                            ftpPassword = Decrypt(ftpPassword, ftpUsername, ftpSalt, "InitialisationVector");
                            ftpServer = "ftpServer";
                            ftpUploadPort = 22;
                            ftpUploadProtocol = "ftpUploadProtocol";
                            ftpDownloadProtocol = "ftpDownloadProtocol";
                            ftpDownloadPort = "ftpDownloadPort";

                            //store in session, as we don't want to keep searching SharePoint for each subsequent chunk!
                            context.Session[ftpUsernameUniqueSessionName] = ftpUsername;
                            context.Session[ftpPasswordUniqueSessionName] = ftpPassword;
                            context.Session[ftpUploadProtocolUniqueSessionName] = ftpUploadProtocol;
                            context.Session[ftpUploadPortUniqueSessionName] = ftpUploadPort;
                            context.Session[ftpServerUniqueSessionName] = ftpServer;
                            context.Session[ftpDownloadPortUniqueSessionName] = ftpDownloadPort;
                            context.Session[ftpDownloadProtocolUniqueSessionName] = ftpDownloadProtocol;
                        }
                        else
                        {
                            //keep the session alive for each chunk by resetting it
                            context.Session[ftpUsernameUniqueSessionName] = (string)(context.Session[ftpUsernameUniqueSessionName]);
                            context.Session[ftpPasswordUniqueSessionName] = (string)(context.Session[ftpPasswordUniqueSessionName]);
                            context.Session[ftpUploadProtocolUniqueSessionName] = (string)(context.Session[ftpUploadProtocolUniqueSessionName]);
                            context.Session[ftpUploadPortUniqueSessionName] = (int)(context.Session[ftpUploadPortUniqueSessionName]);
                            context.Session[ftpServerUniqueSessionName] = (string)(context.Session[ftpServerUniqueSessionName]);
                            context.Session[ftpDownloadPortUniqueSessionName] = (string)(context.Session[ftpDownloadPortUniqueSessionName]);
                            context.Session[ftpDownloadProtocolUniqueSessionName] = (string)(context.Session[ftpDownloadProtocolUniqueSessionName]);

                            //we'll need to set these vars because it isn't the first chunk
                            ftpUsername = (string)(context.Session[ftpUsernameUniqueSessionName]);
                            ftpPassword = (string)(context.Session[ftpPasswordUniqueSessionName]);
                            ftpUploadProtocol = (string)(context.Session[ftpUploadProtocolUniqueSessionName]);
                            ftpUploadPort = (int)(context.Session[ftpUploadPortUniqueSessionName]);
                            ftpServer = (string)(context.Session[ftpServerUniqueSessionName]);
                            ftpDownloadPort = (string)(context.Session[ftpDownloadPortUniqueSessionName]);
                            ftpDownloadProtocol = (string)(context.Session[ftpDownloadProtocolUniqueSessionName]);
                        }
AlkaneLogging.WriteToLog("Server: " + ftpServer + "\r\nUsername: " + ftpUsername + "\r\nPassword: " + ftpPassword + "\r\nUpload Protocol: " + ftpUploadProtocol + "\r\nUpload Port: " + ftpUploadPort + "\r\nDownload Protocol: " + ftpDownloadPort + "\r\nDownload Port: " + ftpDownloadPort);
//construct upload string
string remoteDirectory = "SOURCE/" + applicationID + "/";
string remoteFilePath = remoteDirectory + fileName;
string SFTPFilePath = ftpUploadProtocol + "://" + ftpServer + ":" + ftpUploadPort + "/SOURCE/" + applicationID + "/" + fileName;
AlkaneLogging.WriteToLog("Attempting to upload: " + SFTPFilePath + " with creds: " + ftpUsername + " " + ftpPassword);
try
{
using (var ftp = new SftpClient(ftpServer, ftpUploadPort, ftpUsername, ftpPassword))
{
//connect to FTP stream
ftp.Connect();                           
AlkaneLogging.WriteToLog("Connected successfully to: " + ftpServer);
AlkaneLogging.WriteToLog("Attempting to create stream to: " + remoteFilePath);
//if first chunk, create.  If subsequent chunk append
using (var destStream = ftp.Open(remoteFilePath, chunk != 0 ? FileMode.Append : FileMode.CreateNew, FileAccess.Write))
{
//if remote folder doesnt exist, create it!
if (!ftp.Exists(remoteDirectory))
{
AlkaneLogging.WriteToLog("Creating directory: " + remoteDirectory);
ftp.CreateDirectory(remoteDirectory);
}
AlkaneLogging.WriteToLog("Writing chunk " + (chunk).ToString() + " of " + chunks.ToString() + " to stream");
Stream streamReader = context.Request.Files[a].InputStream;
byte[] bFile = new byte[streamReader.Length];
streamReader.Read(bFile, 0, (int)streamReader.Length);
streamReader.Close();
streamReader.Dispose();
int offset = 0;
int buffer = (bFile.Length > 2048) ? 2048 : bFile.Length;
while (offset < bFile.Length)
{
destStream.Write(bFile, offset, buffer);
offset += buffer;
buffer = (bFile.Length - offset < buffer) ? (bFile.Length - offset) : buffer;
}
destStream.Flush();
destStream.Close();
AlkaneLogging.WriteToLog("File chunk for " + fileName + " uploaded successfully");
}
//disconnect from SFTP object
ftp.Disconnect();
}
//if no chunks (maybe not supported by browser or not specified) or final chunk
if ((chunks == 0) || (chunk == (chunks)))
{
ftpDownloadPort = (string)(context.Session[ftpDownloadPortUniqueSessionName]);
ftpDownloadProtocol = (string)(context.Session[ftpDownloadProtocolUniqueSessionName]);
string SharepointFTPPath = ftpDownloadProtocol + "://" + ftpServer + ":" + ftpDownloadPort + "?u=" + (string)(context.Session[ftpUsernameUniqueSessionName]) + "&p=" + (string)(context.Session[ftpPasswordUniqueSessionName]) + "&path=" + "/SOURCE/" + applicationID + "/" + fileName;
//update Sharepoint/database with upload path (removed in this code excerpt)
//remove FTP credentials from session
context.Session.Remove(ftpUsernameUniqueSessionName);
context.Session.Remove(ftpPasswordUniqueSessionName);
AlkaneLogging.WriteToLog("File Upload Success: " + applicationID);
context.Response.ContentType = "application/json";
context.Response.ContentEncoding = Encoding.UTF8;
context.Response.StatusCode = 200; 
context.Response.Write("{\"jsonrpc\" : \"2.0\", \"result\" : null, \"id\" : \"id\"}");
context.ApplicationInstance.CompleteRequest();
}
}
catch (Exception ex)
{
AlkaneLogging.WriteToLog("File Upload Error: " + applicationID + " " + ex.Message);
context.Response.Write(ex.Message);
context.Response.StatusCode = 500;
context.ApplicationInstance.CompleteRequest();
}
}
}
                else
                {
                    AlkaneLogging.WriteToLog("File Upload Error: " + applicationID + " - no files to upload");
                    context.Response.Write("No files to upload");
                    context.Response.StatusCode = 500;
                    context.ApplicationInstance.CompleteRequest();
                }
            }
            catch (Exception e)
            {
                //catch anything thrown outside the inner try/catch (SSH.Net doesn't throw WebException, so a general catch is more useful here than FtpWebResponse handling)
                AlkaneLogging.WriteToLog("File Upload Error: " + applicationID + " - " + e.Message);
                context.Response.Clear();
                context.Response.Write(e.Message);
                context.Response.StatusCode = 500;
                context.ApplicationInstance.CompleteRequest();
            }
        }
        public static bool FTPFileExists(string ftpServer, int ftpPort, string ftpUsername, string ftpPassword, string ftpFile)
        {
            bool fileExists = false;
            try
            {
                using (var ftp = new SftpClient(ftpServer, ftpPort, ftpUsername, ftpPassword))
                {
                    ftp.Connect();
                    if (ftp.Exists(ftpFile))
                    {
                        AlkaneLogging.WriteToLog(ftpFile + " exists");
                        fileExists = true;
                    }
                    else
                    {
                        AlkaneLogging.WriteToLog(ftpFile + " does not exist");
                        fileExists = false;
                    }
                    //disconnect from SFTP object
                    ftp.Disconnect();
                    return fileExists;
                }
            }
            catch
            {
                AlkaneLogging.WriteToLog(ftpFile + " does not exist");
                return fileExists;
            }
        }

        protected string JsonEncode(object value)
        {
            var ser = new JavaScriptSerializer();
            return ser.Serialize(value);
        }
    }
}