8 Apr 2022

Direct-to-S3 .NET samples

With the announcement of OSS migrating to the Direct-to-S3 approach, we want to help you make this transition smoother. This time we will start with the .NET utility for the new binary transfer in Autodesk Forge services. These samples are built using LTS versions of .NET.

The team is also working on the development of a new SDK that will use the Direct-to-S3 approach.

Our teammate Joao Martins worked on a curated utility file that includes all the newly released endpoints for the OSS Direct-to-S3 approach.

The GitHub repository can be found here; in that repo you can find the .NET 6 branch here, and if you work with .NET Core 3.1 you can find that branch here.

BinarytransferClient.cs

using Autodesk.Forge;
using Newtonsoft.Json;
using RestSharp;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
using System.Web;

namespace Forge_Upload_DirectToS3
{
  public static class BinarytransferClient
  {
    public static string BASE_URL { get; set; }

    public static dynamic CREDENTIAL { get; set; }

    public static int UPLOAD_CHUNK_SIZE { get; set; }

    public static string CLIENT_ID { get; set; }
    public static string CLIENT_SECRET { get; set; }

    public static int MAX_RETRY { get; set; }


    /// <summary>
    /// Return the URLs to upload the file
    /// </summary>
    /// <param name="bucketKey">Bucket key</param>
    /// <param name="objectKey">Object key</param>
    /// <param name="parts">[parts=1] How many URLs to generate in case of multi-part upload</param>
    /// <param name="firstPart">B[firstPart=1] Index of the part the first returned URL should point to</param>
    /// <param name="uploadKey">[uploadKey] Optional upload key if this is a continuation of a previously initiated upload</param>
    /// <param name="minutesExpiration">[minutesExpiration] Custom expiration for the upload URLs (within the 1 to 60 minutes range). If not specified, default is 2 minutes.
    public static async Task<dynamic> getUploadUrls(string bucketKey, string objectKey, int? minutesExpiration, int parts = 1, int firstPart = 1, string uploadKey = null)
    {
      string endpoint = $"/buckets/{bucketKey}/objects/{HttpUtility.UrlEncode(objectKey)}/signeds3upload";

      RestClient client = new RestClient(BASE_URL);
      RestRequest request = new RestRequest(endpoint, RestSharp.Method.GET);
      request.AddHeader("Authorization", "Bearer " + CREDENTIAL.access_token);
      request.AddHeader("Content-Type", "application/json");
      request.AddParameter("parts", parts, ParameterType.QueryString);
      request.AddParameter("firstPart", firstPart, ParameterType.QueryString);

      if (!string.IsNullOrEmpty(uploadKey))
      {
        request.AddParameter("uploadKey", uploadKey, ParameterType.QueryString);
      }

      if (minutesExpiration != null)
      {
        request.AddParameter("minutesExpiration", minutesExpiration, ParameterType.QueryString);
      }

      var response = await client.ExecuteAsync(request);

      //Here we handle 429 for Get Upload URLs
      if (response.StatusCode == HttpStatusCode.TooManyRequests)
      {
        int retryAfter = 0;
        var retryHeader = response.Headers.ToList().Find(x => x.Name == "Retry-After");
        int.TryParse(retryHeader?.Value?.ToString(), out retryAfter);
        // Retry-After is expressed in seconds, while Task.Delay expects milliseconds
        await Task.Delay(retryAfter * 1000);
        return await getUploadUrls(bucketKey, objectKey, minutesExpiration, parts, firstPart, uploadKey);
      }

      return JsonConvert.DeserializeObject(response.Content);
    }

    /// <summary>
    /// Upload the FileStream to specified bucket
    /// </summary>
    /// <param name="bucketKey">Bucket key</param>
    /// <param name="objectKey">Object key</param>
    /// <param name="fileStream">FileStream from input file</param>
    public static async Task<dynamic> UploadToBucket(string bucketKey, string objectKey, FileStream fileStream)
    {
      long fileSize = fileStream.Length;
      int maxBatches = 25;
      // Ceiling division: avoids an empty extra part when fileSize is an exact multiple of the chunk size
      int numberOfChunks = (int)Math.Ceiling((double)fileSize / UPLOAD_CHUNK_SIZE);
      int partsUploaded = 0;
      long start = 0;
      List<string> uploadUrls = new List<string>();
      string uploadKey = null;

      using (BinaryReader reader = new BinaryReader(fileStream))
      {
        while (partsUploaded < numberOfChunks)
        {
          int attempts = 0;

          // Cast to long so the multiplication doesn't overflow for files larger than 2 GB
          long end = Math.Min((long)(partsUploaded + 1) * UPLOAD_CHUNK_SIZE, fileSize);

          long numberOfBytes = end - start;
          byte[] fileBytes = new byte[numberOfBytes];
          reader.BaseStream.Seek(start, SeekOrigin.Begin); // Seek takes a long; an int cast would truncate beyond 2 GB
          int count = reader.Read(fileBytes, 0, (int)numberOfBytes);

          while (true)
          {
            attempts++;
            Console.WriteLine($"Uploading part {partsUploaded + 1}, attempt {attempts}");
            if (uploadUrls.Count == 0)
            {
              CREDENTIAL = await Get2LeggedTokenAsync(new Scope[] { Scope.DataRead, Scope.DataWrite, Scope.DataCreate });
              dynamic uploadParams = await getUploadUrls(bucketKey, objectKey, null, Math.Min(numberOfChunks - partsUploaded, maxBatches), partsUploaded + 1, uploadKey);
              uploadKey = uploadParams.uploadKey;
              uploadUrls = uploadParams.urls.ToObject<List<string>>();
            }

            string currentUrl = uploadUrls[0];
            uploadUrls.RemoveAt(0);

            try
            {
              var responseBuffer = await UploadBufferRestSharp(currentUrl, fileBytes);

              int statusCode = (int)responseBuffer.StatusCode;

              switch (statusCode)
              {
                case 403:
                  Console.WriteLine("403, refreshing urls");
                  uploadUrls = new List<string>();
                  break;
                case int n when (n >= 400):
                  throw new Exception(responseBuffer.Content);
                default:
                  goto NextChunk;
              }

            }
            catch (Exception ex)
            {
              Console.WriteLine(ex.Message);
              if (attempts == MAX_RETRY)
                throw;
            }
          }
        NextChunk:
          partsUploaded++;
          start = end;
          Console.WriteLine($"{partsUploaded} parts uploaded!");

        }
      }

      var responseUpload = await CompleteUpload(bucketKey, objectKey, uploadKey);

      return responseUpload;
    }

    /// <summary>
    /// Upload the specific part through url
    /// </summary>
    /// <param name="url">URL to upload the specified part</param>
    /// <param name="buffer">Buffer array to upload</param>
    public static async Task<dynamic> UploadBufferRestSharp(string url, byte[] buffer)
    {
      RestClient client = new RestClient();
      RestRequest request = new RestRequest(url, RestSharp.Method.PUT);
      request.AddParameter("", buffer, ParameterType.RequestBody);

      var response = await client.ExecuteAsync(request);

      return response;
    }

    /// <summary>
    /// Finalizes the upload of a file to OSS.
    /// </summary>
    /// <param name="bucketKey">Bucket key</param>
    /// <param name="objectKey">Object key</param>
    /// <param name="uploadKey">[uploadKey] Optional upload key if this is a continuation of a previously initiated upload</param>
    public static async Task<dynamic> CompleteUpload(string bucketKey, string objectKey, string uploadKey)
    {
      string endpoint = $"/buckets/{bucketKey}/objects/{HttpUtility.UrlEncode(objectKey)}/signeds3upload";
      RestClient client = new RestClient(BASE_URL);
      RestRequest request = new RestRequest(endpoint, Method.POST);

      request.AddHeader("Authorization", "Bearer " + CREDENTIAL.access_token);
      request.AddHeader("Content-Type", "application/json");

      request.AddJsonBody(new { uploadKey });

      var response = await client.ExecuteAsync(request);

      return response;
    }

    /// <summary>
    /// Return the URL to download the file
    /// </summary>
    /// <param name="bucketKey">Bucket key</param>
    /// <param name="objectKey">Object key</param>
    /// <param name="minutesExpiration">[minutesExpiration] Custom expiration for the upload URLs (within the 1 to 60 minutes range). If not specified, default is 2 minutes.
    public static async Task<dynamic> getDownloadUrl(string bucketKey, string objectKey, int? minutesExpiration)
    {
      string endpoint = $"/buckets/{bucketKey}/objects/{HttpUtility.UrlEncode(objectKey)}/signeds3download";
      RestClient client = new RestClient(BASE_URL);
      RestRequest request = new RestRequest(endpoint, RestSharp.Method.GET);
      request.AddHeader("Authorization", "Bearer " + CREDENTIAL.access_token);
      request.AddHeader("Content-Type", "application/json");

      if (minutesExpiration != null)
      {
        request.AddParameter("minutesExpiration", minutesExpiration, ParameterType.QueryString);
      }

      var response = await client.ExecuteAsync(request);

      //Here we handle 429 for Get Download URLs
      if (response.StatusCode == HttpStatusCode.TooManyRequests)
      {
        int retryAfter = 0;
        var retryHeader = response.Headers.ToList().Find(x => x.Name == "Retry-After");
        int.TryParse(retryHeader?.Value?.ToString(), out retryAfter);
        // Retry-After is expressed in seconds, while Task.Delay expects milliseconds
        await Task.Delay(retryAfter * 1000);
        return await getDownloadUrl(bucketKey, objectKey, minutesExpiration);
      }

      return JsonConvert.DeserializeObject(response.Content);
    }

    /// <summary>
    /// Download the specific part through url
    /// </summary>
    /// <param name="url">URL to download the file</param>
    public static byte[] DownloadBufferRestSharp(string url)
    {
      RestClient client = new RestClient();
      RestRequest request = new RestRequest(url, RestSharp.Method.GET);

      byte[] data = client.DownloadData(request);

      return data;
    }

    /// <summary>
    /// Return the byte array of the downloaded content
    /// </summary>
    /// <param name="bucketKey">Bucket key</param>
    /// <param name="objectKey">Object key</param>
    /// <param name="minutesExpiration">[minutesExpiration] Custom expiration for the upload URLs (within the 1 to 60 minutes range). If not specified, default is 2 minutes.
    public static async Task<byte[]> DownloadFromBucket(string bucketKey, string objectKey, int? minutesExpiration)
    {
      dynamic downloadParams = await getDownloadUrl(bucketKey, objectKey, minutesExpiration);

      if (downloadParams.status != "complete")
      {
        throw new Exception("File not available for download yet.");
      }

      byte[] downloadedBuffer = DownloadBufferRestSharp(downloadParams.url.ToString());

      return downloadedBuffer;

    }

    /// <summary>
    /// Get the access token from Autodesk
    /// </summary>
    public static async Task<dynamic> Get2LeggedTokenAsync(Scope[] scopes)
    {
      TwoLeggedApi oauth = new TwoLeggedApi();
      string grantType = "client_credentials";
      dynamic bearer = await oauth.AuthenticateAsync(
        CLIENT_ID,
        CLIENT_SECRET,
        grantType,
        scopes);
      return bearer;
    }
  }
}

The default expiration time of the pre-signed URLs is 2 minutes (longer expiration times, up to 60 minutes, can be set using the minutesExpiration param).
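
Also note that the static properties of BinarytransferClient must be populated before any call. A minimal sketch, assuming the OSS v2 base URL, a 5 MB chunk size, and credentials stored in environment variables (adjust these to your setup):

// Sketch: configure the static client before use (the values below are assumptions)
BinarytransferClient.BASE_URL = "https://developer.api.autodesk.com/oss/v2";
BinarytransferClient.CLIENT_ID = Environment.GetEnvironmentVariable("FORGE_CLIENT_ID");
BinarytransferClient.CLIENT_SECRET = Environment.GetEnvironmentVariable("FORGE_CLIENT_SECRET");
BinarytransferClient.UPLOAD_CHUNK_SIZE = 5 * 1024 * 1024; // each part except the last must be at least 5 MB
BinarytransferClient.MAX_RETRY = 5;                       // attempts per part before giving up

// Acquire a token, then request upload URLs valid for 60 minutes instead of the default 2
BinarytransferClient.CREDENTIAL = await BinarytransferClient.Get2LeggedTokenAsync(new Scope[] { Scope.DataRead, Scope.DataWrite, Scope.DataCreate });
dynamic urls = await BinarytransferClient.getUploadUrls("my-bucket", "model.rvt", minutesExpiration: 60);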

Download

Let's start with the Download process. This takes two steps to download your files directly from AWS S3 using pre-signed URLs. Here is the pseudo code explaining how it works.

  1. Generate a download URL using the GET buckets/:bucketKey/objects/:objectName/signeds3download endpoint
  2. Use the new URL to download the OSS object directly from AWS S3
    • Consider retrying (for example, with an exponential backoff) the download when the response code is 100-199, 429, or 500-599 (a sketch of such a retry follows this list)
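
The utility above handles 429 and 403, but does not implement that exponential backoff for the raw S3 transfer itself. A minimal sketch of such a retry wrapper, assuming the same RestSharp version used in BinarytransferClient.cs:

// Sketch: download with exponential backoff (not part of the utility above)
public static async Task<byte[]> DownloadWithBackoff(string url, int maxAttempts = 5)
{
  RestClient client = new RestClient();
  for (int attempt = 0; ; attempt++)
  {
    var response = await client.ExecuteAsync(new RestRequest(url, RestSharp.Method.GET));
    int status = (int)response.StatusCode;
    if (status >= 200 && status < 400)
      return response.RawBytes; // success
    // Retry only on 1xx, 429 and 5xx, as suggested above (status 0 means a network failure)
    bool retryable = status < 200 || status == 429 || status >= 500;
    if (!retryable || attempt + 1 == maxAttempts)
      throw new Exception($"Download failed with status {status}");
    await Task.Delay(TimeSpan.FromSeconds(Math.Pow(2, attempt))); // 1s, 2s, 4s, ...
  }
}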

Here is how the code looks for downloading a file from an OSS bucket (receiving the entire file into memory first):

using System.IO;
using System.Threading.Tasks;
using Autodesk.Forge;

namespace Forge_Upload_DirectToS3.test
{
  public class download_from_bucket
  {
    public static async Task<dynamic> DownloadFile(string filePath, string bucketKey, string objectKey)
    {
      BinarytransferClient.CREDENTIAL = await BinarytransferClient.Get2LeggedTokenAsync(new Scope[] { Scope.DataRead, Scope.DataWrite, Scope.DataCreate });

      dynamic response = new System.Dynamic.ExpandoObject();
      response.Status = "Download started!";

      System.Console.WriteLine(response.Status);

      byte[] downloadedBuffer = await BinarytransferClient.DownloadFromBucket(bucketKey, objectKey, null);

      await File.WriteAllBytesAsync(filePath, downloadedBuffer);

      response.Status = "Download Complete!";

      return response;
    }
  }
}
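
A hypothetical call site (the path, bucket key, and object key below are placeholders):

// Hypothetical usage: download "model.rvt" from "my-bucket" into a local file
dynamic result = await download_from_bucket.DownloadFile(@"C:\temp\model.rvt", "my-bucket", "model.rvt");
System.Console.WriteLine(result.Status); // "Download Complete!"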

Upload

Let's now look into the Upload process. This takes four steps to upload your files directly to AWS S3 using pre-signed URLs. Here is the pseudo code explaining how it works.

  1. Calculate the number of parts of the file to upload
    • Note: each uploaded part except for the last one must be at least 5MB (see the part-count sketch after this list)
  2. Generate up to 25 URLs for uploading specific parts of the file using the GET buckets/:bucketKey/objects/:objectKey/signeds3upload?firstPart=<index of first part>&parts=<number of parts> endpoint
    • The part numbers start with 1
    • For example, to generate upload URLs for parts 10 through 15, set firstPart to 10 and parts to 6
    • This endpoint also returns an uploadKey that is used later to request additional URLs or to finalize the upload
  3. Upload remaining parts of the file to their corresponding upload URLs
    • Consider retrying (for example, with an exponential backoff) individual uploads when the response code is 100-199, 429, or 500-599
    • If the response code is 403, the upload URLs have expired; go back to step #2
    • If you've used up all the upload URLs and there are still parts that must be uploaded, go back to step #2
  4. Finalize the upload using the POST buckets/:bucketKey/objects/:objectKey/signeds3upload endpoint, using the uploadKey value from step #2
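
For step 1, the part count is just a ceiling division once the chunk size is clamped to the 5MB minimum; a minimal sketch (fileSize and desiredChunkSize are assumed inputs):

// Sketch of step 1: clamp the chunk size, then count the parts with a ceiling division
const long MinPartSize = 5 * 1024 * 1024;                 // 5 MB minimum (all parts except the last)
long chunkSize = Math.Max(MinPartSize, desiredChunkSize); // desiredChunkSize is an assumed input
int numberOfParts = (int)Math.Ceiling((double)fileSize / chunkSize);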

Here is how the code looks when uploading a local file to an OSS bucket (through a FileStream):

using System.IO;
using System.Threading.Tasks;

namespace Forge_Upload_DirectToS3.test
{
  public class upload_to_bucket
  {
    public static async Task<dynamic> UploadFile(string filePath, string bucketKey, string objectKey)
    {
      FileStream fileStream = new FileStream(filePath, FileMode.Open);

      var response = await BinarytransferClient.UploadToBucket(bucketKey, objectKey, fileStream);

      return response;
    }
  }
}
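
A hypothetical call site mirrors the download one (UploadToBucket acquires its own two-legged token internally, so no extra setup is needed here):

// Hypothetical usage: upload a local file as "model.rvt" into "my-bucket"
dynamic result = await upload_to_bucket.UploadFile(@"C:\temp\model.rvt", "my-bucket", "model.rvt");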

And let's not forget about uploading a local file to a Data Management hub (such as BIM 360, Fusion Team, or ACC):

using Autodesk.Forge;
using Autodesk.Forge.Model;
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;

namespace Forge_Upload_DirectToS3.test
{
  public static class upload_to_docs
  {
    public static async Task<dynamic> UploadFile(string filePath, string projectId, string folderId, string fileName)
    {
      BinarytransferClient.CREDENTIAL = await BinarytransferClient.Get2LeggedTokenAsync(new Scope[] { Scope.DataRead, Scope.DataWrite, Scope.DataCreate });

      FileStream fileStream = new FileStream(filePath, FileMode.Open);

      // prepare storage
      ProjectsApi projectApi = new ProjectsApi();
      projectApi.Configuration.AccessToken = BinarytransferClient.CREDENTIAL.access_token;
      StorageRelationshipsTargetData storageRelData = new StorageRelationshipsTargetData(StorageRelationshipsTargetData.TypeEnum.Folders, folderId);
      CreateStorageDataRelationshipsTarget storageTarget = new CreateStorageDataRelationshipsTarget(storageRelData);
      CreateStorageDataRelationships storageRel = new CreateStorageDataRelationships(storageTarget);
      BaseAttributesExtensionObject attributes = new BaseAttributesExtensionObject(string.Empty, string.Empty, new JsonApiLink(string.Empty), null);
      CreateStorageDataAttributes storageAtt = new CreateStorageDataAttributes(fileName, attributes);
      CreateStorageData storageData = new CreateStorageData(CreateStorageData.TypeEnum.Objects, storageAtt, storageRel);
      CreateStorage storage = new CreateStorage(new JsonApiVersionJsonapi(JsonApiVersionJsonapi.VersionEnum._0), storageData);
      dynamic storageCreated = await projectApi.PostStorageAsync(projectId, storage);

      string[] storageIdParams = ((string)storageCreated.data.id).Split('/');
      string[] bucketKeyParams = storageIdParams[storageIdParams.Length - 2].Split(':');
      string bucketKey = bucketKeyParams[bucketKeyParams.Length - 1];
      string objectName = storageIdParams[storageIdParams.Length - 1];

      // upload the file/object, which will create a new object
      ObjectsApi objects = new ObjectsApi();
      objects.Configuration.AccessToken = BinarytransferClient.CREDENTIAL.access_token;

      //This is the only difference from the old method
      var response = await BinarytransferClient.UploadToBucket(bucketKey, objectName, fileStream);

      if ((int)response.StatusCode >= 400)
      {
        throw new Exception(response.Content);
      }

      // check if file already exists...
      FoldersApi folderApi = new FoldersApi();
      folderApi.Configuration.AccessToken = BinarytransferClient.CREDENTIAL.access_token;
      var filesInFolder = await folderApi.GetFolderContentsAsync(projectId, folderId);
      string itemId = string.Empty;
      foreach (KeyValuePair<string, dynamic> item in new DynamicDictionaryItems(filesInFolder.data))
        if (item.Value.attributes.displayName == fileName)
          itemId = item.Value.id; // this means a file with same name is already there, so we'll create a new version

      // now decide whether create a new item or new version
      if (string.IsNullOrWhiteSpace(itemId))
      {
        // create a new item
        BaseAttributesExtensionObject baseAttribute = new BaseAttributesExtensionObject(projectId.StartsWith("a.") ? "items:autodesk.core:File" : "items:autodesk.bim360:File", "1.0");
        CreateItemDataAttributes createItemAttributes = new CreateItemDataAttributes(fileName, baseAttribute);
        CreateItemDataRelationshipsTipData createItemRelationshipsTipData = new CreateItemDataRelationshipsTipData(CreateItemDataRelationshipsTipData.TypeEnum.Versions, CreateItemDataRelationshipsTipData.IdEnum._1);
        CreateItemDataRelationshipsTip createItemRelationshipsTip = new CreateItemDataRelationshipsTip(createItemRelationshipsTipData);
        StorageRelationshipsTargetData storageTargetData = new StorageRelationshipsTargetData(StorageRelationshipsTargetData.TypeEnum.Folders, folderId);
        CreateStorageDataRelationshipsTarget createStorageRelationshipTarget = new CreateStorageDataRelationshipsTarget(storageTargetData);
        CreateItemDataRelationships createItemDataRelationhips = new CreateItemDataRelationships(createItemRelationshipsTip, createStorageRelationshipTarget);
        CreateItemData createItemData = new CreateItemData(CreateItemData.TypeEnum.Items, createItemAttributes, createItemDataRelationhips);
        BaseAttributesExtensionObject baseAttExtensionObj = new BaseAttributesExtensionObject(projectId.StartsWith("a.") ? "versions:autodesk.core:File" : "versions:autodesk.bim360:File", "1.0");
        CreateStorageDataAttributes storageDataAtt = new CreateStorageDataAttributes(fileName, baseAttExtensionObj);
        CreateItemRelationshipsStorageData createItemRelationshipsStorageData = new CreateItemRelationshipsStorageData(CreateItemRelationshipsStorageData.TypeEnum.Objects, storageCreated.data.id);
        CreateItemRelationshipsStorage createItemRelationshipsStorage = new CreateItemRelationshipsStorage(createItemRelationshipsStorageData);
        CreateItemRelationships createItemRelationship = new CreateItemRelationships(createItemRelationshipsStorage);
        CreateItemIncluded includedVersion = new CreateItemIncluded(CreateItemIncluded.TypeEnum.Versions, CreateItemIncluded.IdEnum._1, storageDataAtt, createItemRelationship);
        CreateItem createItem = new CreateItem(new JsonApiVersionJsonapi(JsonApiVersionJsonapi.VersionEnum._0), createItemData, new List<CreateItemIncluded>() { includedVersion });

        ItemsApi itemsApi = new ItemsApi();
        itemsApi.Configuration.AccessToken = BinarytransferClient.CREDENTIAL.access_token;
        var newItem = await itemsApi.PostItemAsync(projectId, createItem);
        return newItem;
      }
      else
      {
        // create a new version
        BaseAttributesExtensionObject attExtensionObj = new BaseAttributesExtensionObject(projectId.StartsWith("a.") ? "versions:autodesk.core:File" : "versions:autodesk.bim360:File", "1.0");
        CreateStorageDataAttributes storageDataAtt = new CreateStorageDataAttributes(fileName, attExtensionObj);
        CreateVersionDataRelationshipsItemData dataRelationshipsItemData = new CreateVersionDataRelationshipsItemData(CreateVersionDataRelationshipsItemData.TypeEnum.Items, itemId);
        CreateVersionDataRelationshipsItem dataRelationshipsItem = new CreateVersionDataRelationshipsItem(dataRelationshipsItemData);
        CreateItemRelationshipsStorageData itemRelationshipsStorageData = new CreateItemRelationshipsStorageData(CreateItemRelationshipsStorageData.TypeEnum.Objects, storageCreated.data.id);
        CreateItemRelationshipsStorage itemRelationshipsStorage = new CreateItemRelationshipsStorage(itemRelationshipsStorageData);
        CreateVersionDataRelationships dataRelationships = new CreateVersionDataRelationships(dataRelationshipsItem, itemRelationshipsStorage);
        CreateVersionData versionData = new CreateVersionData(CreateVersionData.TypeEnum.Versions, storageDataAtt, dataRelationships);
        CreateVersion newVersionData = new CreateVersion(new JsonApiVersionJsonapi(JsonApiVersionJsonapi.VersionEnum._0), versionData);

        VersionsApi versionsApis = new VersionsApi();
        versionsApis.Configuration.AccessToken = BinarytransferClient.CREDENTIAL.access_token;
        dynamic newVersion = await versionsApis.PostVersionAsync(projectId, newVersionData);
        return newVersion;
      }
    }
  }
}

Thank you again for all your business, and please feel free to reach out to us through forge.help@autodesk.com.
