diff --git a/src/FSharp.Azure.StorageTypeProvider/Configuration.fs b/src/FSharp.Azure.StorageTypeProvider/Configuration.fs index 1a3bdbe..5757efd 100644 --- a/src/FSharp.Azure.StorageTypeProvider/Configuration.fs +++ b/src/FSharp.Azure.StorageTypeProvider/Configuration.fs @@ -61,4 +61,4 @@ module ConnectionValidation = |> ignore Success with | ex -> Failure ex - let validateConnectionString = memoize checkConnectionString + let validateConnectionString = memoize checkConnectionString \ No newline at end of file diff --git a/src/FSharp.Azure.StorageTypeProvider/Table/TableRepository.fs b/src/FSharp.Azure.StorageTypeProvider/Table/TableRepository.fs index 30b5ecc..defa2cf 100644 --- a/src/FSharp.Azure.StorageTypeProvider/Table/TableRepository.fs +++ b/src/FSharp.Azure.StorageTypeProvider/Table/TableRepository.fs @@ -8,6 +8,21 @@ open Microsoft.WindowsAzure.Storage.Table open Microsoft.WindowsAzure.Storage.Table.Queryable open System +///series of constant values describing size limits for azure storage tables in kb (source: https://msdn.microsoft.com/en-us/library/dd179338.aspx) +module SizeLimits = + let batch = 4000 + let partitionKey = 1 + let rowKey = 1 + let timestamp = 2 + let dateTime = 2 + let byteArr = 64 + let bool = 1 + let double = 2 + let guid = 4 + let int32 = 1 + let int64 = 1 + let string = 64 + let internal getTableClient connection = CloudStorageAccount.Parse(connection).CreateCloudTableClient() let buildTableEntity partitionKey rowKey names (values: obj []) = @@ -75,12 +90,34 @@ let private batch size source = | head::tail -> doBatch output (head::currentBatch) (counter + 1) tail doBatch [] [] 0 (source |> Seq.toList) +let private calcMaxSize (entity:DynamicTableEntity) = + let basicSize = SizeLimits.rowKey + SizeLimits.partitionKey + SizeLimits.timestamp + let propsSize = + entity.Properties.Values + |> Seq.sumBy(fun p -> + match p.PropertyType with + | Table.EdmType.DateTime -> SizeLimits.dateTime + | Table.EdmType.Binary -> SizeLimits.byteArr + 
| Table.EdmType.Boolean -> SizeLimits.bool + | Table.EdmType.Double -> SizeLimits.double + | Table.EdmType.Guid -> SizeLimits.guid + | Table.EdmType.Int32 -> SizeLimits.int32 + | Table.EdmType.Int64 -> SizeLimits.int64 + | Table.EdmType.String -> SizeLimits.string + | _ -> failwith("Unknown EdmType")) + (basicSize + propsSize) + +let private calcBatchSize entity = + let entityMaxSize = calcMaxSize entity + SizeLimits.batch / entityMaxSize |> min 100 |> max 1 + let internal executeBatchOperation createTableOp (table:CloudTable) entities = + let batchSize = if Seq.isEmpty entities then 1 else entities |> Seq.head |> calcBatchSize entities |> Seq.groupBy(fun (entity:DynamicTableEntity) -> entity.PartitionKey) |> Seq.collect(fun (partitionKey, entities) -> entities - |> batch 100 + |> batch batchSize |> Seq.map(fun entityBatch -> let batchForPartition = TableBatchOperation() entityBatch |> Seq.iter (createTableOp >> batchForPartition.Add) diff --git a/tests/IntegrationTests/TableHelpers.fs b/tests/IntegrationTests/TableHelpers.fs index dea6219..0ba1d33 100644 --- a/tests/IntegrationTests/TableHelpers.fs +++ b/tests/IntegrationTests/TableHelpers.fs @@ -10,6 +10,31 @@ let private getTable = .CreateCloudTableClient() .GetTableReference +let bArr = [| for i in 1 .. 255 do yield i |> byte |] + +type LargeEntity() = +//20 byte array properties with a max size of 64kb each give this entity type a max size of c. 
1.3Mb + inherit TableEntity() + member val ByteArr1 = bArr with get, set + member val ByteArr2 = bArr with get, set + member val ByteArr3 = bArr with get, set + member val ByteArr4 = bArr with get, set + member val ByteArr5 = bArr with get, set + member val ByteArr6 = bArr with get, set + member val ByteArr7 = bArr with get, set + member val ByteArr9 = bArr with get, set + member val ByteArr10 = bArr with get, set + member val ByteArr11= bArr with get, set + member val ByteArr12 = bArr with get, set + member val ByteArr13 = bArr with get, set + member val ByteArr14 = bArr with get, set + member val ByteArr15 = bArr with get, set + member val ByteArr16 = bArr with get, set + member val ByteArr17 = bArr with get, set + member val ByteArr18 = bArr with get, set + member val ByteArr19 = bArr with get, set + member val ByteArr20 = bArr with get, set + type RandomEntity() = inherit TableEntity() member val Name = String.Empty with get, set @@ -23,12 +48,22 @@ let insertRow (pk, rk, name, yearsWorking, dob, salary, isManager) (table:CloudT |> TableOperation.Insert |> table.Execute +let insertLargeRow (pk, rk)(table:CloudTable) = + LargeEntity(PartitionKey = pk, RowKey = rk) + |> TableOperation.Insert + |> table.Execute + let resetData() = let recreateTable (table:CloudTable) = if table.Exists() then table.Delete() table.Create() table + let lgeTable = getTable "large" |> recreateTable + lgeTable |> insertLargeRow("1","1") |> ignore + lgeTable |> insertLargeRow("1","2") |> ignore + lgeTable |> insertLargeRow("2","1") |> ignore + let employeeTable = getTable "employee" |> recreateTable getTable "emptytable" |> recreateTable |> ignore diff --git a/tests/IntegrationTests/TableUnitTests.fs b/tests/IntegrationTests/TableUnitTests.fs index 7cce5fb..a743618 100644 --- a/tests/IntegrationTests/TableUnitTests.fs +++ b/tests/IntegrationTests/TableUnitTests.fs @@ -11,6 +11,7 @@ open Xunit type Local = AzureTypeProvider<"DevStorageAccount", ""> let table = Local.Tables.employee +let 
lgeTable = Local.Tables.large type ResetTableDataAttribute() = inherit BeforeAfterTestAttribute() @@ -20,6 +21,7 @@ [] let ``Correctly identifies tables``() = // compiles! + Local.Tables.large |> ignore Local.Tables.employee [] @@ -171,4 +173,30 @@ let ``Cloud Table Client relates to the same data as the type provider``() = [] let ``DeletePartition deletes entries with given partition key``() = table.DeletePartition "men" - Assert.Equal (0,table.Query().``Where Partition Key Is``.``Equal To``("men").Execute().Length) \ No newline at end of file + Assert.Equal (0,table.Query().``Where Partition Key Is``.``Equal To``("men").Execute().Length) + + + +[] +[] let ``Insert succeeds for entries over 4Mb``() = + let generateLargeEntity partitionKey rowKey = + let byteArr20kb = [| for i in 1 .. 20000 do yield i |> byte |] + Local.Domain.largeEntity(partitionKey,rowKey,byteArr20kb,byteArr20kb,byteArr20kb,byteArr20kb,byteArr20kb,byteArr20kb,byteArr20kb,byteArr20kb,byteArr20kb,byteArr20kb,byteArr20kb,byteArr20kb,byteArr20kb,byteArr20kb,byteArr20kb,byteArr20kb,byteArr20kb,byteArr20kb,byteArr20kb) + let generateBatchOfLargeEntities partitionKey size = + [| for i in 1 .. size do yield generateLargeEntity partitionKey (Row(Guid.NewGuid().ToString())) |] + + let resultsOfInsert = + generateBatchOfLargeEntities (Partition("1")) 10 + |> lgeTable.Insert + + let failureCount = + resultsOfInsert + |> Array.map snd + |> Array.concat + |> Array.filter (fun r -> + match r with + | SuccessfulResponse(e,i) -> false + | _ -> true) + |> Array.length + Assert.Equal(0, failureCount) \ No newline at end of file diff --git a/tests/IntegrationTests/paket.references b/tests/IntegrationTests/paket.references index d1cedbf..dfd89e7 100644 --- a/tests/IntegrationTests/paket.references +++ b/tests/IntegrationTests/paket.references @@ -2,4 +2,5 @@ FsUnit.xUnit Unquote WindowsAzure.Storage xunit +xunit.runners FSharp.Core \ No newline at end of file