diff --git a/.formatter.exs b/.formatter.exs
index 525446d..189441d 100644
--- a/.formatter.exs
+++ b/.formatter.exs
@@ -1,4 +1,5 @@
 # Used by "mix format"
 [
+  import_deps: [:tesla],
   inputs: ["mix.exs", "{config,lib,test}/**/*.{ex,exs}"]
 ]
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..6b41697
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,25 @@
+on: push
+
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - uses: erlef/setup-beam@v1
+        with:
+          otp-version: "24.0"
+          elixir-version: "1.12.0"
+      - uses: actions/cache@v2
+        with:
+          path: |
+            deps
+            _build
+            priv/plts
+          key: ${{ runner.os }}-mix-${{ hashFiles('**/mix.lock') }}
+          restore-keys: |
+            ${{ runner.os }}-mix-
+      - run: docker-compose up -d
+      - run: mix deps.get
+      - run: mix credo --strict
+      - run: mix dialyzer
+      - run: mix test --include external
diff --git a/.gitignore b/.gitignore
index 7fbd415..8cc731f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,6 +7,9 @@
 # The directory Mix downloads your dependencies sources to.
 /deps/
 
+# Dialyzer builds
+priv/plts
+
 # Where 3rd-party dependencies like ExDoc output generated docs.
 /doc/
 
@@ -23,3 +26,5 @@ erl_crash.dump
 ex_microsoft_azure_storage-*.tar
 
 /.elixir_ls
+/.idea/
+*.iml
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..e2ada72
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,73 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## Unreleased
+
+## 0.3.4 - 2021-10-25
+
+### Changed
+
+- The `Blob.put_blob_from_url` content-type warning can now be suppressed in config.
+
+## 0.3.3 - 2021-10-06
+
+### Fixed
+
+- `Blob.get_blob_properties` no longer raises when the blob is not found.
+
+## 0.3.2 - 2021-08-08
+
+### Fixed
+
+- `BlobStorage.get_blob_service_properties` now works correctly with Azurite, the Azure Storage
+  simulator.
+
+## 0.3.1 - 2021-07-12
+
+### Fixed
+
+- #3 Workaround now attempts to preserve all blob properties.
+
+## 0.3.0 - 2021-07-12
+
+### Added
+
+- Support for Azure Storage connection strings
+- Support for getting, setting and updating blob properties
+- #3 Workaround for loss of `content-type` when using `Blob.put_blob_from_url/2`
+
+## 0.2.3 - 2021-07-05
+
+### Added
+
+- Generate SAS tokens that override content-disposition, content-encoding, content-language and so on.
+
+## 0.2.2 - 2021-07-05
+
+### Fixed
+
+- SAS tokens are now correctly generated against API version `2020-04-08`.
+
+## 0.2.1 - 2021-07-05
+
+### Fixed
+
+- `BlobStorage.get_blob_service_properties` and `BlobStorage.get_blob_service_stats` now function as expected.
+
+## 0.2.0 - 2021-07-05
+
+### Added
+
+- Add `Blob.put_blob` and `Blob.put_blob_from_url` APIs
+
+### Changed
+
+- Bump API version from `2018-03-28` to `2020-04-08`
+
+## 0.1.0 - 2021-07-05
+
+### Added
+
+- Pull in work from the original [ex_microsoft_azure_storage](https://github.com/chgeuer/ex_microsoft_azure_storage).
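The 0.3.4 entry above refers to the suppression flag that `Blob.config/0` (added later in this
patch) reads via `Application.get_env(:azure, ExMicrosoftAzureStorage.Storage.Blob, [])`. A
minimal sketch of opting out, assuming that `:azure` app key:

```elixir
# config/config.exs — a sketch; the key names come from Blob.config/0 and
# suppress_workaround_for_put_blob_from_url_warning?/0 in lib/storage/blob.ex below
import Mix.Config

config :azure, ExMicrosoftAzureStorage.Storage.Blob,
  suppress_workaround_for_put_blob_from_url_warning?: true
```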
diff --git a/LICENSE.md b/LICENSE.md
new file mode 100644
index 0000000..9436edf
--- /dev/null
+++ b/LICENSE.md
@@ -0,0 +1,20 @@
+Copyright (c) 2021
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/README.md b/README.md
index 0b4153b..a10426e 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,9 @@
+
+Based on [joeapearson/elixir-azure](https://github.com/joeapearson/elixir-azure), forked because of its outdated dependencies.
+
 # ExMicrosoftAzureStorage
 
-An early prototype of an SDK to interact with Microsoft Azure Storage.
+An SDK to interact with Microsoft Azure Storage.
 
 ## Installation
 
@@ -10,7 +13,11 @@ by adding `ex_microsoft_azure_storage` to your list of dependencies in `mix.exs`
 ```elixir
 def deps do
   [
-    {:ex_microsoft_azure_storage, app: false, github: "chgeuer/ex_microsoft_azure_storage", ref: "master"}
+    {:ex_microsoft_azure_storage, "~> 1.0"},
+
+    # Optional dependency; you can also bring your own JSON library and configure it
+    # with `config :ex_microsoft_azure_storage, :json_library, YOUR_JSON_LIBRARY`
+    {:jason, "~> 1.1"}
   ]
 end
 ```
diff --git a/config/config.exs b/config/config.exs
index 9b9813f..1e6b696 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -1,3 +1,3 @@
-use Mix.Config
+import Mix.Config
 
 # Do not put any config here...
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..8069927
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,7 @@
+version: "3.9"
+services:
+  azure:
+    image: mcr.microsoft.com/azure-storage/azurite:3.11.0
+    ports:
+      - "10000:10000"
+    command: azurite-blob --blobHost 0.0.0.0 --blobPort 10000
diff --git a/lib/ex_microsoft_azure_storage.ex b/lib/ex_microsoft_azure_storage.ex
new file mode 100644
index 0000000..b197cc4
--- /dev/null
+++ b/lib/ex_microsoft_azure_storage.ex
@@ -0,0 +1,5 @@
+defmodule ExMicrosoftAzureStorage do
+  @moduledoc """
+  Documentation for `ExMicrosoftAzureStorage`.
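+
+  A minimal usage sketch (account values are illustrative) of parsing a storage
+  connection string with `ExMicrosoftAzureStorage.Storage.ConnectionString`:
+
+      alias ExMicrosoftAzureStorage.Storage.ConnectionString
+
+      ConnectionString.parse("AccountName=myaccount;AccountKey=c2VjcmV0")
+      # => %{account_name: "myaccount", account_key: "c2VjcmV0"}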
+ """ +end diff --git a/lib/fiddler.ex b/lib/fiddler.ex deleted file mode 100644 index 99911e3..0000000 --- a/lib/fiddler.ex +++ /dev/null @@ -1,5 +0,0 @@ -defmodule Fiddler do - def enable(), do: "http_proxy" |> System.put_env("127.0.0.1:8888") - - def disable(), do: "http_proxy" |> System.delete_env() -end diff --git a/lib/microsoft/azure/storage/blob.ex b/lib/microsoft/azure/storage/blob.ex deleted file mode 100644 index 8742453..0000000 --- a/lib/microsoft/azure/storage/blob.ex +++ /dev/null @@ -1,268 +0,0 @@ -defmodule Microsoft.Azure.Storage.Blob do - import SweetXml - use NamedArgs - # import SweetXml - import Microsoft.Azure.Storage.RequestBuilder - alias Microsoft.Azure.Storage.{Container} - - @enforce_keys [:container, :blob_name] - defstruct [:container, :blob_name] - - def new(container = %Container{}, blob_name) - when is_binary(blob_name), - do: %__MODULE__{container: container, blob_name: blob_name} - - @max_block_size_100MB 104_857_600 - - # |> Base.encode64() - def to_block_id(block_id) when is_binary(block_id), do: block_id - def to_block_id(block_id) when is_integer(block_id), do: <> |> Base.encode64() - - @doc """ - The `put_block` operation creates a new block to be committed as part of a blob. - """ - def put_block( - %__MODULE__{ - container: %Container{storage_context: context, container_name: container_name}, - blob_name: blob_name - }, - block_id, - content - ) - when is_binary(block_id) and byte_size(content) <= @max_block_size_100MB do - # https://docs.microsoft.com/en-us/rest/api/storageservices/put-block - - response = - context - |> new_azure_storage_request() - |> method(:put) - |> url("/#{container_name}/#{blob_name}") - |> add_param(:query, :comp, "block") - # |> to_block_id()) - |> add_param(:query, :blockid, block_id) - |> body(content) - |> add_header_content_md5() - |> sign_and_call(:blob_service) - - case response do - %{status: status} when 400 <= status and status < 500 -> - {:error, response |> create_error_response()} - - %{status: 201} -> - {:ok, - response - |> create_success_response()} - end - end - - @doc """ - The `put_block_list` operation writes a blob by specifying the list of block IDs that make up the blob. 
- """ - def put_block_list( - %__MODULE__{ - container: %Container{storage_context: context, container_name: container_name}, - blob_name: blob_name - }, - block_list - ) - when is_list(block_list) do - # https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-list - response = - context - |> new_azure_storage_request() - |> method(:put) - |> url("/#{container_name}/#{blob_name}") - |> add_param(:query, :comp, "blocklist") - |> body(block_list |> serialize_block_list()) - |> sign_and_call(:blob_service) - - case response do - %{status: status} when 400 <= status and status < 500 -> - {:error, response |> create_error_response()} - - %{status: 201} -> - {:ok, - response - |> create_success_response()} - end - end - - @template_block_list """ - - - <%= for block <- @block_list do %> - <%= block |> Microsoft.Azure.Storage.Blob.to_block_id() %> - <% end %> - - """ - - defp serialize_block_list(block_list), - do: @template_block_list |> EEx.eval_string(assigns: [block_list: block_list]) - - defp deserialize_block_list(xml_body) do - deserialize_block = fn node -> - %{ - name: node |> xpath(~x"./Name/text()"s), - size: - node - |> xpath( - ~x"./Size/text()"s - |> transform_by(fn t -> t |> Integer.parse() |> elem(0) end) - ) - } - end - - %{ - committed_blocks: - xml_body - |> xpath(~x"/BlockList/CommittedBlocks/Block"l) - |> Enum.map(deserialize_block), - uncommitted_blocks: - xml_body - |> xpath(~x"/BlockList/UncommittedBlocks/Block"l) - |> Enum.map(deserialize_block) - } - end - - def get_block_list( - %__MODULE__{ - container: %Container{storage_context: context, container_name: container_name}, - blob_name: blob_name - }, - block_list_type \\ :all, - snapshot \\ nil - ) - when block_list_type in [:all, :committed, :uncommitted] do - # https://docs.microsoft.com/en-us/rest/api/storageservices/get-block-list - - response = - context - |> new_azure_storage_request() - |> method(:get) - |> url("/#{container_name}/#{blob_name}") - |> add_param(:query, :comp, "blocklist") - |> add_param(:query, :blocklisttype, block_list_type |> Atom.to_string()) - |> add_param_if(snapshot != nil, :query, :snapshot, snapshot) - |> sign_and_call(:blob_service) - - case response do - %{status: status} when 400 <= status and status < 500 -> - {:error, response |> create_error_response()} - - %{status: 200} -> - {:ok, - response - |> create_success_response() - |> Map.merge(response.body |> deserialize_block_list())} - end - end - - def upload_file(container = %Container{}, filename) do - mega_byte = 1024 * 1024 - block_size = 4 * mega_byte - max_concurrency = 3 - blob_name = String.replace(filename, Path.dirname(filename) <> "/", "") |> URI.encode() - blob = container |> __MODULE__.new(blob_name) - - %{size: size} = File.stat!(filename) - - existing_block_ids = - case blob |> get_block_list(:all) do - {:error, %{status: 404}} -> - %{} - - {:ok, %{uncommitted_blocks: uncommitted_blocks, committed_blocks: committed_blocks}} -> - a = - uncommitted_blocks - |> Enum.reduce(%{}, fn %{name: name, size: size}, map -> - map |> Map.put(name, size) - end) - - committed_blocks - |> Enum.reduce(a, fn %{name: name, size: size}, map -> map |> Map.put(name, size) end) - end - - {:ok, block_list_pid} = Agent.start_link(fn -> existing_block_ids end) - - add_block = fn block_id, content -> - block_list_pid - |> Agent.update(&Map.put(&1, block_id, byte_size(content))) - end - - uploaded_bytes = fn -> - block_list_pid - |> Agent.get(&(&1 |> Map.values() |> Enum.reduce(0, fn a, b -> a + b end))) - end - - filename - |> 
-    |> File.stream!([:read_ahead, :binary], block_size)
-    |> Stream.zip(1..50_000)
-    |> Task.async_stream(
-      fn {content, i} ->
-        block_id =
-          i
-          |> to_block_id()
-
-        if !(existing_block_ids |> Map.has_key?(block_id)) do
-          IO.puts("Start to upload block #{i}")
-
-          {:ok, _} = blob |> put_block(block_id, content)
-
-          add_block.(block_id, content)
-
-          IO.puts("#{100 * uploaded_bytes.() / size}% (finished upload of #{i}")
-        end
-      end,
-      max_concurrency: max_concurrency,
-      ordered: true,
-      timeout: :infinity
-    )
-    |> Enum.to_list()
-
-    in_storage =
-      block_list_pid
-      |> Agent.get(&(&1 |> Map.keys() |> Enum.into([])))
-
-    block_ids =
-      1..50_000
-      |> Enum.map(&to_block_id/1)
-      |> Enum.filter(&(&1 in in_storage))
-
-    blob
-    |> put_block_list(block_ids)
-  end
-
-  def delete_blob(
-        %__MODULE__{
-          container: %Container{storage_context: context, container_name: container_name},
-          blob_name: blob_name
-        },
-        opts \\ []
-      ) do
-    # https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
-
-    %{snapshot: snapshot, timeout: timeout} =
-      case [snapshot: nil, timeout: -1]
-           |> Keyword.merge(opts)
-           |> Enum.into(%{}) do
-        %{snapshot: snapshot, timeout: timeout} -> %{snapshot: snapshot, timeout: timeout}
-      end
-
-    response =
-      context
-      |> new_azure_storage_request()
-      |> method(:delete)
-      |> url("/#{container_name}/#{blob_name}")
-      |> add_param_if(snapshot != nil, :query, :snapshot, snapshot)
-      |> add_param_if(timeout > 0, :query, :timeout, timeout)
-      |> sign_and_call(:blob_service)
-
-    case response do
-      %{status: status} when 400 <= status and status < 500 ->
-        {:error, response |> create_error_response()}
-
-      %{status: 202} ->
-        {:ok, response |> create_success_response()}
-    end
-  end
-end
diff --git a/lib/microsoft/azure/storage/shared_access_signature.ex b/lib/microsoft/azure/storage/shared_access_signature.ex
deleted file mode 100644
index 0073b00..0000000
--- a/lib/microsoft/azure/storage/shared_access_signature.ex
+++ /dev/null
@@ -1,220 +0,0 @@
-defmodule Microsoft.Azure.Storage.SharedAccessSignature do
-  alias Microsoft.Azure.Storage
-  alias Microsoft.Azure.Storage.ApiVersion
-  import Microsoft.Azure.Storage.Utilities, only: [add_to: 3, set_to_string: 2]
-
-  # https://docs.microsoft.com/en-us/rest/api/storageservices/delegating-access-with-a-shared-access-signature
-  # https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1
-  # https://github.com/chgeuer/private_gists/blob/76db1345142d25d3359af6ce4ba7b9eef1aeb769/azure/AccountSAS/AccountSas.cs
-
-  defstruct [
-    :service_version,
-    :target_scope,
-    :services,
-    :resource_type,
-    :permissions,
-    :start_time,
-    :expiry_time,
-    :resource,
-    :permissions,
-    :ip_range,
-    :protocol
-  ]
-
-  def new(), do: %__MODULE__{}
-
-  def for_storage_account(v = %__MODULE__{target_scope: nil}),
-    do: v |> Map.put(:target_scope, :account)
-
-  def for_blob_service(v = %__MODULE__{target_scope: nil}), do: v |> Map.put(:target_scope, :blob)
-
-  def for_table_service(v = %__MODULE__{target_scope: nil}),
-    do: v |> Map.put(:target_scope, :table)
-
-  def for_queue_service(v = %__MODULE__{target_scope: nil}),
-    do: v |> Map.put(:target_scope, :queue)
-
-  # https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas#specifying-account-sas-parameters
-  @services_map %{blob: "b", queue: "q", table: "t", file: "f"}
-  def add_service_blob(v = %__MODULE__{target_scope: :account}), do: v |> add_to(:services, :blob)
-
-  def add_service_queue(v = %__MODULE__{target_scope: :account}),
-    do: v |>
-      add_to(:services, :queue)
-
-  def add_service_table(v = %__MODULE__{target_scope: :account}),
-    do: v |> add_to(:services, :table)
-
-  def add_service_file(v = %__MODULE__{target_scope: :account}), do: v |> add_to(:services, :file)
-
-  @resource_types_map %{service: "s", object: "o", container: "c"}
-  def add_resource_type_service(v = %__MODULE__{target_scope: :account}),
-    do: v |> add_to(:resource_type, :service)
-
-  def add_resource_type_container(v = %__MODULE__{target_scope: :account}),
-    do: v |> add_to(:resource_type, :container)
-
-  def add_resource_type_object(v = %__MODULE__{target_scope: :account}),
-    do: v |> add_to(:resource_type, :object)
-
-  @resource_map %{
-    # https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#specifying-the-signed-resource-blob-service-only
-    container: "c",
-    blob: "b",
-    # https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#specifying-the-signed-resource-file-service-only
-    share: "s",
-    file: "f"
-  }
-
-  def add_resource_blob_container(v = %__MODULE__{}), do: v |> add_to(:resource, :container)
-
-  def add_resource_blob_blob(v = %__MODULE__{}), do: v |> add_to(:resource, :blob)
-
-  @permissions_map %{
-    read: "r",
-    write: "w",
-    delete: "d",
-    list: "l",
-    add: "a",
-    create: "c",
-    update: "u",
-    process: "p"
-  }
-  def add_permission_read(v = %__MODULE__{target_scope: :account}),
-    do: v |> add_to(:permissions, :read)
-
-  def add_permission_write(v = %__MODULE__{target_scope: :account}),
-    do: v |> add_to(:permissions, :write)
-
-  def add_permission_delete(v = %__MODULE__{target_scope: :account}),
-    do: v |> add_to(:permissions, :delete)
-
-  def add_permission_list(v = %__MODULE__{target_scope: :account}),
-    do: v |> add_to(:permissions, :list)
-
-  def add_permission_add(v = %__MODULE__{target_scope: :account}),
-    do: v |> add_to(:permissions, :add)
-
-  def add_permission_create(v = %__MODULE__{target_scope: :account}),
-    do: v |> add_to(:permissions, :create)
-
-  def add_permission_update(v = %__MODULE__{target_scope: :account}),
-    do: v |> add_to(:permissions, :update)
-
-  def add_permission_process(v = %__MODULE__{target_scope: :account}),
-    do: v |> add_to(:permissions, :process)
-
-  defp as_time(t), do: t |> Timex.format!("{YYYY}-{0M}-{0D}T{0h24}:{0m}:{0s}Z")
-
-  def service_version(v = %__MODULE__{}, service_version),
-    do: %{v | service_version: service_version}
-
-  def start_time(v = %__MODULE__{}, start_time), do: %{v | start_time: start_time}
-  def expiry_time(v = %__MODULE__{}, expiry_time), do: %{v | expiry_time: expiry_time}
-  def resource(v = %__MODULE__{}, resource), do: %{v | resource: resource}
-  def ip_range(v = %__MODULE__{}, ip_range), do: %{v | ip_range: ip_range}
-  def protocol(v = %__MODULE__{}, protocol), do: %{v | protocol: protocol}
-
-  def encode({key, value}) do
-    case key do
-      :service_version -> {"sv", value}
-      :start_time -> {"st", value |> as_time()}
-      :expiry_time -> {"se", value |> as_time()}
-      :resource -> {"sr", value |> set_to_string(@resource_map)}
-      :ip_range -> {"sip", value}
-      :protocol -> {"spr", value}
-      :services -> {"ss", value |> set_to_string(@services_map)}
-      :resource_type -> {"srt", value |> set_to_string(@resource_types_map)}
-      :permissions -> {"sp", value |> set_to_string(@permissions_map)}
-      _ -> {nil, nil}
-    end
-  end
-
-  def sign(
-        sas = %__MODULE__{target_scope: target_scope},
-        %Storage{account_name: account_name, account_key: account_key}
-      )
-      when is_atom(target_scope) and target_scope != nil do
-    # https://docs.microsoft.com/en-us/rest/api/storageservices/service-sas-examples
-    values =
-      sas
-      |> Map.from_struct()
-      |> Enum.filter(fn {_, val} -> val != nil end)
-      |> Enum.map(&__MODULE__.encode/1)
-      |> Enum.filter(fn {_, val} -> val != nil end)
-      |> Map.new()
-
-    stringToSign =
-      [
-        account_name,
-        values |> Map.get("sp", ""),
-        values |> Map.get("ss", ""),
-        values |> Map.get("srt", ""),
-        values |> Map.get("st", ""),
-        values |> Map.get("se", ""),
-        values |> Map.get("sip", ""),
-        values |> Map.get("spr", ""),
-        values |> Map.get("sv", ""),
-        ""
-      ]
-      |> Enum.join("\n")
-      |> IO.inspect(label: "stringToSign")
-
-    signature =
-      :crypto.hmac(:sha256, account_key |> Base.decode64!(), stringToSign)
-      |> Base.encode64()
-
-    values
-    |> Map.put("sig", signature)
-    |> URI.encode_query()
-  end
-
-  def sas1() do
-    new()
-    |> for_storage_account()
-    |> add_service_table()
-    |> add_service_queue()
-    |> add_service_queue()
-    |> add_service_queue()
-    |> add_resource_type_service()
-    |> add_resource_type_object()
-    |> add_permission_read()
-    |> ip_range("168.1.5.60-168.1.5.70")
-    # |> for_blob_service()
-    |> start_time(Timex.now())
-    |> expiry_time(Timex.now() |> Timex.add(Timex.Duration.from_hours(1)))
-    |> protocol("https")
-  end
-
-  def demo() do
-    sas1()
-    |> sign(%Storage{
-      cloud_environment_suffix: "core.windows.net",
-      account_name: "SAMPLE_STORAGE_ACCOUNT_NAME" |> System.get_env(),
-      account_key: "SAMPLE_STORAGE_ACCOUNT_KEY" |> System.get_env()
-    })
-    |> URI.decode_query()
-  end
-
-  def d2() do
-    new()
-    |> service_version(ApiVersion.get_api_version(:storage))
-    |> for_storage_account()
-    |> add_service_blob()
-    |> add_resource_type_container()
-    |> add_resource_blob_container()
-    |> add_permission_read()
-    |> add_permission_process()
-    |> add_permission_list()
-    |> start_time(Timex.now())
-    |> expiry_time(Timex.now() |> Timex.add(Timex.Duration.from_days(100)))
-    |> sign(%Storage{
-      cloud_environment_suffix: "core.windows.net",
-      account_name: "SAMPLE_STORAGE_ACCOUNT_NAME" |> System.get_env(),
-      account_key: "SAMPLE_STORAGE_ACCOUNT_KEY" |> System.get_env()
-    })
-  end
-end
diff --git a/lib/microsoft/azure/storage/storage.ex b/lib/microsoft/azure/storage/storage.ex
deleted file mode 100644
index 7c267e2..0000000
--- a/lib/microsoft/azure/storage/storage.ex
+++ /dev/null
@@ -1,55 +0,0 @@
-defmodule Microsoft.Azure.Storage do
-  @derive {Inspect, except: [:account_key]}
-  @enforce_keys []
-  defstruct [
-    :account_name,
-    :account_key,
-    :aad_token_provider,
-    :cloud_environment_suffix,
-    :is_development_factory
-  ]
-
-  @endpoint_names %{
-    blob_service: "blob",
-    queue_service: "queue",
-    table_service: "table",
-    file_service: "file"
-  }
-
-  @development_fabric_key "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw=="
-
-  @doc """
-  Returns the storage context for the Azure storage emulator.
- """ - def development_factory(), - do: %__MODULE__{ - # https://docs.microsoft.com/en-us/azure/storage/common/storage-use-emulator#authenticating-requests-against-the-storage-emulator - account_name: "devstoreaccount1", - account_key: @development_fabric_key, - is_development_factory: true - } - - def secondary(context = %__MODULE__{}), - do: - context - |> Map.update!(:account_name, &(&1 <> "-secondary")) - - def endpoint_url(context = %__MODULE__{is_development_factory: true}, service) - when is_atom(service) do - port = - case service do - :blob_service -> 10000 - :queue_service -> 10001 - :table_service -> 10002 - end - - %URI{scheme: "http", host: "127.0.0.1", port: port, path: context.account_name} - |> URI.to_string() - end - - def endpoint_url(context = %__MODULE__{}, service) when is_atom(service), - do: %URI{scheme: "https", host: endpoint_hostname(context, service)} |> URI.to_string() - - def endpoint_hostname(context = %__MODULE__{}, service) when is_atom(service), - do: "#{context.account_name}.#{@endpoint_names[service]}.#{context.cloud_environment_suffix}" -end diff --git a/lib/sample.ex b/lib/sample.ex deleted file mode 100644 index 5187852..0000000 --- a/lib/sample.ex +++ /dev/null @@ -1,257 +0,0 @@ -defmodule Sample do - use Timex - - alias Microsoft.Azure.Storage - alias Microsoft.Azure.Storage.{BlobStorage, BlobPolicy, Container, ContainerLease, Blob} - - import XmlBuilder - - def person(id, first, last) do - element(:person, %{id: id}, [ - element(:first, first), - element(:last, last) - ]) - end - - def storage_context(), - do: %Storage{ - account_name: "SAMPLE_STORAGE_ACCOUNT_NAME" |> System.get_env(), - account_key: "SAMPLE_STORAGE_ACCOUNT_KEY" |> System.get_env(), - cloud_environment_suffix: "core.windows.net" - } - - def upload() do - filename = "C:/Users/chgeuer/Desktop/Konstantin/VID_20181213_141227.mp4" - # ""../../../Users/chgeuer/Videos/outbreak.mp4" - - container = - storage_context() - |> Container.new("videos") - - container - |> Container.create_container() - - container - |> Blob.upload_file(filename) - end - - def get_blob_service_properties(), - do: storage_context() |> BlobStorage.get_blob_service_properties() - - def re_set_blob_service_properties() do - props = - storage_context() - |> BlobStorage.get_blob_service_properties() - |> elem(1) - |> Map.get(:service_properties) - - storage_context() - |> BlobStorage.set_blob_service_properties(props) - end - - def list_containers(), - do: - storage_context() - |> Container.list_containers() - - def get_blob_service_stats(), - do: storage_context() |> BlobStorage.get_blob_service_stats() - - def create_container(container_name), - do: - storage_context() - |> Container.new(container_name) - |> Container.create_container() - - def delete_container(container_name), - do: - storage_context() - |> Container.new(container_name) - |> Container.delete_container() - - def list_blobs(container_name, opts \\ []), - do: - storage_context() - |> Container.new(container_name) - |> Container.list_blobs(opts) - - def get_container_properties(container_name), - do: - storage_context() - |> Container.new(container_name) - |> Container.get_container_properties() - - def get_container_metadata(container_name), - do: - storage_context() - |> Container.new(container_name) - |> Container.get_container_metadata() - - def get_container_acl(container_name), - do: - storage_context() - |> Container.new(container_name) - |> Container.get_container_acl() - - def set_container_acl_public_access_off(container_name), - do: - 
-      storage_context()
-      |> Container.new(container_name)
-      |> Container.set_container_acl_public_access_off()
-
-  def set_container_acl_public_access_blob(container_name),
-    do:
-      storage_context()
-      |> Container.new(container_name)
-      |> Container.set_container_acl_public_access_blob()
-
-  def set_container_acl_public_access_container(container_name),
-    do:
-      storage_context()
-      |> Container.new(container_name)
-      |> Container.set_container_acl_public_access_container()
-
-  def set_container_acl(container_name),
-    do:
-      storage_context()
-      |> Container.new(container_name)
-      |> Container.set_container_acl([
-        %BlobPolicy{
-          id: "pol1",
-          start: Timex.now() |> Timex.shift(minutes: -10),
-          expiry: Timex.now() |> Timex.shift(years: 1),
-          permission: [:list]
-        }
-      ])
-
-  def container_lease_acquire(container_name) do
-    lease_duration = 16
-
-    storage_context()
-    |> Container.new(container_name)
-    |> ContainerLease.container_lease_acquire(
-      lease_duration,
-      "00000000-1111-2222-3333-444444444444"
-    )
-
-    0..lease_duration
-    |> Enum.each(fn i ->
-      Process.sleep(1000)
-
-      {:ok, %{lease_state: lease_state, lease_status: lease_status}} =
-        get_container_properties(container_name)
-
-      IO.puts("#{i}: lease_state=#{lease_state} lease_status=#{lease_status}")
-    end)
-  end
-
-  def container_lease_release(container_name) do
-    lease_duration = 60
-
-    storage_context()
-    |> Container.new(container_name)
-    |> ContainerLease.container_lease_acquire(
-      lease_duration,
-      "00000000-1111-2222-3333-444444444444"
-    )
-
-    0..3
-    |> Enum.each(fn i ->
-      Process.sleep(200)
-
-      {:ok, %{lease_state: lease_state, lease_status: lease_status}} =
-        get_container_properties(container_name)
-
-      IO.puts("#{i}: lease_state=#{lease_state} lease_status=#{lease_status}")
-    end)
-
-    IO.puts("Call release now")
-
-    storage_context()
-    |> Container.new(container_name)
-    |> ContainerLease.container_lease_release("00000000-1111-2222-3333-444444444444")
-
-    0..3
-    |> Enum.each(fn i ->
-      Process.sleep(200)
-
-      {:ok, %{lease_state: lease_state, lease_status: lease_status}} =
-        get_container_properties(container_name)
-
-      IO.puts("#{i}: lease_state=#{lease_state} lease_status=#{lease_status}")
-    end)
-  end
-
-  def container_lease_renew(container_name) do
-    lease_duration = 16
-
-    {:ok,
-     %{
-       lease_id: lease_id
-     }} =
-      storage_context()
-      |> Container.new(container_name)
-      |> ContainerLease.container_lease_acquire(
-        lease_duration,
-        "00000000-1111-2222-3333-444444444444"
-      )
-
-    IO.puts("Acquired lease #{lease_id}")
-
-    0..lease_duration
-    |> Enum.each(fn _ ->
-      Process.sleep(1000)
-
-      storage_context()
-      |> Container.new(container_name)
-      |> ContainerLease.container_lease_renew(lease_id)
-    end)
-  end
-
-  def container_lease_break(container_name) do
-    lease_duration = 60
-
-    {:ok,
-     %{
-       lease_id: lease_id
-     }} =
-      storage_context()
-      |> Container.new(container_name)
-      |> ContainerLease.container_lease_acquire(
-        lease_duration,
-        "00000000-1111-2222-3333-444444444444"
-      )
-
-    IO.puts("Acquired lease #{lease_id}")
-
-    Process.sleep(1000)
-
-    break_period = 5
-
-    storage_context()
-    |> Container.new(container_name)
-    |> ContainerLease.container_lease_break(lease_id, break_period)
-  end
-
-  def container_lease_acquire_and_change(container_name) do
-    lease_duration = 60
-
-    storage_context()
-    |> Container.new(container_name)
-    |> ContainerLease.container_lease_acquire(
-      lease_duration,
-      "00000000-1111-2222-3333-444444444444"
-    )
-
-    Process.sleep(1000)
-
-    IO.puts("Change to new lease ID ")
-
-    storage_context()
-    |> Container.new(container_name)
-    |> ContainerLease.container_lease_change(
-      "00000000-1111-2222-3333-444444444444",
-      "00000000-1111-2222-3333-555555555555"
-    )
-  end
-end
diff --git a/lib/microsoft/azure/storage/api_version.ex b/lib/storage/api_version.ex
similarity index 91%
rename from lib/microsoft/azure/storage/api_version.ex
rename to lib/storage/api_version.ex
index de8122f..8526e6f 100644
--- a/lib/microsoft/azure/storage/api_version.ex
+++ b/lib/storage/api_version.ex
@@ -1,9 +1,13 @@
-defmodule Microsoft.Azure.Storage.ApiVersion do
+defmodule ExMicrosoftAzureStorage.Storage.ApiVersion do
+  @moduledoc """
+  ApiVersion
+  """
+
   # "2015-04-05"
   # def get_api_version(:storage), do: "2016-05-31"
   # "2017-07-29"
   # "2017-11-09"
-  def get_api_version(:storage), do: "2018-03-28"
+  def get_api_version(:storage), do: "2020-04-08"
 
   defstruct [:year, :month, :day]
 
diff --git a/lib/storage/blob.ex b/lib/storage/blob.ex
new file mode 100644
index 0000000..d0a8d76
--- /dev/null
+++ b/lib/storage/blob.ex
@@ -0,0 +1,629 @@
+defmodule ExMicrosoftAzureStorage.Storage.Blob do
+  @moduledoc """
+  Blob
+  """
+  require Logger
+
+  import ExMicrosoftAzureStorage.Storage.RequestBuilder
+  import SweetXml
+
+  alias ExMicrosoftAzureStorage.Storage.BlobProperties
+  alias ExMicrosoftAzureStorage.Storage.Container
+
+  @enforce_keys [:container, :blob_name]
+  @max_concurrency 3
+  @max_number_of_blocks 50_000
+  @mega_byte 1024 * 1024
+  @max_block_size 4 * @mega_byte
+  @max_block_size_100_mega_byte 100 * @mega_byte
+
+  defstruct [:container, :blob_name]
+
+  def new(%Container{} = container, blob_name)
+      when is_binary(blob_name),
+      do: %__MODULE__{container: container, blob_name: blob_name}
+
+  def to_block_id(block_id) when is_binary(block_id), do: block_id
+  def to_block_id(block_id) when is_integer(block_id), do: <<block_id::120>> |> Base.encode64()
+
+  @doc """
+  The `put_block` operation creates a new block to be committed as part of a blob.
+  """
+  def put_block(
+        %__MODULE__{
+          container: %Container{storage_context: context, container_name: container_name},
+          blob_name: blob_name
+        },
+        block_id,
+        content
+      )
+      when is_binary(block_id) and byte_size(content) <= @max_block_size_100_mega_byte do
+    # https://docs.microsoft.com/en-us/rest/api/storageservices/put-block
+
+    response =
+      context
+      |> new_azure_storage_request()
+      |> method(:put)
+      |> url("/#{container_name}/#{blob_name}")
+      |> add_param(:query, :comp, "block")
+      # |> to_block_id())
+      |> add_param(:query, :blockid, block_id)
+      |> body(content)
+      |> add_header_content_md5()
+      |> sign_and_call(:blob_service)
+
+    case response do
+      %{status: status} when 400 <= status and status < 500 ->
+        {:error, response |> create_error_response()}
+
+      %{status: 201} ->
+        {:ok,
+         response
+         |> create_success_response()}
+    end
+  end
+
+  @doc """
+  The `put_block_list` operation writes a blob by specifying the list of block IDs that make up the blob.
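+
+  A minimal sketch (container, blob name and chunk variables are illustrative) that
+  commits two previously staged blocks:
+
+      blob = Blob.new(container, "movie.mp4")
+      {:ok, _} = Blob.put_block(blob, Blob.to_block_id(1), chunk_one)
+      {:ok, _} = Blob.put_block(blob, Blob.to_block_id(2), chunk_two)
+      {:ok, _} = Blob.put_block_list(blob, Enum.map([1, 2], &Blob.to_block_id/1))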
+ """ + def put_block_list( + %__MODULE__{ + container: %Container{storage_context: context, container_name: container_name}, + blob_name: blob_name + }, + block_list, + headers \\ [] + ) + when is_list(block_list) do + # https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-list + response = + context + |> new_azure_storage_request() + |> method(:put) + |> url("/#{container_name}/#{blob_name}") + |> add_param(:query, :comp, "blocklist") + |> body(block_list |> serialize_block_list()) + |> add_headers(headers) + |> sign_and_call(:blob_service) + + case response do + %{status: status} when 400 <= status and status < 500 -> + {:error, response |> create_error_response()} + + %{status: 201} -> + {:ok, + response + |> create_success_response()} + end + end + + @template_block_list """ + + + <%= for block <- @block_list do %> + <%= block |> ExMicrosoftAzureStorage.Storage.Blob.to_block_id() %> + <% end %> + + """ + + defp serialize_block_list(block_list), + do: @template_block_list |> EEx.eval_string(assigns: [block_list: block_list]) |> to_string() + + defp deserialize_block_list(xml_body) do + deserialize_block = fn node -> + %{ + name: node |> xpath(~x"./Name/text()"s), + size: + node + |> xpath( + ~x"./Size/text()"s + |> transform_by(fn t -> t |> Integer.parse() |> elem(0) end) + ) + } + end + + %{ + committed_blocks: + xml_body + |> xpath(~x"/BlockList/CommittedBlocks/Block"l) + |> Enum.map(deserialize_block), + uncommitted_blocks: + xml_body + |> xpath(~x"/BlockList/UncommittedBlocks/Block"l) + |> Enum.map(deserialize_block) + } + end + + def get_block_list( + %__MODULE__{ + container: %Container{storage_context: context, container_name: container_name}, + blob_name: blob_name + }, + block_list_type \\ :all, + snapshot \\ nil + ) + when block_list_type in [:all, :committed, :uncommitted] do + # https://docs.microsoft.com/en-us/rest/api/storageservices/get-block-list + + response = + context + |> new_azure_storage_request() + |> method(:get) + |> url("/#{container_name}/#{blob_name}") + |> add_param(:query, :comp, "blocklist") + |> add_param(:query, :blocklisttype, block_list_type |> Atom.to_string()) + |> add_param_if(snapshot != nil, :query, :snapshot, snapshot) + |> sign_and_call(:blob_service) + + case response do + %{status: status} when 400 <= status and status < 500 -> + {:error, response |> create_error_response()} + + %{status: 200} -> + {:ok, + response + |> create_success_response() + |> Map.merge(response.body |> deserialize_block_list())} + end + end + + def get_blob(blob, opts \\ []) + + def get_blob( + %__MODULE__{ + container: %Container{storage_context: context, container_name: container_name}, + blob_name: blob_name + }, + _opts + ) do + response = + context + |> new_azure_storage_request() + |> method(:get) + |> url("/#{container_name}/#{blob_name}") + |> sign_and_call(:blob_service) + + case response do + %{status: status} when 400 <= status and status < 500 -> + {:error, response |> create_error_response()} + + %{status: 200} -> + {:ok, response |> create_success_response()} + end + end + + def get_blob_properties(%__MODULE__{ + container: %Container{storage_context: context, container_name: container_name}, + blob_name: blob_name + }) do + response = + context + |> new_azure_storage_request() + |> method(:head) + |> url("/#{container_name}/#{blob_name}") + |> sign_and_call(:blob_service) + + case response do + %{status: status} when 400 <= status and status < 500 -> + {:error, response |> create_error_response()} + + %{status: 200} -> + {:ok, + response 
+         |> create_success_response()
+         |> Map.put(:properties, response.headers |> BlobProperties.deserialise())}
+    end
+  end
+
+  @allowed_set_blob_headers [
+    "x-ms-blob-cache-control",
+    "x-ms-blob-content-type",
+    "x-ms-blob-content-md5",
+    "x-ms-blob-content-encoding",
+    "x-ms-blob-content-language",
+    "x-ms-blob-content-disposition"
+  ]
+
+  @doc """
+  Sets blob properties.
+
+  Follows the same behaviour as the underlying REST API, where setting one property implicitly
+  sets the others to nil unless you explicitly set them in the same request.
+
+  See <https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties>
+  """
+  def set_blob_properties(
+        %__MODULE__{
+          container: %Container{storage_context: context, container_name: container_name},
+          blob_name: blob_name
+        },
+        %BlobProperties{} = blob_properties
+      ) do
+    headers =
+      blob_properties
+      |> BlobProperties.serialise()
+      |> Enum.map(&transform_set_blob_property_header/1)
+      |> Enum.filter(fn {header, _value} -> Enum.member?(@allowed_set_blob_headers, header) end)
+
+    response =
+      context
+      |> new_azure_storage_request()
+      |> method(:put)
+      |> url("/#{container_name}/#{blob_name}")
+      |> add_param(:query, :comp, "properties")
+      |> add_headers(headers)
+      |> sign_and_call(:blob_service)
+
+    case response do
+      %{status: status} when 400 <= status and status < 500 ->
+        {:error, response |> create_error_response()}
+
+      %{status: 200} ->
+        {:ok, response |> create_success_response()}
+    end
+  end
+
+  @doc """
+  Updates blob properties.
+
+  Similar to `set_blob_properties/2` but keeps existing values by first performing
+  `get_blob_properties/1`, merging the result and handing it over to `set_blob_properties/2`.
+  """
+  def update_blob_properties(blob, blob_properties) do
+    with {:ok, %{properties: existing_blob_properties}} <- blob |> get_blob_properties() do
+      merged_properties = Map.merge(existing_blob_properties, blob_properties)
+      blob |> set_blob_properties(merged_properties)
+    end
+  end
+
+  defp transform_set_blob_property_header({"cache-control", value}),
+    do: {"x-ms-blob-cache-control", value}
+
+  defp transform_set_blob_property_header({"content-type", value}),
+    do: {"x-ms-blob-content-type", value}
+
+  defp transform_set_blob_property_header({"content-md5", value}),
+    do: {"x-ms-blob-content-md5", value}
+
+  defp transform_set_blob_property_header({"content-encoding", value}),
+    do: {"x-ms-blob-content-encoding", value}
+
+  defp transform_set_blob_property_header({"content-language", value}),
+    do: {"x-ms-blob-content-language", value}
+
+  defp transform_set_blob_property_header({"content-disposition", value}),
+    do: {"x-ms-blob-content-disposition", value}
+
+  defp transform_set_blob_property_header(header), do: header
+
+  def put_blob(
+        %__MODULE__{
+          container: %Container{storage_context: context, container_name: container_name},
+          blob_name: blob_name
+        },
+        blob_data,
+        opts \\ []
+      ) do
+    opts =
+      opts
+      |> Keyword.put(:blob_type, "BlockBlob")
+
+    response =
+      context
+      |> new_azure_storage_request()
+      |> method(:put)
+      |> url("/#{container_name}/#{blob_name}")
+      |> body(blob_data)
+      |> add_headers_from_opts(opts)
+      |> add_header_content_md5()
+      |> sign_and_call(:blob_service)
+
+    case response do
+      %{status: status} when 400 <= status and status < 500 ->
+        {:error, response |> create_error_response()}
+
+      %{status: 201} ->
+        {:ok, response |> create_success_response()}
+    end
+  end
+
+  defp add_headers(request, headers) do
+    Enum.reduce(headers, request, fn {k, v}, request -> request |> add_header(k, v) end)
+  end
+
+  defp add_headers_from_opts(request, opts) do
+    Enum.reduce(opts, request, fn {key, value}, request ->
+      request |> add_header(header_for_opt(key), value)
+    end)
+  end
+
+  defp header_for_opt(:blob_type), do: "x-ms-blob-type"
+  defp header_for_opt(:copy_source), do: "x-ms-copy-source"
+  defp header_for_opt(:content_type), do: "x-ms-blob-content-type"
+  defp header_for_opt(:content_disposition), do: "x-ms-blob-content-disposition"
+  defp header_for_opt(:content_encoding), do: "x-ms-blob-content-encoding"
+
+  def put_blob_from_url(
+        %__MODULE__{
+          container: %Container{storage_context: context, container_name: container_name},
+          blob_name: blob_name
+        } = blob,
+        url,
+        opts \\ []
+      ) do
+    opts =
+      opts
+      |> Keyword.put(:blob_type, "BlockBlob")
+      |> Keyword.put(:copy_source, url)
+
+    {content_opts, opts} =
+      opts
+      |> Keyword.split([:content_type, :content_encoding, :content_disposition, :content_language])
+
+    {content_type_workaround_enabled, opts} = opts |> Keyword.pop(:content_type_workaround, false)
+
+    response =
+      context
+      |> new_azure_storage_request()
+      |> method(:put)
+      |> url("/#{container_name}/#{blob_name}")
+      |> add_headers_from_opts(opts)
+      |> sign_and_call(:blob_service)
+
+    case response do
+      %{status: status} when 400 <= status and status < 500 ->
+        {:error, response |> create_error_response()}
+
+      %{status: 201} ->
+        with {:ok, _response} <-
+               workaround_for_put_blob_from_url(
+                 blob,
+                 url,
+                 content_opts,
+                 content_type_workaround_enabled
+               ) do
+          {:ok, response |> create_success_response()}
+        end
+    end
+  end
+
+  # Workaround for a bug in Azure Storage where original content-type is lost on put_blob_from_url
+  # requests https://github.com/joeapearson/elixir-azure/issues/2
+  defp workaround_for_put_blob_from_url(_blob, _url, _content_opts, false) do
+    unless suppress_workaround_for_put_blob_from_url_warning?() do
+      Logger.warning("""
+      Your blob's content-* metadata may not have been correctly copied.
+
+      Set `content_type_workaround: true` when calling `Blob.put_blob_from_url/2` to work around.
+
+      See https://github.com/joeapearson/elixir-azure/issues/2
+      """)
+    end
+
+    {:ok, nil}
+  end
+
+  defp workaround_for_put_blob_from_url(blob, url, [], true) do
+    # In this case we have to do the work of finding out what the original source content-type was
+    # and then setting it on the blob. Results in many requests and accordingly is less reliable.
+
+    with {:ok, %{status: 200, headers: source_headers}} <- Tesla.head(url) do
+      blob_properties = BlobProperties.deserialise(source_headers)
+      update_blob_properties(blob, blob_properties)
+    end
+  end
+
+  defp workaround_for_put_blob_from_url(blob, _url, content_type_attrs, true) do
+    blob |> update_blob_properties(struct!(BlobProperties, content_type_attrs))
+  end
+
+  defp suppress_workaround_for_put_blob_from_url_warning? do
+    Keyword.get(config(), :suppress_workaround_for_put_blob_from_url_warning?, false)
+  end
+
+  @spec upload_file(Container.t(), String.t(), String.t() | nil, map | nil) ::
+          {:ok, map} | {:error, map}
+  def upload_file(
+        container,
+        source_path,
+        blob_name \\ nil,
+        blob_properties \\ nil
+      )
+
+  def upload_file(
+        %Container{} = container,
+        source_path,
+        blob_name,
+        blob_properties
+      )
+      when is_map(blob_properties) do
+    headers =
+      BlobProperties
+      |> struct(blob_properties)
+      |> BlobProperties.serialise()
+      |> Enum.map(&transform_set_blob_property_header/1)
+      |> Enum.filter(fn {header, _value} -> Enum.member?(@allowed_set_blob_headers, header) end)
+
+    container
+    |> to_blob(source_path, blob_name)
+    |> upload_async(source_path, headers)
+  end
+
+  def upload_file(%Container{} = container, source_path, blob_name, nil) do
+    container
+    |> to_blob(source_path, blob_name)
+    |> upload_async(source_path, [])
+  end
+
+  defp to_blob(container, source_path, nil) do
+    target_filename =
+      source_path
+      |> Path.basename()
+      |> URI.encode()
+
+    to_blob(container, source_path, target_filename)
+  end
+
+  defp to_blob(container, _source_filename, target_filename) do
+    __MODULE__.new(container, target_filename)
+  end
+
+  defp upload_async(blob, filename, headers) do
+    blob
+    |> upload_stream(filename)
+    |> stream_to_block_ids()
+    |> case do
+      {:error, _reason} = err ->
+        err
+
+      {:ok, ids} ->
+        commit_block_ids(blob, ids, headers)
+    end
+  end
+
+  defp upload_stream(blob, filename) do
+    filename
+    |> File.stream!([], @max_block_size)
+    |> Stream.zip(1..@max_number_of_blocks)
+    |> Task.async_stream(
+      fn {content, i} ->
+        block_id = to_block_id(i)
+
+        case put_block(blob, block_id, content) do
+          {:ok, _} ->
+            block_id
+
+          {:error, _resp} = error ->
+            error
+        end
+      end,
+      max_concurrency: @max_concurrency,
+      ordered: true,
+      timeout: :infinity
+    )
+    |> Enum.to_list()
+  end
+
+  defp stream_to_block_ids(results) do
+    Enum.reduce_while(results, {:ok, []}, fn
+      {_, {:error, reason}}, {_status, _ids} ->
+        {:halt, {:error, reason}}
+
+      {_, id}, {status, ids} ->
+        {:cont, {status, [id | ids]}}
+    end)
+  end
+
+  defp commit_block_ids(blob, ids, headers) do
+    block_ids =
+      1..@max_number_of_blocks
+      |> Enum.map(&to_block_id/1)
+      |> Enum.filter(&(&1 in ids))
+
+    put_block_list(blob, block_ids, headers)
+  end
+
+  def delete_blob(
+        %__MODULE__{
+          container: %Container{storage_context: context, container_name: container_name},
+          blob_name: blob_name
+        },
+        opts \\ []
+      ) do
+    # https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
+
+    %{snapshot: snapshot, timeout: timeout} =
+      case [snapshot: nil, timeout: -1]
+           |> Keyword.merge(opts)
+           |> Enum.into(%{}) do
+        %{snapshot: snapshot, timeout: timeout} -> %{snapshot: snapshot, timeout: timeout}
+      end
+
+    response =
+      context
+      |> new_azure_storage_request()
+      |> method(:delete)
+      |> url("/#{container_name}/#{blob_name}")
+      |> add_param_if(snapshot != nil, :query, :snapshot, snapshot)
+      |> add_param_if(timeout > 0, :query, :timeout, timeout)
+      |> sign_and_call(:blob_service)
+
+    case response do
+      %{status: status} when 400 <= status and status < 500 ->
+        {:error, response |> create_error_response()}
+
+      %{status: 202} ->
+        {:ok, response |> create_success_response()}
+    end
+  end
+
+  def copy_stream(
+        %__MODULE__{} = source,
+        %__MODULE__{
+          container: %Container{storage_context: context, container_name: container_name},
+          blob_name: blob_name
+        } = target,
+        opts \\ []
+      ) do
+    opts =
+      opts
+      |> Keyword.put(:copy_source, url(source))
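+
+    # How often to poll the pending copy operation, in milliseconds; callers may
+    # override with the `:poll_interval` option.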
+    poll_interval = Keyword.get(opts, :poll_interval, 5000)
+
+    Stream.resource(
+      fn ->
+        context
+        |> new_azure_storage_request()
+        |> method(:put)
+        |> url("/#{container_name}/#{blob_name}")
+        |> add_headers_from_opts(opts)
+        |> sign_and_call(:blob_service)
+      end,
+      fn
+        nil ->
+          :timer.sleep(poll_interval)
+          {[get_blob_properties(target)], nil}
+
+        %{status: status} = response when 400 <= status and status < 500 ->
+          {[{:error, response |> create_error_response()}], nil}
+
+        %{status: status} = response when status < 300 ->
+          {[{:ok, response |> create_success_response()}], nil}
+      end,
+      fn _ -> nil end
+    )
+    |> Stream.flat_map(fn
+      {:ok, %{x_ms_copy_status: status}} = result
+      when status != "success" and status != "failed" ->
+        [result]
+
+      result ->
+        [result, :halt]
+    end)
+    |> Stream.take_while(fn
+      :halt -> false
+      _ -> true
+    end)
+  end
+
+  def copy(
+        %__MODULE__{} = source,
+        %__MODULE__{} = target,
+        opts \\ []
+      ) do
+    copy_stream(source, target, opts)
+    |> Enum.reduce(nil, fn result, _ -> result end)
+  end
+
+  def url(%__MODULE__{
+        container: %Container{
+          storage_context: context,
+          container_name: container
+        },
+        blob_name: blob_name
+      }),
+      do:
+        ExMicrosoftAzureStorage.Storage.endpoint_url(context, :blob_service) <>
+          "/#{container}/#{blob_name}"
+
+  defp config, do: Application.get_env(:azure, __MODULE__, [])
+end
diff --git a/lib/microsoft/azure/storage/blob_policy.ex b/lib/storage/blob_policy.ex
similarity index 77%
rename from lib/microsoft/azure/storage/blob_policy.ex
rename to lib/storage/blob_policy.ex
index b3585b9..7b680a9 100644
--- a/lib/microsoft/azure/storage/blob_policy.ex
+++ b/lib/storage/blob_policy.ex
@@ -1,7 +1,11 @@
-defmodule Microsoft.Azure.Storage.BlobPolicy do
+defmodule ExMicrosoftAzureStorage.Storage.BlobPolicy do
+  @moduledoc """
+  BlobPolicy
+  """
+
   import SweetXml
-  import Microsoft.Azure.Storage.DateTimeUtils
-  import Microsoft.Azure.Storage.Utilities, only: [set_to_string: 2, string_to_set: 2]
+  import ExMicrosoftAzureStorage.Storage.DateTimeUtils
+  import ExMicrosoftAzureStorage.Storage.Utilities, only: [set_to_string: 2, string_to_set: 2]
   require EEx
 
   defstruct [:id, :start, :expiry, :permission]
@@ -38,9 +42,9 @@ defmodule Microsoft.Azure.Storage.BlobPolicy do
       <Id><%= policy.id %></Id>
       <AccessPolicy>
-        <Start><%= policy.start |> Microsoft.Azure.Storage.DateTimeUtils.to_string_iso8601() %></Start>
-        <Expiry><%= policy.expiry |> Microsoft.Azure.Storage.DateTimeUtils.to_string_iso8601() %></Expiry>
-        <Permission><%= policy.permission |> Microsoft.Azure.Storage.BlobPolicy.permission_serialize() %></Permission>
+        <Start><%= policy.start |> ExMicrosoftAzureStorage.Storage.DateTimeUtils.to_string_iso8601() %></Start>
+        <Expiry><%= policy.expiry |> ExMicrosoftAzureStorage.Storage.DateTimeUtils.to_string_iso8601() %></Expiry>
+        <Permission><%= policy.permission |> ExMicrosoftAzureStorage.Storage.BlobPolicy.permission_serialize() %></Permission>
       </AccessPolicy>
     </SignedIdentifier>
    <% end %>
@@ -48,7 +52,7 @@ defmodule Microsoft.Azure.Storage.BlobPolicy do
   """
 
   def serialize(policies) when is_list(policies),
-    do: @template |> EEx.eval_string(assigns: [policies: policies])
+    do: @template |> EEx.eval_string(assigns: [policies: policies]) |> Kernel.to_string()
 
   # def serialize(policies) when is_list(policies) do
   #   inner_xml =
diff --git a/lib/storage/blob_properties.ex b/lib/storage/blob_properties.ex
new file mode 100644
index 0000000..84d8e57
--- /dev/null
+++ b/lib/storage/blob_properties.ex
@@ -0,0 +1,179 @@
+defmodule ExMicrosoftAzureStorage.Storage.BlobProperties do
+  @moduledoc """
+  Blob properties.
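+
+  A round-trip sketch (header list shortened; values are illustrative):
+
+      [{"content-type", "video/mp4"}]
+      |> BlobProperties.deserialise()
+      |> BlobProperties.serialise()
+      # => [{"content-type", "video/mp4"}]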
+ """ + + import ExMicrosoftAzureStorage.Storage.DateTimeUtils + import ExMicrosoftAzureStorage.Storage.Utilities + + defstruct [ + :last_modified, + :creation_time, + :tag_count, + :blob_type, + :copy_completion_time, + :copy_status_description, + :copy_id, + :copy_progress, + :copy_source, + :copy_status, + :incremental_copy, + :copy_destination_snapshot, + :lease_duration, + :lease_state, + :lease_status, + :content_length, + :content_type, + :etag, + :content_md5, + :content_encoding, + :content_language, + :content_disposition, + :cache_control, + :blob_sequence_number, + :accept_ranges, + :blob_committed_block_count, + :blob_server_encrypted, + :encryption_key_sha256, + :encryption_scope, + :access_tier, + :access_tier_inferred, + :archive_status, + :access_tier_change_time, + :rehydrate_priority, + :last_access_time, + :blob_sealed + ] + + @type t() :: %__MODULE__{ + last_modified: DateTime.t(), + creation_time: DateTime.t(), + tag_count: integer(), + blob_type: String.t(), + copy_completion_time: DateTime.t(), + copy_status_description: String.t(), + copy_id: String.t(), + copy_progress: String.t(), + copy_source: String.t(), + copy_status: String.t(), + incremental_copy: boolean(), + copy_destination_snapshot: DateTime.t(), + lease_duration: String.t(), + lease_state: String.t(), + lease_status: String.t(), + content_length: non_neg_integer(), + content_type: String.t(), + etag: String.t(), + content_md5: String.t(), + content_encoding: String.t(), + content_language: String.t(), + content_disposition: String.t(), + cache_control: String.t(), + blob_sequence_number: non_neg_integer(), + accept_ranges: String.t(), + blob_committed_block_count: non_neg_integer(), + blob_server_encrypted: boolean(), + encryption_key_sha256: String.t(), + encryption_scope: String.t(), + access_tier: String.t(), + access_tier_inferred: boolean(), + archive_status: String.t(), + access_tier_change_time: DateTime.t(), + rehydrate_priority: String.t(), + last_access_time: DateTime.t(), + blob_sealed: boolean() + } + + @type headers() :: [{String.t(), String.t()}] + + @headers [ + # { header, key, format} + {"last-modified", :last_modified, :rfc1123_datetime}, + {"x-ms-creation-time", :creation_time, :rfc1123_datetime}, + {"x-ms-tag-count", :tag_count, :integer}, + {"x-ms-blob-type", :blob_type, :string}, + {"x-ms-copy-completion-time", :copy_completion_time, :rfc1123_datetime}, + {"x-ms-copy-status-description", :copy_status_description, :string}, + {"x-ms-copy-id", :copy_id, :string}, + {"x-ms-copy-progress", :copy_progress, :string}, + {"x-ms-copy-source", :copy_source, :string}, + {"x-ms-copy-status", :copy_status, :string}, + {"x-ms-incremental_copy", :incremental_copy, :boolean}, + {"x-ms-copy-destination-snapshot", :copy_destination_snapshot, :rfc1123_datetime}, + {"x-ms-lease-duration", :lease_duration, :string}, + {"x-ms-lease-state", :lease_state, :string}, + {"x-ms-lease-status", :lease_status, :string}, + {"content-length", :content_length, :integer}, + {"content-type", :content_type, :string}, + {"etag", :etag, :string}, + {"content-md5", :content_md5, :string}, + {"content-encoding", :content_encoding, :string}, + {"content-language", :content_language, :string}, + {"content-disposition", :content_disposition, :string}, + {"cache-control", :cache_control, :string}, + {"x-ms-blob-sequence-number", :blob_sequence_number, :integer}, + {"accept-ranges", :accept_ranges, :string}, + {"x-ms-blob-committed-block-count", :blob_committed_block_count, :integer}, + {"x-ms-blob-server-encrypted", 
+    {"x-ms-encryption-key-sha256", :encryption_key_sha256, :string},
+    {"x-ms-encryption-scope", :encryption_scope, :string},
+    {"x-ms-access-tier", :access_tier, :string},
+    {"x-ms-access-tier-inferred", :access_tier_inferred, :boolean},
+    {"x-ms-archive-status", :archive_status, :string},
+    {"x-ms-access-tier-change-time", :access_tier_change_time, :rfc1123_datetime},
+    {"x-ms-rehydrate-priority", :rehydrate_priority, :string},
+    {"x-ms-last-access-time", :last_access_time, :rfc1123_datetime},
+    {"x-ms-blob-sealed", :blob_sealed, :boolean}
+  ]
+
+  @doc """
+  Serialises a `BlobProperties` struct into headers as a list of key/value tuples.
+  """
+  @spec serialise(properties :: __MODULE__.t()) :: headers()
+  def serialise(%__MODULE__{} = properties) do
+    @headers
+    |> Enum.reduce([], fn {header, key, type}, acc ->
+      case Map.get(properties, key) do
+        nil ->
+          acc
+
+        value ->
+          [{header, encode(value, type)} | acc]
+      end
+    end)
+  end
+
+  @doc """
+  Accepts a list of key/value tuples and converts them into a `BlobProperties` struct.
+  """
+  @spec deserialise(headers :: headers()) :: __MODULE__.t()
+  def deserialise(headers) do
+    attrs =
+      @headers
+      |> Enum.reduce(%{}, fn
+        {header, key, type}, acc ->
+          value = headers |> header(header) |> decode(type)
+          acc |> Map.put(key, value)
+      end)
+
+    struct!(__MODULE__, attrs)
+  end
+
+  defp header(headers, header) do
+    case List.keyfind(headers, header, 0) do
+      nil -> nil
+      {^header, value} -> value
+    end
+  end
+
+  defp encode(value, :rfc1123_datetime), do: value |> to_string_rfc1123()
+  defp encode(value, :integer), do: value |> Integer.to_string()
+  defp encode(value, :boolean), do: value |> to_string()
+  defp encode(value, _format), do: value
+
+  defp decode(nil, _decoder), do: nil
+  defp decode(value, :rfc1123_datetime), do: value |> date_parse_rfc1123()
+  defp decode(value, :integer), do: value |> String.to_integer()
+  defp decode(value, :boolean), do: value |> to_bool()
+  defp decode(value, _format), do: value
+end
diff --git a/lib/microsoft/azure/storage/blob_storage.ex b/lib/storage/blob_storage.ex
similarity index 86%
rename from lib/microsoft/azure/storage/blob_storage.ex
rename to lib/storage/blob_storage.ex
index 701d6fb..ee3dc1b 100644
--- a/lib/microsoft/azure/storage/blob_storage.ex
+++ b/lib/storage/blob_storage.ex
@@ -1,26 +1,31 @@
-defmodule Microsoft.Azure.Storage.BlobStorage do
-  use NamedArgs
+defmodule ExMicrosoftAzureStorage.Storage.BlobStorage do
+  @moduledoc """
+  BlobStorage
+  """
 
   import SweetXml
-  import Microsoft.Azure.Storage.RequestBuilder
+  import ExMicrosoftAzureStorage.Storage.RequestBuilder
+  import ExMicrosoftAzureStorage.Storage.Utilities, only: [to_bool: 1]
 
-  alias Microsoft.Azure.Storage
   alias __MODULE__.ServiceProperties
+  alias ExMicrosoftAzureStorage.Storage
 
   defmodule Responses do
-    import Microsoft.Azure.Storage.RequestBuilder
+    @moduledoc false
+    import ExMicrosoftAzureStorage.Storage.RequestBuilder
 
-    def get_blob_service_stats_response(),
-      do: [
+    def get_blob_service_stats_response do
+      [
         geo_replication: [
           ~x"/StorageServiceStats/GeoReplication",
           status: ~x"./Status/text()"s,
           last_sync_time: ~x"./LastSyncTime/text()"s
         ]
       ]
+    end
 
-    def get_blob_service_properties_response(),
-      do: [
+    def get_blob_service_properties_response do
+      [
         logging: [
           ~x"/StorageServiceProperties/Logging",
           version: ~x"./Version/text()"s,
@@ -74,12 +79,15 @@
           days: ~x"./Days/text()"I
         ]
       ]
+    end
   end
 
   defmodule ServiceProperties do
+    @moduledoc false
+    import SweetXml
     import XmlBuilder
-    import Microsoft.Azure.Storage.RequestBuilder
+    import ExMicrosoftAzureStorage.Storage.RequestBuilder
 
     alias __MODULE__.{Logging, RetentionPolicy, Metrics, CorsRule}
 
@@ -98,9 +106,11 @@ defmodule Microsoft.Azure.Storage.BlobStorage do
       |> Map.update!(:hour_metrics, &Metrics.to_struct/1)
       |> Map.update!(:minute_metrics, &Metrics.to_struct/1)
       |> Map.update!(:delete_retention_policy, &RetentionPolicy.to_struct/1)
+      |> Map.update!(:cors_rules, &CorsRule.to_struct/1)
     end
 
     defmodule Logging do
+      @moduledoc false
       defstruct [:version, :delete, :read, :write, :retention_policy]
 
       def to_struct(data) do
@@ -129,7 +139,10 @@ defmodule Microsoft.Azure.Storage.BlobStorage do
     end
 
     defmodule RetentionPolicy do
+      @moduledoc false
       defstruct [:enabled, :days]
+
+      def to_struct(nil), do: %__MODULE__{enabled: false, days: 0}
       def to_struct(data), do: struct(__MODULE__, data)
     end
 
@@ -143,6 +156,7 @@ defmodule Microsoft.Azure.Storage.BlobStorage do
     end
 
     defmodule Metrics do
+      @moduledoc false
      defstruct [:version, :enabled, :include_apis, :retention_policy]
 
       def to_struct(data) do
@@ -190,6 +204,7 @@ defmodule Microsoft.Azure.Storage.BlobStorage do
     end
 
     defmodule CorsRule do
+      @moduledoc false
       defstruct [
         :max_age_in_seconds,
        :allowed_origins,
@@ -198,6 +213,7 @@ defmodule Microsoft.Azure.Storage.BlobStorage do
         :allowed_headers
       ]
 
+      def to_struct(data) when is_list(data), do: data |> Enum.map(&to_struct/1)
       def to_struct(data), do: struct(__MODULE__, data)
     end
 
@@ -242,15 +258,15 @@ defmodule Microsoft.Azure.Storage.BlobStorage do
       ]})
     end
 
-    def parse(xml),
-      do:
-        xml
-        |> xmap(__MODULE__.storage_service_properties_parser())
-        |> Map.get(:storage_service_properties)
-        |> __MODULE__.to_struct()
+    def parse(xml) do
+      xml
+      |> xmap(__MODULE__.storage_service_properties_parser())
+      |> Map.get(:storage_service_properties)
+      |> __MODULE__.to_struct()
+    end
 
-    def storage_service_properties_parser(),
-      do: [
+    def storage_service_properties_parser do
+      [
         storage_service_properties: [
           ~x"/StorageServiceProperties",
           logging: [
@@ -300,19 +316,23 @@ defmodule Microsoft.Azure.Storage.BlobStorage do
             ~x"./AllowedHeaders/text()"s |> transform_by(&(&1 |> String.split(",")))
           ],
           default_service_version: ~x"/StorageServiceProperties/DefaultServiceVersion/text()"s,
+          # delete_retention_policy is not present in responses from Azurite (the storage simulator),
+          # so we have to make this property optional with the `o` modifier passed to `~x`.
delete_retention_policy: [ - ~x"./DeleteRetentionPolicy", + ~x"./DeleteRetentionPolicy"o, enabled: ~x"./Enabled/text()"s |> transform_by(&to_bool/1), days: ~x"./Days/text()"I ] ] ] + end end - def get_blob_service_stats(context = %Storage{}) do + def get_blob_service_stats(%Storage{} = context) do # https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats response = context + |> Storage.secondary() |> new_azure_storage_request() |> method(:get) |> url("/") @@ -337,7 +357,7 @@ defmodule Microsoft.Azure.Storage.BlobStorage do end end - def get_blob_service_properties(context = %Storage{}) do + def get_blob_service_properties(%Storage{} = context) do # https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties response = context @@ -353,17 +373,19 @@ defmodule Microsoft.Azure.Storage.BlobStorage do {:error, response |> create_error_response()} %{status: 200} -> + {_header, request_id} = response.headers |> List.keyfind("x-ms-request-id", 0) + {:ok, %{} |> Map.put(:service_properties, ServiceProperties.parse(response.body)) |> Map.put(:headers, response.headers) |> Map.put(:url, response.url) |> Map.put(:status, response.status) - |> Map.put(:request_id, response.headers["x-ms-request-id"])} + |> Map.put(:request_id, request_id)} end end - def set_blob_service_properties(context = %Storage{}, service_properties = %ServiceProperties{}) do + def set_blob_service_properties(%Storage{} = context, %ServiceProperties{} = service_properties) do # https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties response = context @@ -375,7 +397,7 @@ defmodule Microsoft.Azure.Storage.BlobStorage do |> add_header("Content-Type", "application/xml") |> body( service_properties - |> Microsoft.Azure.Storage.BlobStorage.ServiceProperties.xml_blob_service_properties() + |> ExMicrosoftAzureStorage.Storage.BlobStorage.ServiceProperties.xml_blob_service_properties() |> XmlBuilder.generate(format: :none) ) |> sign_and_call(:blob_service) diff --git a/lib/storage/connection_string.ex b/lib/storage/connection_string.ex new file mode 100644 index 0000000..5938ed1 --- /dev/null +++ b/lib/storage/connection_string.ex @@ -0,0 +1,34 @@ +defmodule ExMicrosoftAzureStorage.Storage.ConnectionString do + @moduledoc """ + ExMicrosoftAzureStorage Storage connection string utilities. + """ + + @doc """ + Parses an ExMicrosoftAzureStorage storage connection string into a plain map. + + Keys are normalised into lower case with underscores for convenience. + """ + @spec parse(connection_string :: String.t()) :: map() + def parse(connection_string) do + connection_string + |> String.split(";") + |> Enum.reduce(%{}, fn key_value_string, acc -> + {key, value} = parse_connection_string_item(key_value_string) + + Map.put(acc, key, value) + end) + end + + defp parse_connection_string_item(item) do + # The value part of the item can contain `=` (esp the account key which is base64-encoded), so + # `parts: 2` is essential. 
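+    # e.g. "AccountName=myaccount"   -> {:account_name, "myaccount"}
+    #      "AccountKey=c2VjcmV0PT0=" -> {:account_key, "c2VjcmV0PT0="}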
+ [k, v] = item |> String.split("=", parts: 2) + + {key_for(k), v} + end + + defp key_for("DefaultEndpointsProtocol"), do: :default_endpoints_protocol + defp key_for("AccountName"), do: :account_name + defp key_for("AccountKey"), do: :account_key + defp key_for("EndpointSuffix"), do: :endpoint_suffix +end diff --git a/lib/microsoft/azure/storage/container.ex b/lib/storage/container.ex similarity index 87% rename from lib/microsoft/azure/storage/container.ex rename to lib/storage/container.ex index cd288c9..dfcd149 100644 --- a/lib/microsoft/azure/storage/container.ex +++ b/lib/storage/container.ex @@ -1,20 +1,30 @@ -defmodule Microsoft.Azure.Storage.Container do +defmodule ExMicrosoftAzureStorage.Storage.Container do + @moduledoc """ + Container + """ + import SweetXml - import Microsoft.Azure.Storage.RequestBuilder + import ExMicrosoftAzureStorage.Storage.RequestBuilder + import ExMicrosoftAzureStorage.Storage.Utilities, only: [to_bool: 1] + + alias ExMicrosoftAzureStorage.Storage + alias ExMicrosoftAzureStorage.Storage.{BlobPolicy, DateTimeUtils} - alias Microsoft.Azure.Storage - alias Microsoft.Azure.Storage.{DateTimeUtils, BlobPolicy} + @type t :: %__MODULE__{container_name: String.t(), storage_context: map} @enforce_keys [:storage_context, :container_name] defstruct [:storage_context, :container_name] - def new(storage_context = %Storage{}, container_name) + def new(%Storage{} = storage_context, container_name) when is_binary(container_name), do: %__MODULE__{storage_context: storage_context, container_name: container_name} defmodule Responses do - def list_containers_response(), + @moduledoc false + def list_containers_response, do: [ + max_results: ~x"/EnumerationResults/MaxResults/text()"s, + next_marker: ~x"/EnumerationResults/NextMarker/text()"s, containers: [ ~x"/EnumerationResults/Containers/Container"l, name: ~x"./Name/text()"s, @@ -33,13 +43,18 @@ defmodule Microsoft.Azure.Storage.Container do ] ] - def list_blobs_response(), + def list_blobs_response, do: [ max_results: ~x"/EnumerationResults/MaxResults/text()"s, next_marker: ~x"/EnumerationResults/NextMarker/text()"s, blobs: [ ~x"/EnumerationResults/Blobs/Blob"l, name: ~x"./Name/text()"s, + tags: [ + ~x"./Tags/TagSet/Tag"l, + key: ~x"./Key/text()"s, + value: ~x"./Value/text()"s + ], properties: [ ~x"./Properties", etag: ~x"./Etag/text()"s, @@ -63,7 +78,7 @@ defmodule Microsoft.Azure.Storage.Container do ] end - def list_containers(context = %Storage{}) do + def list_containers(%Storage{} = context) do # https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 response = context @@ -105,6 +120,17 @@ defmodule Microsoft.Azure.Storage.Container do end end + @doc """ + Returns an existing container if found or creates a new one if not. 
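+
+  For example (a sketch, assuming `container` was built with `Container.new/2`):
+
+      {:ok, _} = Container.ensure_container(container)
+
+  Calling it again also returns `{:ok, _}`: the "ContainerAlreadyExists" error
+  is treated as success.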
+ """ + def ensure_container(container) do + case create_container(container) do + {:ok, %{status: 201} = response} -> {:ok, response} + {:error, %{error_code: "ContainerAlreadyExists"} = response} -> {:ok, response} + other -> other + end + end + def get_container_properties(%__MODULE__{ storage_context: context, container_name: container_name @@ -176,13 +202,13 @@ defmodule Microsoft.Azure.Storage.Container do end end - def set_container_acl_public_access_off(container = %__MODULE__{}), + def set_container_acl_public_access_off(%__MODULE__{} = container), do: container |> set_container_acl(:off) - def set_container_acl_public_access_blob(container = %__MODULE__{}), + def set_container_acl_public_access_blob(%__MODULE__{} = container), do: container |> set_container_acl(:blob) - def set_container_acl_public_access_container(container = %__MODULE__{}), + def set_container_acl_public_access_container(%__MODULE__{} = container), do: container |> set_container_acl(:container) defp container_access_level_to_string(:off), do: nil diff --git a/lib/microsoft/azure/storage/container_lease.ex b/lib/storage/container_lease.ex similarity index 91% rename from lib/microsoft/azure/storage/container_lease.ex rename to lib/storage/container_lease.ex index 6fcb250..f57f499 100644 --- a/lib/microsoft/azure/storage/container_lease.ex +++ b/lib/storage/container_lease.ex @@ -1,6 +1,10 @@ -defmodule Microsoft.Azure.Storage.ContainerLease do - import Microsoft.Azure.Storage.RequestBuilder - alias Microsoft.Azure.Storage.{Container} +defmodule ExMicrosoftAzureStorage.Storage.ContainerLease do + @moduledoc """ + ContainerLease + """ + + import ExMicrosoftAzureStorage.Storage.RequestBuilder + alias ExMicrosoftAzureStorage.Storage.Container # "x-ms-lease-action" acquire/renew/change/release/break # "x-ms-lease-id" Required for renew/change/release @@ -41,7 +45,7 @@ defmodule Microsoft.Azure.Storage.ContainerLease do # AcquireLease TimeSpan? leaseTime, string proposedLeaseId def container_lease_acquire( - container = %Container{}, + %Container{} = container, lease_duration, proposed_lease_id \\ nil ) @@ -65,7 +69,7 @@ defmodule Microsoft.Azure.Storage.ContainerLease do # RenewLease def container_lease_renew( - container = %Container{}, + %Container{} = container, lease_id ) do # https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container @@ -86,7 +90,7 @@ defmodule Microsoft.Azure.Storage.ContainerLease do # BreakLease TimeSpan? breakPeriod def container_lease_break( - container = %Container{}, + %Container{} = container, lease_id, break_period \\ -1 ) @@ -115,7 +119,7 @@ defmodule Microsoft.Azure.Storage.ContainerLease do # ReleaseLease def container_lease_release( - container = %Container{}, + %Container{} = container, lease_id ) do # https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container#remarks @@ -136,7 +140,7 @@ defmodule Microsoft.Azure.Storage.ContainerLease do # ChangeLease string proposedLeaseId, def container_lease_change( - container = %Container{}, + %Container{} = container, lease_id, proposed_lease_id ) do diff --git a/lib/storage/crypto.ex b/lib/storage/crypto.ex new file mode 100644 index 0000000..30b0e95 --- /dev/null +++ b/lib/storage/crypto.ex @@ -0,0 +1,11 @@ +defmodule ExMicrosoftAzureStorage.Storage.Crypto do + @moduledoc """ + Provides backwards-compatible cryptographic functions. 
+ """ + + if Code.ensure_loaded?(:crypto) and function_exported?(:crypto, :mac, 4) do + def hmac(digest, key, payload), do: :crypto.mac(:hmac, digest, key, payload) + else + def hmac(digest, key, payload), do: :crypto.hmac(digest, key, payload) + end +end diff --git a/lib/microsoft/azure/storage/date_time_utils.ex b/lib/storage/date_time_utils.ex similarity index 65% rename from lib/microsoft/azure/storage/date_time_utils.ex rename to lib/storage/date_time_utils.ex index e228c41..83ef3cd 100644 --- a/lib/microsoft/azure/storage/date_time_utils.ex +++ b/lib/storage/date_time_utils.ex @@ -1,14 +1,18 @@ -defmodule Microsoft.Azure.Storage.DateTimeUtils do +defmodule ExMicrosoftAzureStorage.Storage.DateTimeUtils do + @moduledoc """ + DateTimeUtils + """ + use Timex - def utc_now(), + def utc_now, # https://docs.microsoft.com/en-us/rest/api/storageservices/representation-of-date-time-values-in-headers do: Timex.now() |> Timex.format!("{RFC1123z}") |> String.replace(" Z", " GMT") - # "2019-02-05T16:43:10.4730000Z" |> Microsoft.Azure.Storage.DateTimeUtils.date_parse_iso8601() + # "2019-02-05T16:43:10.4730000Z" |> ExMicrosoftAzureStorage.Storage.DateTimeUtils.date_parse_iso8601() def date_parse_iso8601(date) do {:ok, result, 0} = date |> DateTime.from_iso8601() result @@ -23,15 +27,14 @@ defmodule Microsoft.Azure.Storage.DateTimeUtils do |> DateTime.to_iso8601() |> String.replace_trailing("Z", "0Z") - # "Tue, 05 Feb 2019 16:58:12 GMT" |> Microsoft.Azure.Storage.DateTimeUtils.date_parse_rfc1123() - def date_parse_rfc1123(str), - do: - str - |> Timex.parse!("{RFC1123}") + # "Tue, 05 Feb 2019 16:58:12 GMT" |> ExMicrosoftAzureStorage.Storage.DateTimeUtils.date_parse_rfc1123() + def date_parse_rfc1123(str) do + Timex.parse!(str, "{RFC1123}") + end - def to_string(timex_time), + def to_string_rfc1123(date_time), # https://docs.microsoft.com/en-us/rest/api/storageservices/representation-of-date-time-values-in-headers do: - timex_time + date_time |> Timex.format!("{WDshort}, {0D} {Mshort} {YYYY} {0h24}:{0m}:{0s} GMT") end diff --git a/lib/microsoft/azure/storage/queue.ex b/lib/storage/queue.ex similarity index 89% rename from lib/microsoft/azure/storage/queue.ex rename to lib/storage/queue.ex index e3ef416..547aa1e 100644 --- a/lib/microsoft/azure/storage/queue.ex +++ b/lib/storage/queue.ex @@ -1,11 +1,14 @@ -defmodule Microsoft.Azure.Storage.Queue do - use NamedArgs +defmodule ExMicrosoftAzureStorage.Storage.Queue do + @moduledoc """ + Queue + """ + use Timex import SweetXml - import Microsoft.Azure.Storage.RequestBuilder - alias Microsoft.Azure.Storage - alias Microsoft.Azure.Storage.DateTimeUtils + import ExMicrosoftAzureStorage.Storage.RequestBuilder alias __MODULE__.Responses + alias ExMicrosoftAzureStorage.Storage + alias ExMicrosoftAzureStorage.Storage.DateTimeUtils @enforce_keys [:storage_context, :queue_name] defstruct [:storage_context, :queue_name] @@ -14,10 +17,11 @@ defmodule Microsoft.Azure.Storage.Queue do do: %__MODULE__{storage_context: storage_context, queue_name: queue_name} defmodule Responses do - alias Microsoft.Azure.Storage.DateTimeUtils + @moduledoc false + alias ExMicrosoftAzureStorage.Storage.DateTimeUtils - def put_message_response(), - do: [ + def put_message_response do + [ message_id: ~x"/QueueMessagesList/QueueMessage/MessageId/text()"s, pop_receipt: ~x"/QueueMessagesList/QueueMessage/PopReceipt/text()"s, insertion_time: @@ -30,9 +34,10 @@ defmodule Microsoft.Azure.Storage.Queue do ~x"/QueueMessagesList/QueueMessage/TimeNextVisible/text()"s |> 
transform_by(&DateTimeUtils.date_parse_rfc1123/1) ] + end - def get_message_response(), - do: [ + def get_message_response do + [ message_id: ~x"/QueueMessagesList/QueueMessage/MessageId/text()"s, pop_receipt: ~x"/QueueMessagesList/QueueMessage/PopReceipt/text()"s, insertion_time: @@ -49,6 +54,7 @@ defmodule Microsoft.Azure.Storage.Queue do ~x"/QueueMessagesList/QueueMessage/MessageText/text()"s |> transform_by(&Base.decode64!/1) ] + end end def create_queue(%__MODULE__{storage_context: context, queue_name: queue_name}, opts \\ []) do @@ -194,25 +200,15 @@ defmodule Microsoft.Azure.Storage.Queue do when is_binary(message) do # https://docs.microsoft.com/en-us/rest/api/storageservices/put-message + opts_with_default = + [visibilitytimeout: 0, messagettl: 0] + |> Keyword.merge(opts) + |> Enum.into(%{}) + %{ visibilitytimeout: visibilitytimeout, messagettl: messagettl - } = - case [visibilitytimeout: 0, messagettl: 0] - |> Keyword.merge(opts) - |> Enum.into(%{}) do - %{ - visibilitytimeout: visibilitytimeout, - messagettl: messagettl - } - when visibilitytimeout >= 0 and visibilitytimeout <= @seconds_7_days and - (messagettl == -1 or messagettl == 0 or - (messagettl >= 1 and messagettl <= @seconds_7_days)) -> - %{ - visibilitytimeout: visibilitytimeout, - messagettl: messagettl - } - end + } = visibility_timeout(opts_with_default) body = "#{message |> Base.encode64()}" @@ -237,6 +233,25 @@ defmodule Microsoft.Azure.Storage.Queue do end end + defp visibility_timeout(%{ + visibilitytimeout: visibilitytimeout, + messagettl: messagettl + }) + when visibilitytimeout >= 0 and visibilitytimeout <= @seconds_7_days and + (messagettl == -1 or messagettl == 0 or + (messagettl >= 1 and messagettl <= @seconds_7_days)) do + %{ + visibilitytimeout: visibilitytimeout, + messagettl: messagettl + } + end + + defp visibility_timeout(_) do + raise ArgumentError, + message: + "Invalid visibility timeout given it should be within the range of 0 - #{@seconds_7_days} seconds." 
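+    # The guarded clause above mirrors the Put Message limits: visibilitytimeout
+    # must lie within 0..@seconds_7_days, and messagettl must be -1, 0, or at
+    # most @seconds_7_days. Raising here surfaces invalid options early instead
+    # of letting the service reject the request.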
+ end + def get_message(%__MODULE__{storage_context: context, queue_name: queue_name}, opts \\ []) do # https://docs.microsoft.com/en-us/rest/api/storageservices/get-messages diff --git a/lib/microsoft/azure/storage/request_builder.ex b/lib/storage/request_builder.ex similarity index 75% rename from lib/microsoft/azure/storage/request_builder.ex rename to lib/storage/request_builder.ex index 9333607..275dc04 100644 --- a/lib/microsoft/azure/storage/request_builder.ex +++ b/lib/storage/request_builder.ex @@ -1,19 +1,27 @@ -defmodule Microsoft.Azure.Storage.RequestBuilder do +defmodule ExMicrosoftAzureStorage.Storage.RequestBuilder do + @moduledoc """ + RequestBuilder + """ + import SweetXml - alias Microsoft.Azure.Storage - alias Microsoft.Azure.Storage.{RestClient, ApiVersion, DateTimeUtils, Container} + import ExMicrosoftAzureStorage.Storage.Utilities, only: [to_bool: 1] + + alias ExMicrosoftAzureStorage.Storage + alias ExMicrosoftAzureStorage.Storage.{ApiVersion, Container, DateTimeUtils, RestClient} - def new_azure_storage_request(storage = %Storage{}), do: %{storage_context: storage} + defp json_library, do: Application.get_env(:azure, :json_library, Jason) + + def new_azure_storage_request(%Storage{} = storage), do: %{storage_context: storage} def method(request, m), do: request |> Map.put_new(:method, m) def url(request, u), do: request |> Map.put_new(:url, u) - def body(request, body), - do: - request - |> add_header("Content-Length", "#{body |> byte_size()}") - |> Map.put(:body, body) + def body(request, body) do + request + |> add_header("Content-Length", "#{body |> byte_size()}") + |> Map.put(:body, body) + end def add_header_content_md5(request) do body = request |> Map.get(:body) @@ -28,13 +36,16 @@ defmodule Microsoft.Azure.Storage.RequestBuilder do # request |> Map.update!(:headers, &Map.merge(&1, headers)) def add_header(request = %{headers: headers}, k, v) when headers != nil, - do: request |> Map.put(:headers, headers |> Map.put(k, v)) + do: request |> Map.put(:headers, [{k, v} | headers]) + + def add_header(request, k, v), do: request |> Map.put(:headers, [{k, v}]) - def add_header(request, k, v), do: request |> Map.put(:headers, %{k => v}) + def has_header?(%{headers: headers}, k), do: List.keymember?(headers, k, 0) + def has_header?(_request, _k), do: false @prefix_x_ms_meta "x-ms-meta-" - def add_header_x_ms_meta(request, kvp = %{}), + def add_header_x_ms_meta(request, %{} = kvp), do: kvp |> Enum.reduce(request, fn {k, v}, r -> r |> add_header(@prefix_x_ms_meta <> k, v) end) @@ -68,7 +79,7 @@ defmodule Microsoft.Azure.Storage.RequestBuilder do &Tesla.Multipart.add_field( &1, key, - Poison.encode!(value), + json_library().encode!(value), headers: [{:"Content-Type", "application/json"}] ) ) @@ -111,30 +122,28 @@ defmodule Microsoft.Azure.Storage.RequestBuilder do defp primary(account_name), do: account_name |> String.replace("-secondary", "") - defp canonicalized_headers(headers = %{}), - do: - headers - |> Enum.into([]) - |> Enum.map(fn {k, v} -> {k |> String.downcase(), v} end) - |> Enum.filter(fn {k, _} -> k |> String.starts_with?("x-ms-") end) - |> Enum.sort() - |> Enum.map(fn {k, v} -> "#{k}:#{v}" end) - |> Enum.join("\n") + defp canonicalized_headers(headers) do + headers + |> Enum.map(fn {k, v} -> {k |> String.downcase(), v} end) + |> Enum.filter(fn {k, _} -> k |> String.starts_with?("x-ms-") end) + |> Enum.sort() + |> Enum.map_join("\n", fn {k, v} -> "#{k}:#{v}" end) + end - def remove_empty_headers(request = %{headers: headers = %{}}) do + def 
remove_empty_headers(request = %{headers: headers}) when is_list(headers) do new_headers = headers - |> Enum.into([]) |> Enum.filter(fn {_k, v} -> v != nil && String.length(v) > 0 end) - |> Enum.into(%{}) request |> Map.put(:headers, new_headers) end defp get_header(headers, name) do - headers - |> Map.get(name) + case for {k, v} <- headers, k == name, do: v do + [result] -> result + [] -> nil + end end defp protect( @@ -144,7 +153,7 @@ defmodule Microsoft.Azure.Storage.RequestBuilder do method: method, url: url, query: query, - headers: headers = %{}, + headers: headers, storage_context: storage_context = %Storage{ is_development_factory: is_development_factory, @@ -154,7 +163,7 @@ defmodule Microsoft.Azure.Storage.RequestBuilder do } ) when is_binary(account_key) and account_key != nil do - canonicalizedHeaders = headers |> canonicalized_headers() + canonicalized_headers = headers |> canonicalized_headers() url = case is_development_factory do @@ -162,7 +171,7 @@ defmodule Microsoft.Azure.Storage.RequestBuilder do _ -> url end - canonicalizedResource = + canonicalized_resource = case query do [] -> "/#{storage_context.account_name |> primary()}#{url}" @@ -174,7 +183,7 @@ defmodule Microsoft.Azure.Storage.RequestBuilder do |> Enum.map_join("\n", fn {k, v} -> "#{k}:#{v}" end)) end - stringToSign = + string_to_sign = [ method |> Atom.to_string() |> String.upcase(), headers |> get_header("Content-Encoding"), @@ -188,13 +197,13 @@ defmodule Microsoft.Azure.Storage.RequestBuilder do headers |> get_header("If-None-Match"), headers |> get_header("If-Unmodified-Since"), headers |> get_header("Range"), - canonicalizedHeaders, - canonicalizedResource + canonicalized_headers, + canonicalized_resource ] |> Enum.join("\n") signature = - :crypto.hmac(:sha256, storage_context.account_key |> Base.decode64!(), stringToSign) + Storage.Crypto.hmac(:sha256, account_key |> Base.decode64!(), string_to_sign) |> Base.encode64() data @@ -205,10 +214,10 @@ defmodule Microsoft.Azure.Storage.RequestBuilder do end defp protect( - request = %{ + %{ storage_context: %Storage{account_key: nil, aad_token_provider: aad_token_provider}, uri: uri - } + } = request ) do token = uri @@ -239,7 +248,10 @@ defmodule Microsoft.Azure.Storage.RequestBuilder do uri |> RestClient.new() + add_content_type_header? 
= request.method == :put && !has_header?(request, "Content-Type") + request + |> add_header_if(add_content_type_header?, "Content-Type", "application/octet-stream") |> add_header("x-ms-date", DateTimeUtils.utc_now()) |> add_header("x-ms-version", ApiVersion.get_api_version(:storage)) |> remove_empty_headers() @@ -248,6 +260,7 @@ defmodule Microsoft.Azure.Storage.RequestBuilder do |> protect() |> Enum.into([]) |> (&RestClient.request(connection, &1)).() + |> elem(1) end def add_missing(map, key, value) do @@ -257,14 +270,9 @@ defmodule Microsoft.Azure.Storage.RequestBuilder do end end - def decode(%Tesla.Env{status: 200, body: body}), do: Poison.decode(body) - def decode(response), do: {:error, response} - def decode(%Tesla.Env{status: 200} = env, false), do: {:ok, env} - def decode(%Tesla.Env{status: 200, body: body}, struct), do: Poison.decode(body, as: struct) - def decode(response, _struct), do: {:error, response} - defmodule Responses do - def error_response(), + @moduledoc false + def error_response, do: [ error_code: ~x"/Error/Code/text()"s, error_message: ~x"/Error/Message/text()"s, @@ -275,19 +283,11 @@ defmodule Microsoft.Azure.Storage.RequestBuilder do end def identity(x), do: x - def to_bool("true"), do: true - def to_bool("false"), do: false - def to_bool(_), do: false - - def to_integer!(x) do - {i, ""} = x |> Integer.parse() - i - end - def create_error_response(response = %{}) do + def create_error_response(%{} = response) do response |> create_success_response(xml_body_parser: &__MODULE__.Responses.error_response/0) - |> Map.update!(:error_message, &String.split(&1, "\n")) + |> Map.update(:error_message, "", &String.split(&1, "\n")) end def create_success_response(response, opts \\ []) do @@ -298,16 +298,20 @@ defmodule Microsoft.Azure.Storage.RequestBuilder do |> Map.put(:body, response.body) |> copy_response_headers_into_map() |> copy_x_ms_meta_headers_into_map() - |> (fn response = %{body: body} -> - case opts |> Keyword.get(:xml_body_parser) do - nil -> - response - - xml_parser when is_function(xml_parser) -> - response - |> Map.merge(body |> xmap(xml_parser.())) - end - end).() + |> parse_body_and_update_response(opts) + end + + defp parse_body_and_update_response(%{body: ""} = response, _), do: response + + defp parse_body_and_update_response(%{body: body} = response, opts) do + case opts |> Keyword.get(:xml_body_parser) do + nil -> + response + + xml_parser when is_function(xml_parser) -> + response + |> Map.merge(body |> xmap(xml_parser.())) + end end @response_headers [ @@ -321,19 +325,19 @@ defmodule Microsoft.Azure.Storage.RequestBuilder do {"x-ms-lease-state", :x_ms_lease_state}, {"x-ms-blob-type", :x_ms_blob_type}, {"x-ms-lease-status", :x_ms_lease_status}, - {"x-ms-request-server-encrypted", :x_ms_request_server_encrypted, &__MODULE__.to_bool/1}, + {"x-ms-request-server-encrypted", :x_ms_request_server_encrypted, &to_bool/1}, {"x-ms-delete-type-permanent", :x_ms_delete_type_permanent}, - {"x-ms-has-immutability-policy", :x_ms_has_immutability_policy, &__MODULE__.to_bool/1}, - {"x-ms-has-legal-hold", :x_ms_has_legal_hold, &__MODULE__.to_bool/1}, - {"x-ms-approximate-messages-count", :x_ms_approximate_messages_count, - &__MODULE__.to_integer!/1}, + {"x-ms-has-immutability-policy", :x_ms_has_immutability_policy, &to_bool/1}, + {"x-ms-has-legal-hold", :x_ms_has_legal_hold, &to_bool/1}, + {"x-ms-approximate-messages-count", :x_ms_approximate_messages_count, &String.to_integer/1}, {"x-ms-error-code", :x_ms_error_code}, {"x-ms-blob-public-access", 
:x_ms_blob_public_access, &Container.parse_access_level/1}, {"x-ms-blob-cache-control", :x_ms_blob_cache_control}, - {"x-ms-cache-control", :x_ms_cache_control} + {"x-ms-cache-control", :x_ms_cache_control}, + {"x-ms-copy-status", :x_ms_copy_status} ] - defp copy_response_headers_into_map(response = %{}) do + defp copy_response_headers_into_map(%{} = response) do Enum.reduce(@response_headers, response, fn x, response -> response |> copy_response_header_into_map(x) end) @@ -347,13 +351,9 @@ defmodule Microsoft.Azure.Storage.RequestBuilder do is_function(transform, 1) do http_header = http_header |> String.downcase() - if response.headers |> Map.has_key?(http_header) do - case response.headers[http_header] do - nil -> response - val -> response |> Map.put(key_to_set, val |> transform.()) - end - else - response + case get_header(response.headers, http_header) do + nil -> response + val -> response |> Map.put(key_to_set, val |> transform.()) end end diff --git a/lib/microsoft/azure/storage/rest_client.ex b/lib/storage/rest_client.ex similarity index 51% rename from lib/microsoft/azure/storage/rest_client.ex rename to lib/storage/rest_client.ex index b2ab32e..d90f65e 100644 --- a/lib/microsoft/azure/storage/rest_client.ex +++ b/lib/storage/rest_client.ex @@ -1,9 +1,15 @@ -defmodule Microsoft.Azure.Storage.RestClient do +defmodule ExMicrosoftAzureStorage.Storage.RestClient do + @moduledoc """ + RestClient + """ + use Tesla - adapter(:ibrowse) + adapter(Tesla.Adapter.Hackney, recv_timeout: 40_000) + + plug(Tesla.Middleware.Timeout, timeout: 40_000) - def proxy_middleware() do + def proxy_middleware do case System.get_env("http_proxy") do nil -> nil @@ -14,25 +20,26 @@ defmodule Microsoft.Azure.Storage.RestClient do proxy_cfg -> proxy_cfg |> String.split(":") - |> (fn [host, port] -> - {Tesla.Middleware.Opts, - [ - # https://github.com/cmullaparthi/ibrowse/wiki/ibrowse-API - proxy_host: host |> String.to_charlist(), - proxy_port: port |> Integer.parse() |> elem(0), - inactivity_timeout: 40_000 - ]} - end).() + |> proxy_configuration() end end + defp proxy_configuration([host, port]) do + {Tesla.Middleware.Opts, + [ + # https://github.com/cmullaparthi/ibrowse/wiki/ibrowse-API + proxy_host: host |> String.to_charlist(), + proxy_port: port |> Integer.parse() |> elem(0) + ]} + end + def new(base_url) when is_binary(base_url) do [ {Tesla.Middleware.BaseUrl, base_url}, proxy_middleware() ] |> Enum.filter(&(&1 != nil)) - |> Tesla.build_client() + |> Tesla.client() end def new(base_url, headers) when is_binary(base_url) and is_map(headers) do @@ -42,6 +49,6 @@ defmodule Microsoft.Azure.Storage.RestClient do proxy_middleware() ] |> Enum.filter(&(&1 != nil)) - |> Tesla.build_client() + |> Tesla.client() end end diff --git a/lib/storage/shared_access_signature.ex b/lib/storage/shared_access_signature.ex new file mode 100644 index 0000000..22029f1 --- /dev/null +++ b/lib/storage/shared_access_signature.ex @@ -0,0 +1,238 @@ +defmodule ExMicrosoftAzureStorage.Storage.SharedAccessSignature do + @moduledoc """ + SharedAccessSignature + """ + + alias ExMicrosoftAzureStorage.Storage + import ExMicrosoftAzureStorage.Storage.Utilities, only: [add_to: 3, set_to_string: 2] + + # https://docs.microsoft.com/en-us/rest/api/storageservices/delegating-access-with-a-shared-access-signature + # https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1 + # https://github.com/chgeuer/private_gists/blob/76db1345142d25d3359af6ce4ba7b9eef1aeb769/azure/AccountSAS/AccountSas.cs + + 
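+
+  # A typical flow, sketched (`storage` is assumed to be a %Storage{} context):
+  #
+  #     new()
+  #     |> for_blob_service()
+  #     |> add_resource_blob_container()
+  #     |> add_canonicalized_resource("/blob/myaccount/mycontainer")
+  #     |> add_permission_read()
+  #     |> start_time(Timex.now())
+  #     |> expiry_time(Timex.shift(Timex.now(), hours: 1))
+  #     |> sign(storage)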
defstruct [ + :service_version, + :target_scope, + :services, + :resource_type, + :permissions, + :start_time, + :expiry_time, + :canonicalized_resource, + :resource, + :ip_range, + :protocol, + :cache_control, + :content_disposition, + :content_encoding, + :content_language, + :content_type + ] + + def new, do: %__MODULE__{} + + def for_storage_account(%__MODULE__{target_scope: nil} = v), + do: v |> Map.put(:target_scope, :account) + + def for_blob_service(%__MODULE__{target_scope: nil} = v), do: v |> Map.put(:target_scope, :blob) + + def for_table_service(%__MODULE__{target_scope: nil} = v), + do: v |> Map.put(:target_scope, :table) + + def for_queue_service(%__MODULE__{target_scope: nil} = v), + do: v |> Map.put(:target_scope, :queue) + + # https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas#specifying-account-sas-parameters + @services_map %{blob: "b", queue: "q", table: "t", file: "f"} + def add_service_blob(%__MODULE__{target_scope: :account} = v), do: v |> add_to(:services, :blob) + + def add_service_queue(%__MODULE__{target_scope: :account} = v), + do: v |> add_to(:services, :queue) + + def add_service_table(%__MODULE__{target_scope: :account} = v), + do: v |> add_to(:services, :table) + + def add_service_file(%__MODULE__{target_scope: :account} = v), do: v |> add_to(:services, :file) + + @resource_types_map %{service: "s", object: "o", container: "c"} + def add_resource_type_service(%__MODULE__{target_scope: :account} = v), + do: v |> add_to(:resource_type, :service) + + def add_resource_type_container(%__MODULE__{target_scope: :account} = v), + do: v |> add_to(:resource_type, :container) + + def add_resource_type_object(%__MODULE__{target_scope: :account} = v), + do: v |> add_to(:resource_type, :object) + + @resource_map %{ + # https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#specifying-the-signed-resource-blob-service-only + container: "c", + blob: "b", + # https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#specifying-the-signed-resource-file-service-only + share: "s", + file: "f" + } + + def add_resource_blob_container(%__MODULE__{} = v), do: v |> add_to(:resource, :container) + + def add_resource_blob_blob(%__MODULE__{} = v), do: v |> add_to(:resource, :blob) + + @permissions_map %{ + read: "r", + write: "w", + delete: "d", + list: "l", + add: "a", + create: "c", + update: "u", + process: "p" + } + def add_permission_read(%__MODULE__{} = v), do: add_to(v, :permissions, :read) + def add_permission_write(%__MODULE__{} = v), do: add_to(v, :permissions, :write) + def add_permission_delete(%__MODULE__{} = v), do: add_to(v, :permissions, :delete) + def add_permission_list(%__MODULE__{} = v), do: add_to(v, :permissions, :list) + def add_permission_add(%__MODULE__{} = v), do: add_to(v, :permissions, :add) + def add_permission_create(%__MODULE__{} = v), do: add_to(v, :permissions, :create) + def add_permission_update(%__MODULE__{} = v), do: add_to(v, :permissions, :update) + def add_permission_process(%__MODULE__{} = v), do: add_to(v, :permissions, :process) + + def add_canonicalized_resource(%__MODULE__{} = v, resource_name) do + %{v | canonicalized_resource: resource_name} + end + + def as_time(t), do: t |> Timex.format!("{YYYY}-{0M}-{0D}T{0h24}:{0m}:{0s}Z") + + def service_version(%__MODULE__{} = v, service_version), + do: %{v | service_version: service_version} + + def start_time(%__MODULE__{} = v, start_time), do: %{v | start_time: start_time} + def expiry_time(%__MODULE__{} = 
v, expiry_time), do: %{v | expiry_time: expiry_time} + def resource(%__MODULE__{} = v, resource), do: %{v | resource: resource} + def ip_range(%__MODULE__{} = v, ip_range), do: %{v | ip_range: ip_range} + def protocol(%__MODULE__{} = v, protocol), do: %{v | protocol: protocol} + + def cache_control(%__MODULE__{} = v, cache_control), do: %{v | cache_control: cache_control} + + def content_disposition(%__MODULE__{} = v, content_disposition), + do: %{v | content_disposition: content_disposition} + + def content_encoding(%__MODULE__{} = v, content_encoding), + do: %{v | content_encoding: content_encoding} + + def content_language(%__MODULE__{} = v, content_language), + do: %{v | content_language: content_language} + + def content_type(%__MODULE__{} = v, content_type), do: %{v | content_type: content_type} + + def encode({:service_version, value}), do: {"sv", value} + def encode({:start_time, value}), do: {"st", value |> as_time()} + + def encode({:expiry_time, value}), do: {"se", value |> as_time()} + def encode({:canonicalized_resource, value}), do: {"cr", value} + def encode({:resource, value}), do: {"sr", value |> set_to_string(@resource_map)} + def encode({:ip_range, value}), do: {"sip", value} + def encode({:protocol, value}), do: {"spr", value} + def encode({:services, value}), do: {"ss", value |> set_to_string(@services_map)} + def encode({:resource_type, value}), do: {"srt", value |> set_to_string(@resource_types_map)} + def encode({:permissions, value}), do: {"sp", value |> set_to_string(@permissions_map)} + def encode({:cache_control, value}), do: {"rscc", value} + def encode({:content_disposition, value}), do: {"rscd", value} + def encode({:content_encoding, value}), do: {"rsce", value} + def encode({:content_language, value}), do: {"rscl", value} + def encode({:content_type, value}), do: {"rsct", value} + def encode(_), do: {nil, nil} + + # https://docs.microsoft.com/en-us/rest/api/storageservices/create-service-sas#version-2018-11-09-and-later + # StringToSign = signedPermissions + "\n" + + # signedStart + "\n" + + # signedExpiry + "\n" + + # canonicalizedResource + "\n" + + # signedIdentifier + "\n" + + # signedIP + "\n" + + # signedProtocol + "\n" + + # signedVersion + "\n" + + # signedResource + "\n" + # signedSnapshotTime + "\n" + + # rscc + "\n" + + # rscd + "\n" + + # rsce + "\n" + + # rscl + "\n" + + # rsct + defp string_to_sign(values, _account_name, :blob) do + [ + # permissions + values |> Map.get("sp", ""), + # start date + values |> Map.get("st", ""), + # expiry date + values |> Map.get("se", ""), + # canonicalized resource + values |> Map.get("cr", ""), + # identifier + "", + # IP address + values |> Map.get("sip", ""), + # Protocol + values |> Map.get("spr", ""), + # Version + values |> Map.get("sv", ""), + # resource + values |> Map.get("sr"), + # snapshottime + "", + # rscc - Cache-Control + values |> Map.get("rscc", ""), + # rscd - Content-Disposition + values |> Map.get("rscd", ""), + # rsce - Content-Encoding + values |> Map.get("rsce", ""), + # rscl - Content-Language + values |> Map.get("rscl", ""), + # rsct - Content-Type + values |> Map.get("rsct", "") + ] + |> Enum.join("\n") + end + + defp string_to_sign(values, account_name, _) do + [ + account_name, + values |> Map.get("sp", ""), + values |> Map.get("ss", ""), + values |> Map.get("srt", ""), + values |> Map.get("st", ""), + values |> Map.get("se", ""), + values |> Map.get("sip", ""), + values |> Map.get("spr", ""), + values |> Map.get("sv", ""), + "" + ] + |> Enum.join("\n") + end + + def sign( + 
%__MODULE__{target_scope: target_scope} = sas, + %Storage{account_name: account_name, account_key: account_key} + ) + when is_atom(target_scope) and target_scope != nil do + # https://docs.microsoft.com/en-us/rest/api/storageservices/service-sas-examples + values = + sas + |> Map.from_struct() + |> Enum.filter(fn {_, val} -> val != nil end) + |> Enum.map(&__MODULE__.encode/1) + |> Enum.filter(fn {_, val} -> val != nil end) + |> Map.new() + + string_to_sign = string_to_sign(values, account_name, target_scope) + + signature = + Storage.Crypto.hmac(:sha256, account_key |> Base.decode64!(), string_to_sign) + |> Base.encode64() + + values + |> Map.put("sig", signature) + |> Map.drop(["cr"]) + |> URI.encode_query() + end +end diff --git a/lib/storage/storage.ex b/lib/storage/storage.ex new file mode 100644 index 0000000..a213432 --- /dev/null +++ b/lib/storage/storage.ex @@ -0,0 +1,95 @@ +defmodule ExMicrosoftAzureStorage.Storage do + @moduledoc """ + ExMicrosoftAzureStorage.Storage + """ + + alias ExMicrosoftAzureStorage.Storage.ConnectionString + + @derive {Inspect, except: [:account_key]} + @enforce_keys [] + defstruct account_name: nil, + account_key: nil, + host: nil, + aad_token_provider: nil, + endpoint_suffix: nil, + default_endpoints_protocol: "https", + is_development_factory: false + + @endpoint_names %{ + blob_service: "blob", + queue_service: "queue", + table_service: "table", + file_service: "file" + } + + @development_account_name "devstoreaccount1" + @development_account_key "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==" + + @doc """ + Creates a new `ExMicrosoftAzureStorage.Storage` struct from the specified `connection_string`. + + Your particular account connection string may be found in the Azure web portal. They take this form: + + DefaultEndpointsProtocol=https;AccountName=YOUR_ACCOUNT_NAME;AccountKey=YOUR_ACCOUNT_KEY;EndpointSuffix=core.windows.net + """ + def new(connection_string) when is_binary(connection_string) do + struct!(__MODULE__, ConnectionString.parse(connection_string)) + end + + @doc """ + Returns the storage context for the Azure storage emulator. + """ + def development_factory(host \\ "127.0.0.1") do + %__MODULE__{ + # https://docs.microsoft.com/en-us/azure/storage/common/storage-use-emulator#authenticating-requests-against-the-storage-emulator + account_name: @development_account_name, + account_key: @development_account_key, + host: host, + default_endpoints_protocol: "http", + is_development_factory: true + } + end + + @doc """ + Returns the storage context for a local Azure storage emulator. 
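+
+  For example, to target the Azurite container from the bundled
+  `docker-compose.yml` (the well-known development credentials are used):
+
+      context = ExMicrosoftAzureStorage.Storage.emulator()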
+ """ + def emulator(host \\ "127.0.0.1"), do: development_factory(host) + + def secondary(%__MODULE__{is_development_factory: true} = context), do: context + + def secondary(%__MODULE__{} = context), + do: + context + |> Map.update!(:account_name, &(&1 <> "-secondary")) + + def endpoint_url(%__MODULE__{is_development_factory: true, host: host} = context, service) + when is_atom(service) do + port = + case service do + :blob_service -> 10_000 + :queue_service -> 10_001 + :table_service -> 10_002 + end + + %URI{ + scheme: default_endpoints_protocol(context), + host: host, + port: port, + path: "/" <> context.account_name + } + |> URI.to_string() + end + + def endpoint_url(%__MODULE__{} = context, service) when is_atom(service), + do: + %URI{scheme: default_endpoints_protocol(context), host: endpoint_hostname(context, service)} + |> URI.to_string() + + def endpoint_hostname(%__MODULE__{} = context, service) when is_atom(service), + do: "#{context.account_name}.#{@endpoint_names[service]}.#{context.endpoint_suffix}" + + def default_endpoints_protocol(%__MODULE__{ + default_endpoints_protocol: default_endpoints_protocol + }), + do: default_endpoints_protocol +end diff --git a/lib/microsoft/azure/storage/utilities.ex b/lib/storage/utilities.ex similarity index 51% rename from lib/microsoft/azure/storage/utilities.ex rename to lib/storage/utilities.ex index 556dda7..4709f00 100644 --- a/lib/microsoft/azure/storage/utilities.ex +++ b/lib/storage/utilities.ex @@ -1,16 +1,20 @@ -defmodule Microsoft.Azure.Storage.Utilities do - @doc ~S""" +defmodule ExMicrosoftAzureStorage.Storage.Utilities do + @moduledoc """ + Utilities + """ + + @doc """ Adds a value to a list, which is a value in a dictionary. ## Examples - iex> %{foo: nil} |> Microsoft.Azure.Storage.Utilities.add_to(:foo, :a) + iex> %{foo: nil} |> ExMicrosoftAzureStorage.Storage.Utilities.add_to(:foo, :a) %{foo: [:a]} - iex> %{foo: [:a]} |> Microsoft.Azure.Storage.Utilities.add_to(:foo, :b) + iex> %{foo: [:a]} |> ExMicrosoftAzureStorage.Storage.Utilities.add_to(:foo, :b) %{foo: [:b, :a]} - iex> %{foo: [:a]} |> Microsoft.Azure.Storage.Utilities.add_to(:foo, :b) |> Microsoft.Azure.Storage.Utilities.add_to(:foo, :c) + iex> %{foo: [:a]} |> ExMicrosoftAzureStorage.Storage.Utilities.add_to(:foo, :b) |> ExMicrosoftAzureStorage.Storage.Utilities.add_to(:foo, :c) %{foo: [:c, :b, :a]} """ def add_to(v = %{}, key, value) when is_atom(key) and is_atom(value), @@ -25,15 +29,15 @@ defmodule Microsoft.Azure.Storage.Utilities do end ) - @doc ~S""" + @doc """ Converts a list of atoms to a representative string, based on a mapping table. 
## Examples - iex> [:read, :write] |> Microsoft.Azure.Storage.Utilities.set_to_string(%{read: "r", write: "w"}) + iex> [:read, :write] |> ExMicrosoftAzureStorage.Storage.Utilities.set_to_string(%{read: "r", write: "w"}) "rw" - iex> [:read, :write, :update] |> Microsoft.Azure.Storage.Utilities.set_to_string(%{read: "r", write: "w", create: "c"}) + iex> [:read, :write, :update] |> ExMicrosoftAzureStorage.Storage.Utilities.set_to_string(%{read: "r", write: "w", create: "c"}) "rw" """ def set_to_string(set, mapping) when is_list(set) and is_map(mapping), @@ -44,29 +48,29 @@ defmodule Microsoft.Azure.Storage.Utilities do |> Enum.filter(&(&1 != nil)) |> Enum.join("") - @doc ~S""" + @doc """ Reverses a map ## Examples - iex> %{read: "r", write: "w"} |> Microsoft.Azure.Storage.Utilities.reverse_map() + iex> %{read: "r", write: "w"} |> ExMicrosoftAzureStorage.Storage.Utilities.reverse_map() %{"r" => :read, "w" => :write} - iex> %{"r" => :read, "w" => :write} |> Microsoft.Azure.Storage.Utilities.reverse_map() + iex> %{"r" => :read, "w" => :write} |> ExMicrosoftAzureStorage.Storage.Utilities.reverse_map() %{write: "w", read: "r"} - iex> %{"r" => :read, "w" => :write} |> Microsoft.Azure.Storage.Utilities.reverse_map() + iex> %{"r" => :read, "w" => :write} |> ExMicrosoftAzureStorage.Storage.Utilities.reverse_map() %{read: "r", write: "w"} """ def reverse_map(mapping), do: mapping |> Enum.to_list() |> Enum.map(fn {k, v} -> {v, k} end) |> Map.new() - @doc ~S""" + @doc """ Converts a string with shortcuts back into a list of atoms. ## Examples - iex> "rw" |> Microsoft.Azure.Storage.Utilities.string_to_set(%{read: "r", write: "w", create: "c"}) + iex> "rw" |> ExMicrosoftAzureStorage.Storage.Utilities.string_to_set(%{read: "r", write: "w", create: "c"}) [:read, :write] """ def string_to_set(string, mapping) when is_binary(string) and is_map(mapping) do @@ -79,4 +83,13 @@ defmodule Microsoft.Azure.Storage.Utilities do |> Enum.filter(&(&1 != nil)) |> Enum.to_list() end + + @doc """ + Converts a string literal "true" or "false" into appropriate boolean. + + All other values return `false`. + """ + def to_bool("true"), do: true + def to_bool("false"), do: false + def to_bool(_), do: false end diff --git a/mix.exs b/mix.exs index 0ec2978..f3f54a0 100644 --- a/mix.exs +++ b/mix.exs @@ -1,33 +1,88 @@ defmodule ExMicrosoftAzureStorage.MixProject do use Mix.Project + @version "1.1.1" + @repo_url "https://github.com/bettyblocks/ex_microsoft_azure_storage" + def project do [ app: :ex_microsoft_azure_storage, - version: "0.1.0", - elixir: "~> 1.6", + version: @version, + elixir: "~> 1.12", + elixirc_paths: elixirc_paths(Mix.env()), start_permanent: Mix.env() == :prod, - deps: deps() + deps: deps(), + dialyzer: dialyzer(), + package: package(), + docs: docs() ] end # Run "mix help compile.app" to learn about applications. def application do [ - extra_applications: [:logger, :crypto] + extra_applications: [:logger, :crypto, :eex] ] end # Run "mix help deps" to learn about dependencies. 
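+  # A JSON library is needed at runtime: `Jason` is the default (see
+  # `RequestBuilder.json_library/0`), and both `:jason` and `:poison` are
+  # declared optional below so the host application can choose.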
defp deps do [ - {:ibrowse, "~> 4.4"}, - {:tesla, "~> 0.8"}, - {:poison, ">= 1.0.0"}, - {:sweet_xml, "~> 0.6.5"}, - {:xml_builder, "~> 2.1"}, - {:named_args, "~> 0.1.1"}, - {:timex, "~> 3.2"} + # {:dep_from_hexpm, "~> 0.3.0"}, + # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"} + {:credo, "~> 1.5", only: [:dev, :test], runtime: false}, + {:dialyxir, "~> 1.1", only: [:dev, :test], runtime: false}, + {:ex_doc, ">= 0.0.0", only: :dev, runtime: false}, + {:ex_machina, ">= 0.0.0", only: [:dev, :test]}, + {:hackney, "~> 1.17"}, + {:jason, "~> 1.2", optional: true}, + {:poison, ">= 1.0.0", optional: true}, + {:sweet_xml, "~> 0.7"}, + {:tesla, "~> 1.4"}, + {:timex, "~> 3.7"}, + {:xml_builder, "~> 2.2"} + ] + end + + # Specifies which paths to compile per environment. + defp elixirc_paths(:test), do: ["lib", "test/support"] + defp elixirc_paths(_), do: ["lib"] + + defp package do + [ + description: "Microsoft Azure storage elixir", + links: %{"GitHub" => @repo_url}, + licenses: ["MIT"], + files: ~w(lib .formatter.exs mix.exs README.md LICENSE.md CHANGELOG.md) + ] + end + + defp docs do + [ + main: "readme", + extras: [ + "README.md" + ], + authors: [ + "almirsarajcic", + "bettyblocks", + "chgeuer", + "joeapearson" + ], + source_ref: "v#{@version}", + source_url: @repo_url, + api_reference: false + ] + end + + # Configures dialyzer (static analysis tool for Elixir / Erlang). + # + # The `dialyzer.plt` file takes a long time to generate first time round, so we store it in a + # custom location where it can then be easily cached during CI. + defp dialyzer do + [ + plt_add_apps: [:eex, :mix, :jason], + plt_file: {:no_warn, "priv/plts/dialyzer.plt"} ] end end diff --git a/mix.lock b/mix.lock index 41a1cdc..7e5fc27 100644 --- a/mix.lock +++ b/mix.lock @@ -1,21 +1,32 @@ %{ - "certifi": {:hex, :certifi, "2.3.1", "d0f424232390bf47d82da8478022301c561cf6445b5b5fb6a84d49a9e76d2639", [:rebar3], [{:parse_trans, "3.2.0", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm"}, - "combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm"}, - "gettext": {:hex, :gettext, "0.15.0", "40a2b8ce33a80ced7727e36768499fc9286881c43ebafccae6bab731e2b2b8ce", [:mix], [], "hexpm"}, - "hackney": {:hex, :hackney, "1.12.1", "8bf2d0e11e722e533903fe126e14d6e7e94d9b7983ced595b75f532e04b7fdc7", [:rebar3], [{:certifi, "2.3.1", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "5.1.1", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "1.0.2", [hex: :mimerl, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.1", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm"}, - "ibrowse": {:hex, :ibrowse, "4.4.0", "2d923325efe0d2cb09b9c6a047b2835a5eda69d8a47ed6ff8bc03628b764e991", [:rebar3], [], "hexpm"}, - "idna": {:hex, :idna, "5.1.1", "cbc3b2fa1645113267cc59c760bafa64b2ea0334635ef06dbac8801e42f7279c", [:rebar3], [{:unicode_util_compat, "0.3.1", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm"}, - "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm"}, - "mime": {:hex, :mime, "1.2.0", "78adaa84832b3680de06f88f0997e3ead3b451a440d183d688085be2d709b534", [:mix], [], "hexpm"}, - "mimerl": {:hex, :mimerl, "1.0.2", "993f9b0e084083405ed8252b99460c4f0563e41729ab42d9074fd5e52439be88", [:rebar3], [], "hexpm"}, - "named_args": {:hex, :named_args, "0.1.1", 
"aef3e050b83ee298e7b3247c6a90e6a8431a716761a46ce11064a0496f9adf9e", [:mix], [], "hexpm"}, - "parse_trans": {:hex, :parse_trans, "3.2.0", "2adfa4daf80c14dc36f522cf190eb5c4ee3e28008fc6394397c16f62a26258c2", [:rebar3], [], "hexpm"}, - "poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm"}, - "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.1", "28a4d65b7f59893bc2c7de786dec1e1555bd742d336043fe644ae956c3497fbe", [:make, :rebar], [], "hexpm"}, - "sweet_xml": {:hex, :sweet_xml, "0.6.5", "dd9cde443212b505d1b5f9758feb2000e66a14d3c449f04c572f3048c66e6697", [:mix], [], "hexpm"}, - "tesla": {:hex, :tesla, "0.10.0", "e588c7e7f1c0866c81eeed5c38f02a4a94d6309eede336c1e6ca08b0a95abd3f", [:mix], [{:exjsx, ">= 0.1.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "~> 4.2", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}], "hexpm"}, - "timex": {:hex, :timex, "3.3.0", "e0695aa0ddb37d460d93a2db34d332c2c95a40c27edf22fbfea22eb8910a9c8d", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 0.1.8 or ~> 0.5", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm"}, - "tzdata": {:hex, :tzdata, "0.5.16", "13424d3afc76c68ff607f2df966c0ab4f3258859bbe3c979c9ed1606135e7352", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm"}, - "unicode_util_compat": {:hex, :unicode_util_compat, "0.3.1", "a1f612a7b512638634a603c8f401892afbf99b8ce93a45041f8aaca99cadb85e", [:rebar3], [], "hexpm"}, - "xml_builder": {:hex, :xml_builder, "2.1.0", "c249d5339427c13cae11e9d9d0e8b40d25d228b9ecc54029f24017385e60280b", [:mix], [], "hexpm"}, + "bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm", "7af5c7e09fe1d40f76c8e4f9dd2be7cebd83909f31fee7cd0e9eadc567da8353"}, + "certifi": {:hex, :certifi, "2.6.1", "dbab8e5e155a0763eea978c913ca280a6b544bfa115633fa20249c3d396d9493", [:rebar3], [], "hexpm", "524c97b4991b3849dd5c17a631223896272c6b0af446778ba4675a1dff53bb7e"}, + "combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"}, + "credo": {:hex, :credo, "1.5.6", "e04cc0fdc236fefbb578e0c04bd01a471081616e741d386909e527ac146016c6", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "4b52a3e558bd64e30de62a648518a5ea2b6e3e5d2b164ef5296244753fc7eb17"}, + "dialyxir": {:hex, :dialyxir, "1.1.0", "c5aab0d6e71e5522e77beff7ba9e08f8e02bad90dfbeffae60eaf0cb47e29488", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "07ea8e49c45f15264ebe6d5b93799d4dd56a44036cf42d0ad9c960bc266c0b9a"}, + "earmark_parser": {:hex, :earmark_parser, "1.4.13", "0c98163e7d04a15feb62000e1a891489feb29f3d10cb57d4f845c405852bbef8", [:mix], [], "hexpm", "d602c26af3a0af43d2f2645613f65841657ad6efc9f0e361c3b6c06b578214ba"}, + "erlex": {:hex, :erlex, "0.2.6", 
"c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"}, + "ex_doc": {:hex, :ex_doc, "0.24.2", "e4c26603830c1a2286dae45f4412a4d1980e1e89dc779fcd0181ed1d5a05c8d9", [:mix], [{:earmark_parser, "~> 1.4.0", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "e134e1d9e821b8d9e4244687fb2ace58d479b67b282de5158333b0d57c6fb7da"}, + "ex_machina": {:hex, :ex_machina, "2.7.0", "b792cc3127fd0680fecdb6299235b4727a4944a09ff0fa904cc639272cd92dc7", [:mix], [{:ecto, "~> 2.2 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_sql, "~> 3.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}], "hexpm", "419aa7a39bde11894c87a615c4ecaa52d8f107bbdd81d810465186f783245bf8"}, + "file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"}, + "gettext": {:hex, :gettext, "0.18.2", "7df3ea191bb56c0309c00a783334b288d08a879f53a7014341284635850a6e55", [:mix], [], "hexpm", "f9f537b13d4fdd30f3039d33cb80144c3aa1f8d9698e47d7bcbcc8df93b1f5c5"}, + "hackney": {:hex, :hackney, "1.17.4", "99da4674592504d3fb0cfef0db84c3ba02b4508bae2dff8c0108baa0d6e0977c", [:rebar3], [{:certifi, "~>2.6.1", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~>6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~>1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.3.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~>1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "de16ff4996556c8548d512f4dbe22dd58a587bf3332e7fd362430a7ef3986b16"}, + "idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"}, + "jason": {:hex, :jason, "1.2.2", "ba43e3f2709fd1aa1dce90aaabfd039d000469c05c56f0b8e31978e03fa39052", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "18a228f5f0058ee183f29f9eae0805c6e59d61c3b006760668d8d18ff0d12179"}, + "makeup": {:hex, :makeup, "1.0.5", "d5a830bc42c9800ce07dd97fa94669dfb93d3bf5fcf6ea7a0c67b2e0e4a7f26c", [:mix], [{:nimble_parsec, "~> 0.5 or ~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "cfa158c02d3f5c0c665d0af11512fed3fba0144cf1aadee0f2ce17747fba2ca9"}, + "makeup_elixir": {:hex, :makeup_elixir, "0.15.1", "b5888c880d17d1cc3e598f05cdb5b5a91b7b17ac4eaf5f297cb697663a1094dd", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.1", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "db68c173234b07ab2a07f645a5acdc117b9f99d69ebf521821d89690ae6c6ec8"}, + "makeup_erlang": {:hex, :makeup_erlang, "0.1.1", "3fcb7f09eb9d98dc4d208f49cc955a34218fc41ff6b84df7c75b3e6e533cc65f", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", 
"174d0809e98a4ef0b3309256cbf97101c6ec01c4ab0b23e926a9e17df2077cbb"}, + "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"}, + "mime": {:hex, :mime, "1.6.0", "dabde576a497cef4bbdd60aceee8160e02a6c89250d6c0b29e56c0dfb00db3d2", [:mix], [], "hexpm", "31a1a8613f8321143dde1dafc36006a17d28d02bdfecb9e95a880fa7aabd19a7"}, + "mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"}, + "nimble_parsec": {:hex, :nimble_parsec, "1.1.0", "3a6fca1550363552e54c216debb6a9e95bd8d32348938e13de5eda962c0d7f89", [:mix], [], "hexpm", "08eb32d66b706e913ff748f11694b17981c0b04a33ef470e33e11b3d3ac8f54b"}, + "parse_trans": {:hex, :parse_trans, "3.3.1", "16328ab840cc09919bd10dab29e431da3af9e9e7e7e6f0089dd5a2d2820011d8", [:rebar3], [], "hexpm", "07cd9577885f56362d414e8c4c4e6bdf10d43a8767abb92d24cbe8b24c54888b"}, + "poison": {:hex, :poison, "4.0.1", "bcb755a16fac91cad79bfe9fc3585bb07b9331e50cfe3420a24bcc2d735709ae", [:mix], [], "hexpm", "ba8836feea4b394bb718a161fc59a288fe0109b5006d6bdf97b6badfcf6f0f25"}, + "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.6", "cf344f5692c82d2cd7554f5ec8fd961548d4fd09e7d22f5b62482e5aeaebd4b0", [:make, :mix, :rebar3], [], "hexpm", "bdb0d2471f453c88ff3908e7686f86f9be327d065cc1ec16fa4540197ea04680"}, + "sweet_xml": {:hex, :sweet_xml, "0.7.0", "39ca6a53c526a1759672690656d5a787bee1016bfff467310170f9b428a238cb", [:mix], [], "hexpm", "2f18cb07f22b5a0d3e99d8b7e4176020f0051f90e449968821e4fde930edd945"}, + "tesla": {:hex, :tesla, "1.4.2", "54fc2c989b63c3c9ff675c2f193d824325a802e61575063363eaa97f471af2ed", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.3", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, "~> 1.3", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "~> 4.4.0", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "aa3a4130a662a16471a6847fecd4772e2adaf9a4693488f022c612c0bb98c9fd"}, + "timex": {:hex, :timex, "3.7.5", "3eca56e23bfa4e0848f0b0a29a92fa20af251a975116c6d504966e8a90516dfd", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "a15608dca680f2ef663d71c95842c67f0af08a0f3b1d00e17bbd22872e2874e4"}, + "tzdata": {:hex, :tzdata, "1.1.0", "72f5babaa9390d0f131465c8702fa76da0919e37ba32baa90d93c583301a8359", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "18f453739b48d3dc5bcf0e8906d2dc112bb40baafe2c707596d89f3c8dd14034"}, + "unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", 
"25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"}, + "xml_builder": {:hex, :xml_builder, "2.2.0", "cc5f1eeefcfcde6e90a9b77fb6c490a20bc1b856a7010ce6396f6da9719cbbab", [:mix], [], "hexpm", "9d66d52fb917565d358166a4314078d39ef04d552904de96f8e73f68f64a62c9"}, } diff --git a/tesla_bugfix_when_no_connectivity.md b/tesla_bugfix_when_no_connectivity.md deleted file mode 100644 index 3035fa1..0000000 --- a/tesla_bugfix_when_no_connectivity.md +++ /dev/null @@ -1,10 +0,0 @@ - -- `tesla\lib\tesla\middleware\core.ex` - - -```elixir -defmodule Tesla.Middleware.Normalize do - def normalize(:error) do - raise %Tesla.Error{message: "unknown adapter error", reason: "unknown error"} - end -``` diff --git a/test/ex_microsoft_azure_storage_test.exs b/test/ex_microsoft_azure_storage_test.exs index d0cdebc..d61acd8 100644 --- a/test/ex_microsoft_azure_storage_test.exs +++ b/test/ex_microsoft_azure_storage_test.exs @@ -1,126 +1,2 @@ -defmodule ExMicrosoftAzureStorageTest do - use ExUnit.Case - # doctest ExMicrosoftAzureStorage - alias Microsoft.Azure.Storage.ApiVersion - - import SweetXml - - test "Tests HMAC SHA256" do - # https://en.wikipedia.org/wiki/HMAC#Examples - assert Base.encode16( - :crypto.hmac(:sha256, "key", "The quick brown fox jumps over the lazy dog"), - case: :lower - ) == "f7bc83f430538424b13298e6aa6fb143ef4d59a14946175997479dbc2d1a3cd8" - end - - test "Try out SweetXml" do - doc = """ - - - - - Match One - - 1Team One - 2Team Two - - - - Match Two - - 2Team Two - 3Team Three - - - - Match Three - - 1Team One - 3Team Three - - - - - """ - - result = doc |> xpath(~x"//matchup/name/text()") - assert result == 'Match One' - - result = doc |> xpath(~x"//matchup/@winner-id"l) - assert result == ['1', '2', '1'] - - result = - doc - |> xpath( - ~x"//matchups/matchup"l, - name: ~x"./name/text()", - winner: [ - ~x".//team/id[.=ancestor::matchup/@winner-id]/..", - name: ~x"./name/text()" - ] - ) - - assert result == [ - %{name: 'Match One', winner: %{name: 'Team One'}}, - %{name: 'Match Two', winner: %{name: 'Team Two'}}, - %{name: 'Match Three', winner: %{name: 'Team One'}} - ] - - result = - doc - |> xmap( - matchups: [ - ~x"//matchups/matchup"l, - name: ~x"./name/text()", - winner: [ - ~x".//team/id[.=ancestor::matchup/@winner-id]/..", - name: ~x"./name/text()" - ] - ], - last_matchup: [ - ~x"//matchups/matchup[last()]", - name: ~x"./name/text()", - winner: [ - ~x".//team/id[.=ancestor::matchup/@winner-id]/..", - name: ~x"./name/text()" - ] - ] - ) - - assert result == %{ - matchups: [ - %{name: 'Match One', winner: %{name: 'Team One'}}, - %{name: 'Match Two', winner: %{name: 'Team Two'}}, - %{name: 'Match Three', winner: %{name: 'Team One'}} - ], - last_matchup: %{name: 'Match Three', winner: %{name: 'Team One'}} - } - end - - test "api_version comparison" do - old_y = "2017-01-20" |> ApiVersion.parse() - new_y = "2018-01-20" |> ApiVersion.parse() - old_m = "2018-01-20" |> ApiVersion.parse() - new_m = "2018-03-20" |> ApiVersion.parse() - old_d = "2018-03-19" |> ApiVersion.parse() - new_d = "2018-03-20" |> ApiVersion.parse() - - assert :older == old_y |> ApiVersion.compare(new_y) - assert :newer == new_y |> ApiVersion.compare(old_y) - assert :older == old_m |> ApiVersion.compare(new_m) - assert :newer == new_m |> ApiVersion.compare(old_m) - assert :older == old_d |> ApiVersion.compare(new_d) - assert :newer == new_d |> ApiVersion.compare(old_d) - assert :equal == new_d |> ApiVersion.compare(new_d) - end - - # test "ce" do - # "fR5pqJJzUC/H4rXDmkbQSL0JO94=" - # |> Base.decode64!() - # 
|> Base.encode16() - - # "7d1e69a89273502fc7e2b5c39a46d048bd093bde" - # |> Base.decode16!(case: :mixed) - # |> Base.encode64() - # end +defmodule AzureTest do end diff --git a/test/storage/blob_from_url.txt b/test/storage/blob_from_url.txt new file mode 100644 index 0000000..9b27c40 --- /dev/null +++ b/test/storage/blob_from_url.txt @@ -0,0 +1 @@ +This file is used in `blob_test.exs` to simulate the upload of a blob from a URL. diff --git a/test/storage/blob_properties_test.exs b/test/storage/blob_properties_test.exs new file mode 100644 index 0000000..f43ea02 --- /dev/null +++ b/test/storage/blob_properties_test.exs @@ -0,0 +1,36 @@ +defmodule ExMicrosoftAzureStorage.Storage.BlobPropertiesTest do + @moduledoc false + + use ExUnit.Case, async: true + + alias ExMicrosoftAzureStorage.Storage.BlobProperties + + describe "deserialise" do + test "deserialises blob properties from headers" do + headers = [ + {"server", "Azurite-Blob/3.11.0"}, + {"last-modified", "Mon, 12 Jul 2021 18:18:21 GMT"}, + {"x-ms-creation-time", "Mon, 12 Jul 2021 18:18:21 GMT"}, + {"x-ms-blob-type", "BlockBlob"}, + {"x-ms-lease-state", "available"}, + {"x-ms-lease-status", "unlocked"}, + {"content-length", "12"}, + {"content-type", "application/octet-stream"}, + {"etag", "\"0x198B53AAAB848F0\""}, + {"content-md5", "h/Fps4ugBcqAcVVmEmMG/w=="}, + {"x-ms-request-id", "f7022735-4acd-49b1-ae93-de9389874274"}, + {"x-ms-version", "2020-06-12"}, + {"date", "Mon, 12 Jul 2021 18:18:21 GMT"}, + {"accept-ranges", "bytes"}, + {"x-ms-server-encrypted", "true"}, + {"x-ms-access-tier", "Hot"}, + {"x-ms-access-tier-inferred", "true"}, + {"x-ms-access-tier-change-time", "Mon, 12 Jul 2021 18:18:21 GMT"}, + {"connection", "keep-alive"}, + {"keep-alive", "timeout=5"} + ] + + assert headers |> BlobProperties.deserialise() + end + end +end diff --git a/test/storage/blob_storage_test.exs b/test/storage/blob_storage_test.exs new file mode 100644 index 0000000..2d44326 --- /dev/null +++ b/test/storage/blob_storage_test.exs @@ -0,0 +1,61 @@ +defmodule ExMicrosoftAzureStorage.Storage.BlobStorageTest do + @moduledoc false + + use ExUnit.Case, async: true + + @moduletag :external + + import ExMicrosoftAzureStorage.Factory + + alias ExMicrosoftAzureStorage.Storage.BlobStorage + alias ExMicrosoftAzureStorage.Storage.BlobStorage.ServiceProperties + + setup do + storage_context = build(:storage_context) + + %{storage_context: storage_context} + end + + describe "get_blob_service_stats" do + test "gets blob service stats", %{storage_context: storage_context} do + assert {:ok, %{geo_replication: %{last_sync_time: last_sync_time, status: "live"}}} = + storage_context |> BlobStorage.get_blob_service_stats() + + assert last_sync_time + end + end + + describe "get_blob_service_properties" do + test "gets blob service properties", %{storage_context: storage_context} do + assert {:ok, %{service_properties: %ServiceProperties{}}} = + storage_context |> BlobStorage.get_blob_service_properties() + end + end + + describe "put_blob_service_properties" do + test "sets CORS rules", %{storage_context: storage_context} do + rule = %{ + allowed_origins: ["https://google.com"], + allowed_methods: ["GET"], + max_age_in_seconds: 600, + exposed_headers: [""], + allowed_headers: [""] + } + + cors_rule = rule |> ServiceProperties.CorsRule.to_struct() + + {:ok, %{service_properties: service_properties}} = + storage_context |> BlobStorage.get_blob_service_properties() + + service_properties = Map.put(service_properties, :cors_rules, [cors_rule]) + + storage_context + |> 
BlobStorage.set_blob_service_properties(service_properties) + + {:ok, %{service_properties: service_properties_after_update}} = + storage_context |> BlobStorage.get_blob_service_properties() + + assert service_properties == service_properties_after_update + end + end +end diff --git a/test/storage/blob_test.exs b/test/storage/blob_test.exs new file mode 100644 index 0000000..3d84da9 --- /dev/null +++ b/test/storage/blob_test.exs @@ -0,0 +1,155 @@ +defmodule ExMicrosoftAzureStorage.Storage.BlobTest do + @moduledoc false + + use ExUnit.Case, async: true + + @moduletag :external + + alias ExMicrosoftAzureStorage.Storage.{Blob, BlobProperties, Container} + + import ExMicrosoftAzureStorage.Factory + + defp header(headers, key) do + case List.keyfind(headers, key, 0) do + nil -> nil + {^key, value} -> value + end + end + + setup do + storage_context = build(:storage_context) + container_context = storage_context |> Container.new("blob-test") + + Container.delete_container(container_context) + + {:ok, _response} = Container.ensure_container(container_context) + + %{storage_context: storage_context, container_context: container_context} + end + + describe "blob properties" do + setup %{container_context: container_context} do + blob_name = build(:blob_name) + blob_data = build(:blob_data) + blob = container_context |> Blob.new(blob_name) + + blob |> Blob.delete_blob() + {:ok, %{status: 201}} = blob |> Blob.put_blob(blob_data) + + %{blob: blob, container_context: container_context} + end + + test "gets blob properties", %{blob: blob} do + assert {:ok, %{status: 200, properties: %BlobProperties{}}} = + blob |> Blob.get_blob_properties() + end + + test "error when blob not found", %{container_context: container_context} do + blob_name = build(:blob_name) + blob = container_context |> Blob.new(blob_name) + + assert {:error, %{status: 404}} = blob |> Blob.get_blob_properties() + end + + test "set blob properties", %{blob: blob} do + content_type = build(:content_type) + content_md5 = build(:content_md5) + + {:ok, %{status: 200, properties: blob_properties}} = blob |> Blob.get_blob_properties() + + refute blob_properties.content_type == content_type + + blob_properties = + blob_properties + |> Map.put(:content_type, content_type) + |> Map.put(:content_md5, content_md5) + + assert {:ok, %{status: 200}} = blob |> Blob.set_blob_properties(blob_properties) + + assert {:ok, %{status: 200, properties: blob_properties}} = + blob |> Blob.get_blob_properties() + + assert blob_properties.content_type == content_type + assert blob_properties.content_md5 == content_md5 + end + end + + describe "put_blob" do + test "puts a blob", %{container_context: container_context} do + blob_name = "my_blob" + blob_data = "my_blob_data" + blob = container_context |> Blob.new(blob_name) + + assert {:ok, %{status: 201}} = + blob + |> Blob.put_blob(blob_data) + + assert {:ok, %{body: ^blob_data}} = blob |> Blob.get_blob() + end + end + + describe "put_blob_by_url" do + test "puts a blob from a URL", %{ + container_context: container_context, + storage_context: storage_context + } do + blob_name = "blob_from_url.txt" + + url = + "https://raw.githubusercontent.com/joeapearson/elixir-azure/main/test/storage/#{blob_name}" + + expected_contents = + if storage_context.is_development_factory do + # Storage emulator doesn't yet support put blob from URL API and always returns an empty + # blob + "" + else + File.read!(Path.expand(blob_name, __DIR__)) + end + + %{headers: source_headers} = Tesla.head!(url) + source_content_type = 
header(source_headers, "content-type") + source_content_encoding = header(source_headers, "content-encoding") + source_content_language = header(source_headers, "content-language") + source_content_disposition = header(source_headers, "content-disposition") + + assert is_binary(source_content_type) + + blob = container_context |> Blob.new(blob_name) + + assert {:ok, %{status: 201}} = + blob |> Blob.put_blob_from_url(url, content_type_workaround: true) + + assert {:ok, %{status: 200, body: destination_body, headers: destination_headers}} = + blob |> Blob.get_blob() + + assert destination_body == expected_contents + + destination_content_type = header(destination_headers, "content-type") + destination_content_encoding = header(destination_headers, "content-encoding") + destination_content_language = header(destination_headers, "content-language") + destination_content_disposition = header(destination_headers, "content-disposition") + + assert source_content_type == destination_content_type + assert source_content_encoding == destination_content_encoding + assert source_content_language == destination_content_language + assert source_content_disposition == destination_content_disposition + end + end + + describe "copy" do + test "copies a blob from another blob", %{ + container_context: container_context + } do + blob_data = "my_blob_data" + source = container_context |> Blob.new("source_blob") + target = container_context |> Blob.new("target_blob") + + assert {:ok, %{status: 201}} = Blob.put_blob(source, blob_data) + assert {:ok, %{body: ^blob_data}} = Blob.get_blob(source) + + assert {:ok, %{x_ms_copy_status: "success"}} = Blob.copy(source, target) + assert {:ok, %{body: ^blob_data}} = Blob.get_blob(target) + end + end +end diff --git a/test/storage/connection_string_test.exs b/test/storage/connection_string_test.exs new file mode 100644 index 0000000..732635e --- /dev/null +++ b/test/storage/connection_string_test.exs @@ -0,0 +1,29 @@ +defmodule ExMicrosoftAzureStorage.Storage.ConnectionStringTest do + @moduledoc false + + use ExUnit.Case, async: true + + import ExMicrosoftAzureStorage.Factory + + alias ExMicrosoftAzureStorage.Storage.ConnectionString + + describe "parse" do + test "parses a connection string" do + default_endpoints_protocol = "https" + account_name = "my_account_name" + account_key = "my_account_key" + endpoint_suffix = "my_endpoint_suffix" + + attrs = %{ + default_endpoints_protocol: default_endpoints_protocol, + account_name: account_name, + account_key: account_key, + endpoint_suffix: endpoint_suffix + } + + connection_string = build(:connection_string, attrs) + + assert attrs == ConnectionString.parse(connection_string) + end + end +end diff --git a/test/storage/shared_access_signature_test.exs b/test/storage/shared_access_signature_test.exs new file mode 100644 index 0000000..6da5711 --- /dev/null +++ b/test/storage/shared_access_signature_test.exs @@ -0,0 +1,76 @@ +defmodule ExMicrosoftAzureStorage.Storage.SharedAccessSignatureTest do + @moduledoc false + + use ExUnit.Case, async: true + + @moduletag :external + + import ExMicrosoftAzureStorage.Factory + + alias ExMicrosoftAzureStorage.Storage.SharedAccessSignature, as: SAS + alias ExMicrosoftAzureStorage.Storage.{Blob, Container} + + @blob_name "my_blob" + @blob_data "blob_data" + + setup_all do + storage_context = build(:storage_context) + container_context = storage_context |> Container.new("sas-test") + + {:ok, _response} = Container.ensure_container(container_context) + + blob = container_context |> 
Blob.new(@blob_name) + + {:ok, %{status: 201}} = + blob + |> Blob.put_blob(@blob_data) + + %{storage_context: storage_context, container_context: container_context, blob: blob} + end + + describe "encode" do + test "encodes values" do + value = build(:value) + now = DateTime.utc_now() + + assert {"sv", value} == SAS.encode({:service_version, value}) + assert {"st", SAS.as_time(now)} == SAS.encode({:start_time, now}) + assert {"se", SAS.as_time(now)} == SAS.encode({:expiry_time, now}) + assert {"cr", value} == SAS.encode({:canonicalized_resource, value}) + + assert {"sr", "b"} == SAS.encode({:resource, [:blob]}) + assert {"sr", "c"} == SAS.encode({:resource, [:container]}) + assert {"sr", "s"} == SAS.encode({:resource, [:share]}) + assert {"sr", "f"} == SAS.encode({:resource, [:file]}) + + assert {"sip", value} == SAS.encode({:ip_range, value}) + assert {"spr", value} == SAS.encode({:protocol, value}) + + assert {"ss", "b"} == SAS.encode({:services, [:blob]}) + assert {"ss", "q"} == SAS.encode({:services, [:queue]}) + assert {"ss", "t"} == SAS.encode({:services, [:table]}) + assert {"ss", "f"} == SAS.encode({:services, [:file]}) + + assert {"srt", "s"} == SAS.encode({:resource_type, [:service]}) + assert {"srt", "o"} == SAS.encode({:resource_type, [:object]}) + assert {"srt", "c"} == SAS.encode({:resource_type, [:container]}) + + assert {"sp", "r"} == SAS.encode({:permissions, [:read]}) + assert {"sp", "w"} == SAS.encode({:permissions, [:write]}) + assert {"sp", "d"} == SAS.encode({:permissions, [:delete]}) + assert {"sp", "l"} == SAS.encode({:permissions, [:list]}) + assert {"sp", "a"} == SAS.encode({:permissions, [:add]}) + assert {"sp", "c"} == SAS.encode({:permissions, [:create]}) + assert {"sp", "u"} == SAS.encode({:permissions, [:update]}) + assert {"sp", "p"} == SAS.encode({:permissions, [:process]}) + + assert {"rscc", value} == SAS.encode({:cache_control, value}) + assert {"rscd", value} == SAS.encode({:content_disposition, value}) + assert {"rsce", value} == SAS.encode({:content_encoding, value}) + assert {"rscl", value} == SAS.encode({:content_language, value}) + assert {"rsct", value} == SAS.encode({:content_type, value}) + + assert {nil, nil} == SAS.encode({:not, "a value"}) + end + end +end diff --git a/test/storage/storage_test.exs b/test/storage/storage_test.exs new file mode 100644 index 0000000..36c19be --- /dev/null +++ b/test/storage/storage_test.exs @@ -0,0 +1,25 @@ +defmodule ExMicrosoftAzureStorage.StorageTest do + @moduledoc false + + use ExUnit.Case, async: true + + import ExMicrosoftAzureStorage.Factory + + alias ExMicrosoftAzureStorage.Storage + + describe "new" do + test "can be created from an Azure connection string" do + storage = build(:connection_string) |> Storage.new() + + assert is_binary(storage.account_name) + assert is_binary(storage.account_key) + assert is_binary(storage.default_endpoints_protocol) + assert is_binary(storage.endpoint_suffix) + + assert is_nil(storage.host) + assert is_nil(storage.aad_token_provider) + + assert false == storage.is_development_factory + end + end +end diff --git a/test/support/factory.ex b/test/support/factory.ex new file mode 100644 index 0000000..f079850 --- /dev/null +++ b/test/support/factory.ex @@ -0,0 +1,36 @@ +defmodule ExMicrosoftAzureStorage.Factory do + @moduledoc """ + Provides test data factories. 
+ """ + + use ExMachina + + def blob_data_factory(_attrs), do: sequence("blob_data") + + def blob_name_factory(_attrs), do: sequence("blob_name") + + def connection_string_factory(attrs) do + [ + [ + "DefaultEndpointsProtocol", + Map.get(attrs, :default_endpoints_protocol, sequence("default_endpoints_protocol")) + ], + ["AccountName", Map.get(attrs, :account_name, sequence("account_name"))], + ["AccountKey", Map.get(attrs, :account_key, sequence("account_key"))], + ["EndpointSuffix", Map.get(attrs, :endpoint_suffix, sequence("endpoint_suffix"))] + ] + |> Enum.map_join(";", fn kv -> Enum.join(kv, "=") end) + end + + def content_type_factory(_attrs), do: sequence("application/type") + + def content_md5_factory(_attrs), do: sequence("md5") |> Base.encode64() + + def storage_context_factory do + ExMicrosoftAzureStorage.Storage.development_factory() + end + + def value_factory(_attrs) do + sequence("value") + end +end diff --git a/test/test_helper.exs b/test/test_helper.exs index 869559e..00fa573 100644 --- a/test/test_helper.exs +++ b/test/test_helper.exs @@ -1 +1,3 @@ ExUnit.start() + +ExUnit.configure(exclude: [external: true]) diff --git a/test/utilities_test.exs b/test/utilities_test.exs deleted file mode 100644 index 55b25fe..0000000 --- a/test/utilities_test.exs +++ /dev/null @@ -1,6 +0,0 @@ -defmodule Microsoft.Azure.Storage.Tests.DocTests do - use ExUnit.Case, async: true - doctest Microsoft.Azure.Storage.Utilities -end - -