diff --git a/ashes/src/components/sku-line-items/sku-line-items.jsx b/ashes/src/components/sku-line-items/sku-line-items.jsx
index dd94beaded..fe274b9c42 100644
--- a/ashes/src/components/sku-line-items/sku-line-items.jsx
+++ b/ashes/src/components/sku-line-items/sku-line-items.jsx
@@ -16,7 +16,7 @@ export const defaultColumns = [
{
field: 'sku',
text: 'SKU',
- render: (code: string) =>
{code}
+ render: (code: string) =>
{code},
},
{ field: 'price', text: 'Price', type: 'currency' },
{ field: 'quantity', text: 'Qty' },
@@ -24,11 +24,11 @@ export const defaultColumns = [
];
const attributesColumns = {
- 'giftCard': [
+ giftCard: [
{
field: 'code',
text: 'Gift Card Number',
- render: code => !_.isEmpty(code) ?
{code} : 'N/A'
+ render: code => (!_.isEmpty(code) ?
{code} : 'N/A'),
},
{ field: 'recipientName', text: 'Recipient Name' },
{ field: 'recipientEmail', text: 'Recipient Email' },
@@ -42,6 +42,7 @@ type Props = {
items: Array
,
renderRow?: Function,
withAttributes?: boolean,
+ withLink?: boolean,
className?: string,
};
@@ -49,14 +50,16 @@ const lineItemAttributes = (item: SkuItem, columns: Columns): Array>
const attributes = _.get(item, 'attributes', {});
if (!_.isEmpty(attributes)) {
- return Object.keys(attributes).map((name: string) => (
- _.get(attributesColumns, name) ?
- : null
- ));
+ return Object.keys(attributes).map(
+ (name: string) =>
+ _.get(attributesColumns, name)
+ ?
+ : null
+ );
}
return [];
@@ -71,10 +74,7 @@ const SkuLineItems = (props: Props) => {
const attributes = _.get(items[index], 'attributes');
const className = classNames({ '_with-attributes': !_.isEmpty(attributes) });
- return [
- React.cloneElement(row, { className }),
- lineItemAttributes(items[index], columns),
- ];
+ return [React.cloneElement(row, { className }), lineItemAttributes(items[index], columns)];
});
if (items.length > 0) {
@@ -84,14 +84,14 @@ const SkuLineItems = (props: Props) => {
className={className}
columns={columns}
emptyMessage="No items yet."
- data={{rows: items}}
+ data={{ rows: items }}
renderRow={renderRow}
processRows={withAttributes ? processRows : _.identity}
/>
);
} else {
return (
-
+
No items yet.
);
diff --git a/ashes/src/interfaces/paragons/customer.js b/ashes/src/interfaces/paragons/customer.js
index 7be55e08dc..efc571b22b 100644
--- a/ashes/src/interfaces/paragons/customer.js
+++ b/ashes/src/interfaces/paragons/customer.js
@@ -1,7 +1,12 @@
type Customer = {
id: number,
email: string,
- phoneNumber?: string,
name: string,
-};
+ phoneNumber?: string,
+ isGuest?: boolean,
+ groups?: Array
,
+ avatarUrl?: string,
+ rank?: number,
+ location?: string,
+};
diff --git a/ashes/src/lib/credit-card-utils.js b/ashes/src/lib/credit-card-utils.js
index 314698d613..37628baac3 100644
--- a/ashes/src/lib/credit-card-utils.js
+++ b/ashes/src/lib/credit-card-utils.js
@@ -23,13 +23,18 @@ export function expirationYears() {
return _.range(20).map(n => [current + n, current + n]);
}
-export function formatExpiration(card) {
- return `${card.expMonth}/${card.expYear}`;
-}
+export function formatExpiration({ expMonth, expYear }) {
+ if (!expMonth || !expYear) {
+ return 'N/A';
+ }
+ return `${expMonth}/${expYear}`;
+}
export function formatNumber(card) {
- return `xxxx xxxx xxxx ${card.lastFour}`;
+ const lastFour = card.lastFour || 'xxxx';
+
+ return `xxxx xxxx xxxx ${lastFour}`;
}
export function getBillingAddress(getState, customerId, addressId) {
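For clarity, a minimal sketch of the guard behaviour these changes introduce: missing expiration fields fall back to `'N/A'` and a missing `lastFour` falls back to a masked placeholder. The `CreditCard` shape below is an assumption for illustration, not the repo's Flow type.

```typescript
// A minimal sketch of the hardened formatters above, with an assumed shape
// (the repo uses Flow types, not this TypeScript one).
type CreditCard = { expMonth?: number; expYear?: number; lastFour?: string };

function formatExpiration({ expMonth, expYear }: CreditCard): string {
  if (!expMonth || !expYear) return 'N/A';
  return `${expMonth}/${expYear}`;
}

function formatNumber(card: CreditCard): string {
  const lastFour = card.lastFour || 'xxxx';
  return `xxxx xxxx xxxx ${lastFour}`;
}

console.log(formatExpiration({}));                              // "N/A"
console.log(formatExpiration({ expMonth: 4, expYear: 2020 }));  // "4/2020"
console.log(formatNumber({ lastFour: '4242' }));                // "xxxx xxxx xxxx 4242"
```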
diff --git a/ashes/src/modules/orders/details.js b/ashes/src/modules/orders/details.js
index 201387863b..4f68f42f37 100644
--- a/ashes/src/modules/orders/details.js
+++ b/ashes/src/modules/orders/details.js
@@ -13,42 +13,51 @@ const initialState: State = {
order: null,
};
-const _getOrder = createAsyncActions(
- 'getOrder',
- (refNum: string) => Api.get(`/orders/${refNum}`)
-);
+const _getOrder = createAsyncActions('getOrder', (refNum: string) => Api.get(`/orders/${refNum}`));
-const _updateOrder = createAsyncActions(
- 'updateOrder',
- (id: number, data: Object) => Api.patch(`/orders/${id}`, data)
+const _getAmazonOrder = createAsyncActions('getAmazonOrder', (refNum: string) =>
+ Api.get(`/hyperion/orders/${refNum}/full`)
);
-const _updateShipments = createAsyncActions(
- 'updateShipments',
- (refNum: string) => Api.patch(`/inventory/shipments/for-order/${refNum}`, { state: 'shipped' })
+const _updateOrder = createAsyncActions('updateOrder', (id: number, data: Object) => Api.patch(`/orders/${id}`, data));
+
+const _updateShipments = createAsyncActions('updateShipments', (refNum: string) =>
+ Api.patch(`/inventory/shipments/for-order/${refNum}`, { state: 'shipped' })
);
export const fetchOrder = _getOrder.perform;
+export const fetchAmazonOrder = _getAmazonOrder.perform;
export const updateOrder = _updateOrder.perform;
export const updateShipments = _updateShipments.perform;
-export const clearFetchErrors =_getOrder.clearErrors;
+export const clearFetchErrors = _getOrder.clearErrors;
function orderSucceeded(state: State, payload: Object): State {
- const order: OrderParagon = new OrderParagon(payload.result || payload);
+ const order: OrderParagon = new OrderParagon(payload.result);
+
+ return { ...state, order };
+}
+
+function amazonOrderSucceeded(state: State, payload: Object): State {
+ const res = payload.result;
+
+ const order: OrderParagon = new OrderParagon(res);
return { ...state, order };
}
export function increaseRemorsePeriod(refNum: string) {
return (dispatch: Function): Promise<*> =>
- Api.post(`/orders/${refNum}/increase-remorse-period`)
- .then(order => dispatch(_updateOrder.succeeded(order)));
+ Api.post(`/orders/${refNum}/increase-remorse-period`).then(order => dispatch(_updateOrder.succeeded(order)));
}
-const reducer = createReducer({
- [_getOrder.succeeded]: orderSucceeded,
- [_updateOrder.succeeded]: orderSucceeded,
- [_updateShipments.succeeded]: (state) => state,
-}, initialState);
+const reducer = createReducer(
+ {
+ [_getOrder.succeeded]: orderSucceeded,
+ [_getAmazonOrder.succeeded]: amazonOrderSucceeded,
+ [_updateOrder.succeeded]: orderSucceeded,
+ [_updateShipments.succeeded]: state => state,
+ },
+ initialState
+);
export default reducer;
diff --git a/ashes/src/paragons/order.js b/ashes/src/paragons/order.js
index dd30c046b2..9be54e7e83 100644
--- a/ashes/src/paragons/order.js
+++ b/ashes/src/paragons/order.js
@@ -27,6 +27,7 @@ export type Order = {
paymentMethods: Array,
orderState: string,
lineItems: Object,
+ channel: ?string,
};
export type ShippingMethod = {
@@ -75,7 +76,6 @@ export const stateTitles = {
};
// this map taken from scala code
-
export const allowedStateTransitions = {
[states.cart]: [states.fraudHold, states.remorseHold, states.canceled, states.fulfillmentStarted],
[states.fraudHold]: [states.manualHold, states.remorseHold, states.fulfillmentStarted, states.canceled],
@@ -91,10 +91,23 @@ function collectLineItems(skus: Array): Array {
});
}
+export function isAmazonOrder(order: OrderParagon) {
+ return order.channel === 'Amazon.com';
+}
+
+// @todo verify isAmazon more strictly and conveniently
+// @todo: introduce flow types for list responses from ES
+export function isAmazonListItemOrder(order: any) {
+ const customerEmail = _.get(order, 'customer.email', '');
+
+ return customerEmail.endsWith('@marketplace.amazon.com');
+}
+
export default class OrderParagon {
constructor(order: Order) {
Object.assign(this, order);
const skus = _.get(order, 'lineItems.skus');
+
if (skus) {
this.lineItems.skus = collectLineItems(skus);
}
@@ -103,15 +116,16 @@ export default class OrderParagon {
lineItems: Object;
orderState: string;
referenceNumber: string;
- customer: Object;
- promotion: Object;
- coupon: Object;
+ customer: Customer;
paymentMethods: Array;
orderState: string;
remorsePeriodEnd: string;
shippingState: string;
paymentState: string;
placedAt: string;
+ promotion: ?Object;
+ coupon: ?Object;
+ channel: ?string;
get entityId(): string {
return this.referenceNumber;
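The two detection helpers differ because a full order payload exposes `channel`, while rows coming back from ES list searches apparently do not, so they fall back to the anonymized Amazon buyer email. A hedged TypeScript sketch of that split (the shapes below are assumptions for illustration; the real helpers live next to `OrderParagon` and use lodash's `_.get`):

```typescript
// Minimal re-statement of the two helpers added above, with assumed shapes.
type OrderLike = { channel?: string | null };
type EsOrderRow = { customer?: { email?: string } };

function isAmazonOrder(order: OrderLike): boolean {
  // Full order payloads carry an explicit channel.
  return order.channel === 'Amazon.com';
}

function isAmazonListItemOrder(order: EsOrderRow): boolean {
  // ES list rows fall back to the anonymized Amazon marketplace email.
  const customerEmail = order.customer?.email ?? '';
  return customerEmail.endsWith('@marketplace.amazon.com');
}

console.log(isAmazonOrder({ channel: 'Amazon.com' }));                                    // true
console.log(isAmazonListItemOrder({ customer: { email: 'abc@marketplace.amazon.com' } })); // true
```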
diff --git a/ashes/src/routes.js b/ashes/src/routes.js
index 780a3df3ab..e73dfa8e09 100644
--- a/ashes/src/routes.js
+++ b/ashes/src/routes.js
@@ -22,7 +22,7 @@ export default function makeRoutes(jwtToken) {
const claims = getClaims(jwtToken);
return [
-
+
{authRoutes(claims)}
@@ -36,6 +36,6 @@ export default function makeRoutes(jwtToken) {
,
- ,
+ ,
];
}
diff --git a/ashes/src/routes/orders.js b/ashes/src/routes/orders.js
index e4880a1501..2c5eb79a9a 100644
--- a/ashes/src/routes/orders.js
+++ b/ashes/src/routes/orders.js
@@ -67,6 +67,9 @@ const getRoutes = (jwt: Object) => {
frn: frn.activity.order,
}),
]),
+ router.read('amazon-order', { path: 'amazon/:order', component: Order, amazon: true }, [
+ router.read('amazon-order-details', { component: OrderDetails, isIndex: true }),
+ ]),
]);
return (
diff --git a/developer-portal/content/reference/amazon_orders.apib b/developer-portal/content/reference/amazon_orders.apib
new file mode 100644
index 0000000000..64ad0305d6
--- /dev/null
+++ b/developer-portal/content/reference/amazon_orders.apib
@@ -0,0 +1,3 @@
+# Data Amazon Orders
+
+
\ No newline at end of file
diff --git a/developer-portal/content/reference/index.md b/developer-portal/content/reference/index.md
index e15f25597b..e56e63fe53 100644
--- a/developer-portal/content/reference/index.md
+++ b/developer-portal/content/reference/index.md
@@ -252,6 +252,7 @@ in the `5xx` range indicate an error with Fox's platform.
+
@@ -260,3 +261,5 @@ in the `5xx` range indicate an error with Fox's platform.
+
+
diff --git a/developer-portal/content/reference/objects/amazon_order.apib b/developer-portal/content/reference/objects/amazon_order.apib
new file mode 100644
index 0000000000..80b213a768
--- /dev/null
+++ b/developer-portal/content/reference/objects/amazon_order.apib
@@ -0,0 +1,25 @@
+# Data structures
+
+### AmazonOrderBase
++ amazonOrderId: `111-5296499-9653859` (required, string) - Amazon Order ID
++ orderTotal: `4500` (required, number) - Order total amount in cents
++ paymentMethodDetail: `CreditCard` (required, string) - Payment method name
++ orderType: `StandardOrder` (required, string) - Amazon Order type
++ currency: `USD` (required, string) - Order currency
++ orderStatus: `Shipped` (required, string) - Order status
++ purchaseDate: `2017-03-08T19:38:36Z` (required, string) - Timestamp of when the order was placed.
++ scope: `1` (required, string) - Scope ID
+
+### AmazonOrderPayload
+- Include AmazonOrderBase
++ customerEmail: `67xlr8kb83lx0t2@marketplace.amazon.com` (required, string) - Customer email anonymized by Amazon
+
+### AmazonOrderResponse
++ id: `2` (required, number) - Unique identifier
+- Include AmazonOrderBase
++ accountId: `1` (required, number) - Customer accountId
++ createdAt: `2017-06-05T10:14:46.982Z` (required, string) - Timestamp of when the record was created.
++ updatedAt: `2017-06-05T10:14:46.982Z` (required, string) - Timestamp of when the record was last updated.
+
+### AmazonOrderUpdatePayload
++ orderStatus: `Shipped` (required, string) - Order status
\ No newline at end of file
diff --git a/developer-portal/content/reference/resources/admin_amazon_orders.apib b/developer-portal/content/reference/resources/admin_amazon_orders.apib
new file mode 100644
index 0000000000..54e9620934
--- /dev/null
+++ b/developer-portal/content/reference/resources/admin_amazon_orders.apib
@@ -0,0 +1,21 @@
+## Amazon Orders
+
+All orders from the Amazon sales channel are duplicated in the Fox platform.
+Orders are fetched by the platform automatically, but they can also be added via the API.
+
+The `CREATE` call is idempotent: if an order with the given `amazonOrderId`
+already exists, the previously created order is returned.
+
+### Create [POST /v1/amazon-orders]
+
++ Request (application/json)
+ + Attributes (AmazonOrderPayload)
++ Response 200 (application/json)
+ + Attributes (AmazonOrderResponse)
+
+### Update [PATCH /v1/amazon-orders/{amazonOrderId}]
+
++ Request (application/json)
+ + Attributes(AmazonOrderUpdatePayload)
++ Response 200 (application/json)
+ + Attributes (AmazonOrderResponse)
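To make the idempotency contract above concrete, here is a minimal client-side sketch. The payload fields come from `AmazonOrderPayload`; the helper name, base-URL handling, and `JWT` header are assumptions for illustration, not part of this spec.

```typescript
// Hedged sketch of the idempotent create call against /v1/amazon-orders.
type AmazonOrderPayload = {
  amazonOrderId: string;
  orderTotal: number;          // cents
  paymentMethodDetail: string;
  orderType: string;
  currency: string;
  orderStatus: string;
  purchaseDate: string;        // ISO-8601 timestamp
  scope: string;
  customerEmail: string;
};

async function createAmazonOrder(baseUrl: string, jwt: string, payload: AmazonOrderPayload) {
  // Re-posting the same amazonOrderId returns the previously created order.
  const res = await fetch(`${baseUrl}/v1/amazon-orders`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', JWT: jwt },
    body: JSON.stringify(payload),
  });
  if (!res.ok) throw new Error(`amazon-orders create failed: ${res.status}`);
  return res.json(); // AmazonOrderResponse
}
```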
diff --git a/green-river/README.md b/green-river/README.md
index 233767f86f..d638092f75 100644
--- a/green-river/README.md
+++ b/green-river/README.md
@@ -2,3 +2,10 @@
An event-sourcing system based on Kafka that utilizes bottledwater to capture all of the changes that occur in Postgres and pipe them into Kafka.
It's built in Scala and powers logging and searching capabilities in the system.
+
+### tasks
+
+All tasks should be run with env `localhost` when running green-river locally.
+
+- To create mappings run `sbt -Denv=localhost createMappings`
+- To start Green-river run `sbt -Denv=localhost '~re-start'`
diff --git a/green-river/src/main/resources/application.conf b/green-river/src/main/resources/application.conf
index d41f819102..4cf51228f8 100644
--- a/green-river/src/main/resources/application.conf
+++ b/green-river/src/main/resources/application.conf
@@ -7,7 +7,7 @@ kafka.indices {
]
admin = [
- "failed_authorizations_search_view",
+ "failed_authorizations_search_view"
]
}
diff --git a/hyperion/.env.test b/hyperion/.env.test
index 8c93eabd88..bb9ecae0dc 100644
--- a/hyperion/.env.test
+++ b/hyperion/.env.test
@@ -4,9 +4,6 @@ HYPERION_DB_PASSWORD=''
HYPERION_DB_NAME=hyperion_test
HYPERION_DB_HOST=localhost
-# misc
-PUSH_CHECK_INTERVAL=5
-
# AWS
AWS_ACCESS_KEY_ID=AWK_KEY
AWS_SECRET_ACCESS_KEY=AWS_SECRET
@@ -14,3 +11,6 @@ AWS_SECRET_ACCESS_KEY=AWS_SECRET
# MWS
MWS_ACCESS_KEY_ID=MWS_KEY
MWS_SECRET_ACCESS_KEY=MWS_SECRET
+
+# misc
+PUSH_CHECK_INTERVAL=5
diff --git a/hyperion/Dockerfile.seed b/hyperion/Dockerfile.seed
index 8418bde6f5..ce40fa528c 100644
--- a/hyperion/Dockerfile.seed
+++ b/hyperion/Dockerfile.seed
@@ -11,6 +11,7 @@ ARG db_name="hyperion"
ARG db_user="hyperion"
ARG db_password=""
ARG push_check_interval=5
+ARG orders_fetch_interval=1440
ARG hyperion_env=prod
ARG hyperion_port=55555
@@ -19,6 +20,7 @@ ENV HYPERION_DB_USER $db_user
ENV HYPERION_DB_PASSWORD $db_password
ENV HYPERION_DB_HOST $db_host
ENV PUSH_CHECK_INTERVAL $push_check_interval
+ENV ORDERS_FETCH_INTERVAL $orders_fetch_interval
ENV MIX_ENV $hyperion_env
ENV PORT $hyperion_port
diff --git a/hyperion/config/config.exs b/hyperion/config/config.exs
index 88886273d7..6a9205c155 100644
--- a/hyperion/config/config.exs
+++ b/hyperion/config/config.exs
@@ -21,6 +21,7 @@ config :hyperion,
config :hyperion,
public_key: System.get_env("PUBLIC_KEY"),
push_check_interval: System.get_env("PUSH_CHECK_INTERVAL"),
+ orders_fetch_interval: System.get_env("ORDERS_FETCH_INTERVAL"),
create_plugin: System.get_env("CREATE_ASHES_PLUGIN")
# This configuration is loaded before any dependency and is restricted
diff --git a/hyperion/env.dev.template b/hyperion/env.dev.template
index 97bcc34e04..d653701eab 100644
--- a/hyperion/env.dev.template
+++ b/hyperion/env.dev.template
@@ -20,3 +20,8 @@ PHOENIX_URL=your-developer-appliance-url
PHOENIX_PASSWORD=api-password
PHOENIX_USER=user
PHOENIX_ORG=org
+
+# misc
+PUSH_CHECK_INTERVAL=5
+CREATE_ASHES_PLUGIN=true
+
diff --git a/hyperion/lib/hyperion.ex b/hyperion/lib/hyperion.ex
index a43580ded8..1917fbdbeb 100644
--- a/hyperion/lib/hyperion.ex
+++ b/hyperion/lib/hyperion.ex
@@ -9,6 +9,13 @@ defmodule Hyperion do
Envy.reload_config
end
+ # Create plugin on app start only if ENV var defined
+ # Pattern match against "true" because of Marathon
+ case Application.fetch_env(:hyperion, :create_plugin) do
+ {:ok, "true"} -> Hyperion.PhoenixScala.Client.create_amazon_plugin_in_ashes()
+ _ -> nil
+ end
+
children = [
worker(Hyperion.Repo, []),
worker(Hyperion.Amazon.Workers.CustomersOrdersWorker, []),
@@ -16,11 +23,6 @@ defmodule Hyperion do
worker(Hyperion.MWSAuth, [])
]
- # Create plugin on app start only if ENV var defined
- case Application.fetch_env(:hyperion, :create_plugin) do
- {:ok, "true"} -> Hyperion.PhoenixScala.Client.create_amazon_plugin_in_ashes()
- _ -> nil
- end
opts = [strategy: :one_for_one, name: Hyperion.Supervisor]
Supervisor.start_link(children, opts)
diff --git a/hyperion/lib/hyperion/amazon/amazon.ex b/hyperion/lib/hyperion/amazon/amazon.ex
index bae94b0df2..b841b42c4a 100644
--- a/hyperion/lib/hyperion/amazon/amazon.ex
+++ b/hyperion/lib/hyperion/amazon/amazon.ex
@@ -1,6 +1,5 @@
defmodule Hyperion.Amazon do
alias Hyperion.PhoenixScala.Client, warn: true
- alias Hyperion.JwtAuth, warn: true
alias Hyperion.Amazon.Credentials, warn: true
require Logger
@@ -76,13 +75,19 @@ defmodule Hyperion.Amazon do
def fetch_config do
token = Client.login
fetch_config(token)
- # Credentials.mws_config(jwt.scope)
end
def fetch_config(token) do
Credentials.mws_config(token)
end
+ def safe_fetch_config() do
+ case Client.safe_login() do
+ nil -> nil
+ token -> Credentials.safe_mws_config(token)
+ end
+ end
+
def get_full_order(order_id, token) do
try do
cfg = fetch_config(token)
@@ -110,7 +115,7 @@ defmodule Hyperion.Amazon do
result: %{
referenceNumber: order["AmazonOrderId"],
paymentState: "Captured",
- line_items: %{
+ lineItems: %{
skus: get_sku_map(items, token),
},
lineItemAdjustments: [],
@@ -121,10 +126,7 @@ defmodule Hyperion.Amazon do
adjustments: 0,
total: String.to_float(order["OrderTotal"]["Amount"]) * 100 |> round
},
- customer: %{
- email: order["BuyerEmail"],
- name: order["BuyerName"]
- },
+ customer: Client.get_customer_by_email(order["BuyerEmail"], token).body,
shippingMethod: %{
id: 0,
name: order["ShipmentServiceLevelCategory"],
@@ -146,14 +148,14 @@ defmodule Hyperion.Amazon do
zip: order["ShippingAddress"]["PostalCode"],
isDefault: false
},
- paymentMethods: %{
+ paymentMethods: [%{
id: 0,
amount: (String.to_float(order["OrderTotal"]["Amount"]) * 100 |> round),
currentBalance: 0,
availableBalance: 0,
createdAt: order["PurchaseDate"],
- type: order["PaymentMethodDetails"]["PaymentMethodDetail"]
- },
+ type: lower_first(order["PaymentMethodDetails"]["PaymentMethodDetail"])
+ }],
orderState: order["OrderStatus"],
shippingState: "---",
fraudScore: 0,
@@ -222,6 +224,20 @@ defmodule Hyperion.Amazon do
}]
end
+ def get_sku_image(sku, token) do
+ try do
+ sku = Client.get_sku(sku, token)
+ case sku.body["errors"] do
+ nil ->
+ first_image = hd(sku.body["albums"])["images"] |> hd
+ first_image["src"]
+ err -> raise %AmazonError{message: "#{err}"}
+ end
+ rescue _e in PhoenixError ->
+ nil
+ end
+ end
+
defp get_country_id(code, token) do
resp = Client.get_countries(token)
Enum.filter(resp.body, fn(cnt) -> cnt["alpha2"] == code end)
@@ -229,35 +245,32 @@ defmodule Hyperion.Amazon do
|> get_in(["id"])
end
- defp get_sku_image(sku, token) do
- sku = Client.get_sku(sku, token)
- case sku.body["errors"] do
- nil ->
- first_image = hd(sku.body["albums"])["images"] |> hd
- first_image["src"]
- err -> raise %AmazonError{message: "#{err}"}
- end
- end
- # gets the albums and images for skus,
+
+ # Gets the albums and images for Amazon-enabled SKUs,
# adds the SKU for matching
# renders main, pt and swatch images
defp process_images(response) do
r = response.body
- albums = Enum.map(r["skus"], fn(sku)->
- Enum.map(sku["albums"], fn(album) ->
- {String.to_atom(album["name"]), album["images"]}
- end)
- end) |> Enum.reject(fn(list) -> list == [] end)
+ amazon_skus = Enum.filter(r["skus"], fn (sku) ->
+ sku["attributes"]["amazon"]["v"] == true
+ end)
- case albums do
+ case amazon_skus do
[] -> raise "No images for product #{r["id"]}"
- _ -> Enum.map(r["skus"], fn(x)-> [albums: hd(albums), code: x["attributes"]["code"]["v"]] end)
+ _ -> amazon_skus
end
- |> Enum.flat_map(fn(product) ->
- main = render_main_section(hd(product[:albums]), product[:code])
- [main, render_swatch_section(product[:albums][:swatches], product[:code], Enum.count(main))]
- end) |> Enum.reject(fn el -> el == nil end)
+ |> Enum.map(fn(sku)->
+ images = Enum.map(sku["albums"], fn(album) ->
+ {String.to_atom(album["name"]), album["images"]}
+ end)
+ [albums: images, code: sku["attributes"]["code"]["v"]]
+ end)
+ |> Enum.map(fn(sku) ->
+ main = render_main_section(hd(sku[:albums]), sku[:code], 1)
+ [main, render_swatch_section(sku[:albums][:swatches], sku[:code], Enum.count(main))]
+ end)
+ |> List.flatten |> Enum.reject(fn el -> el == nil end) |> Enum.with_index(1)
end
# renders main images section as:
@@ -266,19 +279,20 @@ defmodule Hyperion.Amazon do
# {[type: "PT",
# location: "http:", id: 1],
# 2}],
- defp render_main_section({_, [h|[]]}, sku) do
- [{[sku: sku, type: "Main", location: String.replace(h["src"], "https", "http")], 1}]
+ defp render_main_section({_, [h|t]}, sku, _idx) when t == [] do
+ IO.puts("t is []")
+ [{sku, "Main", String.replace(h["src"], "https", "http")}]
end
- defp render_main_section({_, [h|t]}, sku) do
- main = [sku: sku, type: "Main", location: String.replace(h["src"], "https", "http")]
+ defp render_main_section({_, [h|t]}, sku, _idx) do
+ main = {sku, "Main", String.replace(h["src"], "https", "http")}
pt = Enum.with_index(t, 1)
- |> Enum.flat_map(fn({img, idx}) ->
- [sku: sku, type: "PT", location: String.replace(img["src"], "https", "http"), id: idx]
+ |> Enum.map(fn({img, idx}) ->
+ {sku, "PT", String.replace(img["src"], "https", "http"), idx}
end)
[main, pt]
- |> Enum.with_index(1)
+ # |> Enum.with_index(idx)
end
def render_swatch_section(nil, _, _), do: nil
@@ -287,8 +301,8 @@ defmodule Hyperion.Amazon do
# [[type: Swatch, location: http://, id: id]]
def render_swatch_section(list, sku, initial) do
Enum.with_index(list, initial + 1)
- |> Enum.map(fn {image, idx} ->
- [sku: sku, type: "Swatch", location: String.replace(image["src"], "https", "http"), idx: idx]
+ |> Enum.map(fn {image, _idx} ->
+ {sku, "Swatch", String.replace(image["src"], "https", "http")}
end)
end
@@ -347,7 +361,7 @@ defmodule Hyperion.Amazon do
# assign variants options to associated sku
defp process_variants(variants) do
func = fn var ->
- {String.to_atom(var["attributes"]["name"]["v"]), atomize_keys(var["values"])}
+ {format_string(var["attributes"]["name"]["v"]), atomize_keys(var["values"])}
end
props = Enum.map(variants, func)
@@ -404,4 +418,11 @@ defmodule Hyperion.Amazon do
|> String.replace(~r/\s+/, "")
|> String.to_atom
end
+
+ defp lower_first(str) do
+ str
+ |> String.first
+ |> String.downcase
+ |> Kernel.<>(String.slice(str, 1, String.length(str)))
+ end
end
diff --git a/hyperion/lib/hyperion/amazon/credentials.ex b/hyperion/lib/hyperion/amazon/credentials.ex
index 82955f74bc..60dac09e3f 100644
--- a/hyperion/lib/hyperion/amazon/credentials.ex
+++ b/hyperion/lib/hyperion/amazon/credentials.ex
@@ -6,6 +6,18 @@ defmodule Hyperion.Amazon.Credentials do
|> build_cfg
end
+ def safe_mws_config(token) do
+ Client.safe_get_credentials(token)
+ |> build_cfg
+ end
+
+ defp build_cfg(client) when is_nil(client) do
+ %MWSClient.Config{aws_access_key_id: mws_access_key_id(),
+ aws_secret_access_key: mws_secret_access_key(),
+ seller_id: "",
+ mws_auth_token: "" }
+ end
+
defp build_cfg(client) do
%MWSClient.Config{aws_access_key_id: mws_access_key_id(),
aws_secret_access_key: mws_secret_access_key(),
diff --git a/hyperion/lib/hyperion/amazon/templates/submit_images.ex b/hyperion/lib/hyperion/amazon/templates/submit_images.ex
index 72a72fc069..a48c20efb4 100644
--- a/hyperion/lib/hyperion/amazon/templates/submit_images.ex
+++ b/hyperion/lib/hyperion/amazon/templates/submit_images.ex
@@ -3,7 +3,6 @@ defmodule Hyperion.Amazon.Templates.SubmitImages do
Renders whole images feed based on albums
More info here https://sellercentral.amazon.com/gp/help/200386840?ie=UTF8&*Version*=1&*entries*=0&
"""
-
def template_string do
"""
@@ -14,59 +13,40 @@ defmodule Hyperion.Amazon.Templates.SubmitImages do
<%= seller_id %>
ProductImage
- <%= Hyperion.Amazon.Templates.SubmitImages.render_main_image(hd(images)) %>
- <%= Hyperion.Amazon.Templates.SubmitImages.render_pt_images(hd(images)) %>
- <%= Hyperion.Amazon.Templates.SubmitImages.render_swatches(tl(images)) %>
+ <%= for image <- images do %>
+ <%= Hyperion.Amazon.Templates.SubmitImages.render_any_image(image) %>
+ <% end %>
"""
end
- def render_main_image([{main, message_id}|_]) do
+ def render_any_image({n, _idx}) when n == nil, do: ""
+
+ def render_any_image({{sku, type, src}, message_id}) do
"""
#{message_id}
Update
- #{main[:sku]}
- Main
- #{main[:location]}
+ #{sku}
+ #{type}
+ #{src}
"""
end
-
- def render_pt_images([_|pt_images]) do
- for {item, idx} <- pt_images do
- """
-
- #{idx}
- Update
-
- #{item[:sku]}
- #{item[:type]}#{item[:id]}
- #{item[:location]}
-
-
- """
- end
- end
-
- def render_swatches([]), do: ""
-
- def render_swatches([swatches|_]) do
- for swatch <- swatches do
+ def render_any_image({{sku, type, src, index}, message_id}) do
"""
- #{swatch[:idx]}
+ #{message_id}
Update
- #{swatch[:sku]}
- #{swatch[:type]}
- #{swatch[:location]}
+ #{sku}
+ #{type}#{index}
+ #{src}
"""
- end
end
end
diff --git a/hyperion/lib/hyperion/api.ex b/hyperion/lib/hyperion/api.ex
index a062e2c37f..536a012b6f 100644
--- a/hyperion/lib/hyperion/api.ex
+++ b/hyperion/lib/hyperion/api.ex
@@ -25,8 +25,7 @@ defmodule Hyperion.API do
def customer_id(conn) do
token = jwt(conn)
try do
- {:ok, data} = Hyperion.JwtAuth.verify(token)
- data[:scope]
+ Hyperion.JwtAuth.get_scope(token)
rescue RuntimeError ->
raise NotAllowed
end
diff --git a/hyperion/lib/hyperion/jwt_auth.ex b/hyperion/lib/hyperion/jwt_auth.ex
index a83d315a98..a4459f090e 100644
--- a/hyperion/lib/hyperion/jwt_auth.ex
+++ b/hyperion/lib/hyperion/jwt_auth.ex
@@ -9,6 +9,16 @@ defmodule Hyperion.JwtAuth do
JsonWebToken.verify(payload, opts)
end
+ @doc """
+ Verifies the token and returns the scope
+ """
+ def get_scope(token) do
+ case verify(token) do
+ {:ok, data} -> data[:scope]
+ _ -> raise RuntimeError
+ end
+ end
+
defp key do
RsaUtil.public_key(Application.fetch_env!(:hyperion, :public_key), "")
end
diff --git a/hyperion/lib/hyperion/models/pull_worker_history.ex b/hyperion/lib/hyperion/models/pull_worker_history.ex
new file mode 100644
index 0000000000..fdb26132cd
--- /dev/null
+++ b/hyperion/lib/hyperion/models/pull_worker_history.ex
@@ -0,0 +1,47 @@
+defmodule PullWorkerHistory do
+
+ use Ecto.Schema
+ use Timex.Ecto.Timestamps
+ import Ecto.Changeset
+ import Ecto.Query
+
+ @derive {Poison.Encoder, only: [:id, :last_run, :seller_id]}
+
+ schema "pull_worker_history" do
+ field :last_run, Timex.Ecto.DateTime
+ field :seller_id
+
+ timestamps()
+ end
+
+ def changeset(pull_worker_history, params \\ %{}) do
+ pull_worker_history
+ |> cast(params, [:last_run, :seller_id])
+ |> validate_required([:last_run, :seller_id])
+ end
+
+ @doc """
+ Gets the last run mark from the DB.
+ If it's the first run, the Amazon founding date is used as the search start.
+ """
+ def last_run_for(seller_id) do
+
+ q = (from h in PullWorkerHistory, where: h.seller_id == ^seller_id, order_by: [desc: h.id], limit: 1)
+ case Hyperion.Repo.all(q) do
+ [] -> Timex.parse!("1994-07-05", "%Y-%m-%d", :strftime)
+ r -> hd(r).last_run
+ end
+ end
+
+ @doc """
+ Stores the run mark in the DB.
+ If a mark for today already exists, returns it;
+ otherwise creates a new one.
+ """
+ def insert_run_mark(seller_id) do
+ date = Timex.beginning_of_day(Timex.now)
+ q = from p in PullWorkerHistory, where: p.last_run == ^date
+ res = %PullWorkerHistory{seller_id: seller_id, last_run: date}
+ Hyperion.Repo.one(q) || Hyperion.Repo.insert!(res)
+ end
+end
diff --git a/hyperion/lib/hyperion/phoenix_scala/client.ex b/hyperion/lib/hyperion/phoenix_scala/client.ex
index 26c85c446e..7adb6c2735 100644
--- a/hyperion/lib/hyperion/phoenix_scala/client.ex
+++ b/hyperion/lib/hyperion/phoenix_scala/client.ex
@@ -1,34 +1,50 @@
defmodule Hyperion.PhoenixScala.Client do
use HTTPoison.Base
+ require Logger
@moduledoc """
Provides simple access to Phoenix-scala API
"""
-
def process_url(path) do
{:ok, base_uri} = Application.fetch_env(:hyperion, :phoenix_url)
base_uri <> path
end
+ def email, do: Application.fetch_env!(:hyperion, :phoenix_email)
+ def password, do: Application.fetch_env!(:hyperion, :phoenix_password)
+ def org, do: Application.fetch_env!(:hyperion, :phoenix_org)
+ def login_params, do: Poison.encode!(%{email: email(), password: password(), org: org()})
+
+
@doc """
Gets a JWT from Phoenix. Avoid calling it often because it takes a lot of time.
"""
def login do
- email = Application.fetch_env!(:hyperion, :phoenix_email)
- password = Application.fetch_env!(:hyperion, :phoenix_password)
- org = Application.fetch_env!(:hyperion, :phoenix_org)
- params = Poison.encode!(%{email: email, password: password, org: org})
- case post("/api/v1/public/login", params, make_request_headers()) do
- {_, %{body: _, headers: headers, status_code: 200}} -> Keyword.take(headers, ["Jwt"]) |> hd |> elem(1)
+ params = login_params()
+ case post("/v1/public/login", params, make_request_headers()) do
+ {_, %{body: _, headers: headers, status_code: 200}} -> Keyword.take(headers, ["JWT"]) |> hd |> elem(1)
{_, %{body: resp, headers: _, status_code: _}} -> raise %PhoenixError{message: hd(resp["errors"])}
end
end
+ def safe_login() do
+ params = login_params()
+ case post("/v1/public/login", params, make_request_headers()) do
+ {_, %{body: _, headers: headers, status_code: 200}} -> Keyword.take(headers, ["JWT"]) |> hd |> elem(1)
+ {:ok, %{body: body, headers: _, status_code: 502}} ->
+ Logger.error "Some error occured on login: #{body}"
+ nil
+ {:error, %HTTPoison.Error{id: _, reason: reason}} ->
+ Logger.error "Some error occured on login: #{reason}"
+ nil
+ end
+ end
+
@doc """
Returns product by id
"""
def get_product(product_id, token, ctx \\ "default") do
- get("/api/v1/products/#{ctx}/#{product_id}", make_request_headers(token))
+ get("/v1/products/#{ctx}/#{product_id}", make_request_headers(token))
|> parse_response(token)
end
@@ -36,16 +52,7 @@ defmodule Hyperion.PhoenixScala.Client do
Returns sku by SKU-CODE
"""
def get_sku(sku_code, token, ctx \\ "default") do
- get("/api/v1/skus/#{ctx}/#{sku_code}", make_request_headers(token))
- |> parse_response(token)
- end
-
- @doc """
- Returns all non archived skus
- """
- def get_all_skus(token, size \\ 50) do
- q = %{query: %{bool: %{filter: [%{missing: %{field: "archivedAt"}}]}}}
- post("/api/search/admin/sku_search_view/_search?size=#{size}", Poison.encode!(q), make_request_headers(token))
+ get("/v1/skus/#{ctx}/#{sku_code}", make_request_headers(token))
|> parse_response(token)
end
@@ -53,7 +60,7 @@ defmodule Hyperion.PhoenixScala.Client do
Return all countries from Phoenix
"""
def get_countries(token) do
- get("/api/v1/public/countries", make_request_headers(token))
+ get("/v1/public/countries", make_request_headers(token))
|> parse_response(token)
end
@@ -62,39 +69,80 @@ defmodule Hyperion.PhoenixScala.Client do
"""
def get_countries do
token = login()
- get("/api/v1/public/countries", make_request_headers(token))
+ get("/v1/public/countries", make_request_headers(token))
|> parse_response(token)
end
def get_regions(country_id) do
token = login()
- get("/api/v1/public/countries/#{country_id}", make_request_headers(token))
+ get("/v1/public/countries/#{country_id}", make_request_headers(token))
|> parse_response(token)
end
def get_regions(country_id, token) do
- get("/api/v1/public/countries/#{country_id}", make_request_headers(token))
+ get("/v1/public/countries/#{country_id}", make_request_headers(token))
+ |> parse_response(token)
+ end
+
+ def get_customer_by_email(email, token) do
+ get("/v1/customers/email/#{email}", make_request_headers(token))
+ |> parse_response(token)
+ end
+
+ def get_customer_by_email(email) do
+ token = login()
+ get("/v1/customers/email/#{email}", make_request_headers(token))
|> parse_response(token)
end
@doc """
Creates new customer in Phoenix from Amazon order
"""
- def create_customer(%{name: name, email: email}) do
- params = Poison.encode!(%{name: name, email: email})
- token = login()
- {st, resp} = post("/api/v1/customers", params, make_request_headers(token))
+ def create_customer(payload, token) do
+ params = Poison.encode!(%{name: payload["BuyerName"], email: payload["BuyerEmail"]})
+ {_, resp} = post("/v1/customers", params, make_request_headers(token))
+ case resp.status_code do
+ # status_code = 400 means customer already exists
+ code when code in [200, 400] -> payload
+ _ ->
+ Logger.error("Customer creation error: #{inspect(resp)}")
+ raise %PhoenixError{message: inspect(resp)}
+ end
+ end
+
+ def create_order(payload, token) do
+ params = Poison.encode!(%{amazonOrderId: payload["AmazonOrderId"],
+ orderTotal: String.to_float(payload["OrderTotal"]["Amount"]) * 100,
+ paymentMethodDetail: payload["PaymentMethodDetails"]["PaymentMethodDetail"],
+ orderType: payload["OrderType"],
+ currency: payload["OrderTotal"]["Currency"],
+ orderStatus: payload["OrderStatus"],
+ purchaseDate: payload["PurchaseDate"],
+ scope: Hyperion.JwtAuth.get_scope(token),
+ customerName: payload["BuyerName"],
+ customerEmail: payload["BuyerEmail"]})
+ {st, resp} = post("/v1/amazon-orders", params, make_request_headers(token))
case resp.status_code do
- code when code == 200 -> parse_response({st, resp}, token)
- _ -> %{status: resp.status_code, error: resp.body["errors"]}
+ code when code in [200, 201] -> parse_response({st, resp}, token)
+ _ ->
+ Logger.error("Order creation error: #{inspect(resp)}")
+ raise %PhoenixError{message: inspect(resp)}
end
end
+ def create_order_and_customer(payload) do
+ token = login()
+ create_customer(payload, token)
+ |> create_order(token)
+ end
+
@doc """
Returns MWS credentials stored in Phoenix plugins
"""
+ def get_credentials(token) when token == nil, do: nil
+
def get_credentials(token) do
- {_, resp} = get("/api/v1/plugins/settings/AmazonMWS/detailed", make_request_headers(token))
+ {_, resp} = get("/v1/plugins/settings/AmazonMWS/detailed", make_request_headers(token))
case resp.status_code do
code when code in [200, 201] ->
resp.body["settings"]
@@ -103,6 +151,21 @@ defmodule Hyperion.PhoenixScala.Client do
end
end
+ @doc """
+ Does not raise an error if no credentials are set
+ """
+ def safe_get_credentials(token) do
+ {_, resp} = get("/v1/plugins/settings/AmazonMWS/detailed", make_request_headers(token))
+ case resp.status_code do
+ code when code in [200, 201] ->
+ resp.body["settings"]
+ |> Enum.reduce(%{}, fn({k, v}, acc) -> Map.put(acc, String.to_atom(k), v) end)
+ _ ->
+ Logger.error "Error while getting credentials #{inspect(resp)}"
+ nil
+ end
+ end
+
@doc """
Creates Amazon MWS plugin in phoenix on Hyperion start
"""
@@ -117,7 +180,7 @@ defmodule Hyperion.PhoenixScala.Client do
%{"default" => "", "name" => "mws_auth_token",
"title" => "Amazon MWS Auth Token", "type" => "string"}]
}
- post("/api/v1/plugins/register", Poison.encode!(params), make_request_headers(token))
+ post("/v1/plugins/register", Poison.encode!(params), make_request_headers(token))
|> parse_response(token)
end
diff --git a/hyperion/lib/hyperion/workers/amazon/customers_orders_worker.ex b/hyperion/lib/hyperion/workers/amazon/customers_orders_worker.ex
index d67ace4ba3..690a85c589 100644
--- a/hyperion/lib/hyperion/workers/amazon/customers_orders_worker.ex
+++ b/hyperion/lib/hyperion/workers/amazon/customers_orders_worker.ex
@@ -10,8 +10,12 @@ defmodule Hyperion.Amazon.Workers.CustomersOrdersWorker do
end
def init(state) do
- schedule_work()
- {:ok, state}
+ case Mix.env do
+ :test -> {:ok, state}
+ _ ->
+ schedule_work()
+ {:ok, state}
+ end
end
def handle_info(:work, state) do
@@ -22,38 +26,56 @@ defmodule Hyperion.Amazon.Workers.CustomersOrdersWorker do
defp do_work() do
try do
- fetch_amazon_orders()
- |> store_customers()
+ get_credentials()
+ |> fetch_amazon_orders
+ |> store_customers_and_orders()
rescue e in RuntimeError ->
Logger.error "Error while fetching orders from Amazon: #{e.message}"
end
end
- # TODO: Add order saving into phoenix
- defp fetch_amazon_orders do
- date = Timex.beginning_of_day(Timex.now)
- |> Timex.format!("%Y-%m-%dT%TZ", :strftime)
+ def get_credentials() do
+ cfg = Amazon.safe_fetch_config()
+ if String.strip(cfg.seller_id) != "" do
+ cfg
+ else
+ schedule_work()
+ raise "Credentials not set. Exiting."
+ end
+ end
+
+ defp fetch_amazon_orders(cfg) do
+ date = PullWorkerHistory.last_run_for(cfg.seller_id)
+ |> Timex.format!("%Y-%m-%dT%H:%M:%SZ", :strftime)
list = [fulfillment_channel: ["MFN", "AFN"],
created_after: [date]]
- case MWSClient.list_orders(list, Amazon.fetch_config()) do
+ Logger.info("Fetching order with params: #{inspect(list)}")
+
+ case MWSClient.list_orders(list, cfg) do
{:error, error} -> raise inspect(error)
{:warn, warn} -> raise warn["ErrorResponse"]["Error"]["Message"]
- {_, resp} -> resp["ListOrdersResponse"]["ListOrdersResult"]["Orders"]
+ {_, resp} ->
+ Logger.info("Orders fetched: #{inspect(resp)}")
+ PullWorkerHistory.insert_run_mark(cfg.seller_id)
+ resp["ListOrdersResponse"]["ListOrdersResult"]
end
end
- defp store_customers(orders) do
+ defp store_customers_and_orders(orders) do
case orders["Orders"]["Order"] do
list when is_list(list) -> Enum.each(list, fn order ->
- Client.create_customer(%{name: order["Order"]["BuyerName"],
- email: order["Order"]["BuyerEmail"]})
- end)
- map when is_map(map) -> Client.create_customer(%{name: map["BuyerName"], email: map["BuyerEmail"]})
- empty when empty in [%{}, []] -> nil
+ Client.create_order_and_customer(order)
+ end)
+ map when is_map(map) -> Client.create_order_and_customer(map)
+ nil -> Logger.info "No orders present: #{inspect(orders)}"
+ _ -> Logger.error "Some error occured! #{inspect(orders)}"
end
end
- defp schedule_work do
- Process.send_after(self(), :work, 24 * 60 * 60 * 1000) # In 24 hours
+ defp schedule_work() do
+ mins = Application.fetch_env!(:hyperion, :orders_fetch_interval) |> String.to_integer
+ next_run = Timex.shift(Timex.now, minutes: mins) |> Timex.format!("{ISO:Extended}")
+ Logger.info "Scheduling #{__MODULE__} for next run at: #{next_run}. Run Interval is set to #{mins} minute(s)"
+ Process.send_after(self(), :work, mins * 60 * 1000)
end
-end
\ No newline at end of file
+end
diff --git a/hyperion/lib/hyperion/workers/amazon/push_checker_worker.ex b/hyperion/lib/hyperion/workers/amazon/push_checker_worker.ex
index d862694b8a..7397553b40 100644
--- a/hyperion/lib/hyperion/workers/amazon/push_checker_worker.ex
+++ b/hyperion/lib/hyperion/workers/amazon/push_checker_worker.ex
@@ -78,6 +78,8 @@ defmodule Hyperion.Amazon.Workers.PushCheckerWorker do
defp schedule_work do
mins = Application.fetch_env!(:hyperion, :push_check_interval) |> String.to_integer
+ next_run = Timex.shift(Timex.now, minutes: mins) |> Timex.format!("{ISO:Extended}")
+ Logger.info "Scheduling #{__MODULE__} for next run at: #{next_run}. Run Interval is set to #{mins} minute(s)"
Process.send_after(self(), :work, mins * 60 * 1000)
end
end
diff --git a/hyperion/mix.exs b/hyperion/mix.exs
index 3670a4a07f..bd9ef75562 100644
--- a/hyperion/mix.exs
+++ b/hyperion/mix.exs
@@ -44,6 +44,7 @@ defmodule Hyperion.Mixfile do
{:mws_client, github: "FoxComm/elixir-amazon-mws-client"},
{:json_web_token, "~> 0.2"},
{:envy, "~> 1.0.0"},
+ {:timex_ecto, "~> 3.0"},
{:exsync, "~> 0.1", only: :dev},
{:espec, "~> 1.3.2", only: :test},
{:ex_machina, "~> 2.0", only: :test}]
diff --git a/hyperion/mix.lock b/hyperion/mix.lock
index 303bd0106b..4a8306b832 100644
--- a/hyperion/mix.lock
+++ b/hyperion/mix.lock
@@ -1,42 +1,43 @@
-%{"certifi": {:hex, :certifi, "1.0.0", "1c787a85b1855ba354f0b8920392c19aa1d06b0ee1362f9141279620a5be2039", [:rebar3], []},
- "combine": {:hex, :combine, "0.9.6", "8d1034a127d4cbf6924c8a5010d3534d958085575fa4d9b878f200d79ac78335", [:mix], []},
- "connection": {:hex, :connection, "1.0.4", "a1cae72211f0eef17705aaededacac3eb30e6625b04a6117c1b2db6ace7d5976", [:mix], []},
- "cors_plug": {:hex, :cors_plug, "1.2.1", "bbe1381a52e4a16e609cf3c4cbfde6884726a58b9a1a205db104dbdfc542f447", [:mix], [{:plug, "> 0.8.0", [hex: :plug, optional: false]}]},
- "cowboy": {:hex, :cowboy, "1.1.2", "61ac29ea970389a88eca5a65601460162d370a70018afe6f949a29dca91f3bb0", [:rebar3], [{:cowlib, "~> 1.0.2", [hex: :cowlib, optional: false]}, {:ranch, "~> 1.3.2", [hex: :ranch, optional: false]}]},
- "cowlib": {:hex, :cowlib, "1.0.2", "9d769a1d062c9c3ac753096f868ca121e2730b9a377de23dec0f7e08b1df84ee", [:make], []},
- "csv": {:hex, :csv, "1.4.4", "992f2e1418849a326fd1d9287801fa2d86091db4f9611f60781da6d236f64cd4", [:mix], [{:parallel_stream, "~> 1.0.4", [hex: :parallel_stream, optional: false]}]},
- "db_connection": {:hex, :db_connection, "1.1.1", "f9d246e8f65b9490945cf7360875eee18fcec9a0115207603215eb1fd94c39ef", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, optional: false]}, {:poolboy, "~> 1.5", [hex: :poolboy, optional: true]}, {:sbroker, "~> 1.0", [hex: :sbroker, optional: true]}]},
- "decimal": {:hex, :decimal, "1.3.1", "157b3cedb2bfcb5359372a7766dd7a41091ad34578296e951f58a946fcab49c6", [:mix], []},
- "ecto": {:hex, :ecto, "2.1.4", "d1ba932813ec0e0d9db481ef2c17777f1cefb11fc90fa7c142ff354972dfba7e", [:mix], [{:db_connection, "~> 1.1", [hex: :db_connection, optional: true]}, {:decimal, "~> 1.2", [hex: :decimal, optional: false]}, {:mariaex, "~> 0.8.0", [hex: :mariaex, optional: true]}, {:poison, "~> 2.2 or ~> 3.0", [hex: :poison, optional: true]}, {:poolboy, "~> 1.5", [hex: :poolboy, optional: false]}, {:postgrex, "~> 0.13.0", [hex: :postgrex, optional: true]}, {:sbroker, "~> 1.0", [hex: :sbroker, optional: true]}]},
+%{"certifi": {:hex, :certifi, "1.0.0", "1c787a85b1855ba354f0b8920392c19aa1d06b0ee1362f9141279620a5be2039", [:rebar3], [], "hexpm"},
+ "combine": {:hex, :combine, "0.9.6", "8d1034a127d4cbf6924c8a5010d3534d958085575fa4d9b878f200d79ac78335", [:mix], [], "hexpm"},
+ "connection": {:hex, :connection, "1.0.4", "a1cae72211f0eef17705aaededacac3eb30e6625b04a6117c1b2db6ace7d5976", [:mix], [], "hexpm"},
+ "cors_plug": {:hex, :cors_plug, "1.2.1", "bbe1381a52e4a16e609cf3c4cbfde6884726a58b9a1a205db104dbdfc542f447", [:mix], [{:plug, "> 0.8.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
+ "cowboy": {:hex, :cowboy, "1.1.2", "61ac29ea970389a88eca5a65601460162d370a70018afe6f949a29dca91f3bb0", [:rebar3], [{:cowlib, "~> 1.0.2", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "~> 1.3.2", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm"},
+ "cowlib": {:hex, :cowlib, "1.0.2", "9d769a1d062c9c3ac753096f868ca121e2730b9a377de23dec0f7e08b1df84ee", [:make], [], "hexpm"},
+ "csv": {:hex, :csv, "1.4.4", "992f2e1418849a326fd1d9287801fa2d86091db4f9611f60781da6d236f64cd4", [:mix], [{:parallel_stream, "~> 1.0.4", [hex: :parallel_stream, repo: "hexpm", optional: false]}], "hexpm"},
+ "db_connection": {:hex, :db_connection, "1.1.1", "f9d246e8f65b9490945cf7360875eee18fcec9a0115207603215eb1fd94c39ef", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}, {:poolboy, "~> 1.5", [hex: :poolboy, repo: "hexpm", optional: true]}, {:sbroker, "~> 1.0", [hex: :sbroker, repo: "hexpm", optional: true]}], "hexpm"},
+ "decimal": {:hex, :decimal, "1.3.1", "157b3cedb2bfcb5359372a7766dd7a41091ad34578296e951f58a946fcab49c6", [:mix], [], "hexpm"},
+ "ecto": {:hex, :ecto, "2.1.4", "d1ba932813ec0e0d9db481ef2c17777f1cefb11fc90fa7c142ff354972dfba7e", [:mix], [{:db_connection, "~> 1.1", [hex: :db_connection, repo: "hexpm", optional: true]}, {:decimal, "~> 1.2", [hex: :decimal, repo: "hexpm", optional: false]}, {:mariaex, "~> 0.8.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:poison, "~> 2.2 or ~> 3.0", [hex: :poison, repo: "hexpm", optional: true]}, {:poolboy, "~> 1.5", [hex: :poolboy, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.13.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:sbroker, "~> 1.0", [hex: :sbroker, repo: "hexpm", optional: true]}], "hexpm"},
"elixir_xml_to_map": {:git, "https://github.com/retgoat/elixir-xml-to-map.git", "c5171c8173ec72fb35d306eb3b8fb6e03741a4e2", [tag: "0.1.2"]},
- "envy": {:hex, :envy, "1.0.0", "d8385715b908f3972f85cbffa0ffbd36fec017601258bcb6be9003c11ece3e75", [:mix], []},
- "erlsom": {:hex, :erlsom, "1.4.1", "53dbacf35adfea6f0714fd0e4a7b0720d495e88c5e24e12c5dc88c7b62bd3e49", [:rebar3], []},
- "espec": {:hex, :espec, "1.3.2", "20c96d580df671860bd8e7de1523be9ead34bbdca4912689a54f295add56d683", [:mix], [{:meck, "0.8.4", [hex: :meck, optional: false]}]},
- "ex_aws": {:hex, :ex_aws, "1.1.1", "58c46d9a0b395d7cbb065424c109feb6a7385907c12a91d3032a1e8baf3d6220", [:mix], [{:configparser_ex, "~> 0.2.1", [hex: :configparser_ex, optional: true]}, {:hackney, "1.6.3 or 1.6.5 or 1.7.1", [hex: :hackney, optional: true]}, {:jsx, "~> 2.8", [hex: :jsx, optional: true]}, {:poison, ">= 1.2.0", [hex: :poison, optional: true]}, {:sweet_xml, "~> 0.6", [hex: :sweet_xml, optional: true]}, {:xml_builder, "~> 0.0.6", [hex: :xml_builder, optional: true]}]},
- "ex_machina": {:hex, :ex_machina, "2.0.0", "ec284c6f57233729cea9319e083f66e613e82549f78eccdb2059aeba5d0df9f3", [:mix], [{:ecto, "~> 2.1", [hex: :ecto, optional: true]}]},
- "exfswatch": {:hex, :exfswatch, "0.2.1", "3f34190e2750ae6d924ce53729bb40e050dd8a08fee683847e6ddfa03ea5429f", [:mix], [{:fs, "~> 0.9", [hex: :fs, optional: false]}]},
- "exsync": {:hex, :exsync, "0.1.3", "84c66a4f60505d1baeb8a79cbf713ae98276f23b4bc7d676c6cb2d663fced5f3", [:mix], [{:exfswatch, "~> 0.2.1", [hex: :exfswatch, optional: false]}]},
- "fs": {:hex, :fs, "0.9.2", "ed17036c26c3f70ac49781ed9220a50c36775c6ca2cf8182d123b6566e49ec59", [:rebar], []},
- "gettext": {:hex, :gettext, "0.13.1", "5e0daf4e7636d771c4c71ad5f3f53ba09a9ae5c250e1ab9c42ba9edccc476263", [:mix], []},
- "hackney": {:hex, :hackney, "1.7.1", "e238c52c5df3c3b16ce613d3a51c7220a784d734879b1e231c9babd433ac1cb4", [:rebar3], [{:certifi, "1.0.0", [hex: :certifi, optional: false]}, {:idna, "4.0.0", [hex: :idna, optional: false]}, {:metrics, "1.0.1", [hex: :metrics, optional: false]}, {:mimerl, "1.0.2", [hex: :mimerl, optional: false]}, {:ssl_verify_fun, "1.1.1", [hex: :ssl_verify_fun, optional: false]}]},
- "html_sanitize_ex": {:hex, :html_sanitize_ex, "1.0.1", "2572e7122c78ab7e57b613e7c7f5e42bf9b3c25e430e32f23f1413d86db8a0af", [:mix], [{:mochiweb, "~> 2.12.2", [hex: :mochiweb, optional: false]}]},
- "httpoison": {:hex, :httpoison, "0.11.1", "d06c571274c0e77b6cc50e548db3fd7779f611fbed6681fd60a331f66c143a0b", [:mix], [{:hackney, "~> 1.7.0", [hex: :hackney, optional: false]}]},
- "idna": {:hex, :idna, "4.0.0", "10aaa9f79d0b12cf0def53038547855b91144f1bfcc0ec73494f38bb7b9c4961", [:rebar3], []},
- "inflex": {:hex, :inflex, "1.7.0", "4466a34b7d8e871d8164619ba0f3b8410ec782e900f0ae1d3d27a5875a29532e", [:mix], []},
- "json_web_token": {:hex, :json_web_token, "0.2.8", "c79d4c36cfd6f205be7099713e67d6057bde64ee9c64363f9001e3de86de703c", [:mix], [{:poison, "~> 3.1", [hex: :poison, optional: false]}]},
- "maru": {:hex, :maru, "0.11.3", "0bf2f26955430c4878dab91fe44aeb8b3aaed338b4f6ffd0729ea119284c374d", [:mix], [{:cowboy, "~> 1.0", [hex: :cowboy, optional: false]}, {:plug, "~> 1.0", [hex: :plug, optional: false]}, {:poison, "~> 1.5 or ~> 2.0 or ~> 3.0", [hex: :poison, optional: false]}]},
- "meck": {:hex, :meck, "0.8.4", "59ca1cd971372aa223138efcf9b29475bde299e1953046a0c727184790ab1520", [:make, :rebar], []},
- "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], []},
- "mime": {:hex, :mime, "1.1.0", "01c1d6f4083d8aa5c7b8c246ade95139620ef8effb009edde934e0ec3b28090a", [:mix], []},
- "mimerl": {:hex, :mimerl, "1.0.2", "993f9b0e084083405ed8252b99460c4f0563e41729ab42d9074fd5e52439be88", [:rebar3], []},
- "mochiweb": {:hex, :mochiweb, "2.12.2", "80804ad342afa3d7f3524040d4eed66ce74b17a555de454ac85b07c479928e46", [:make, :rebar], []},
+ "envy": {:hex, :envy, "1.0.0", "d8385715b908f3972f85cbffa0ffbd36fec017601258bcb6be9003c11ece3e75", [:mix], [], "hexpm"},
+ "erlsom": {:hex, :erlsom, "1.4.1", "53dbacf35adfea6f0714fd0e4a7b0720d495e88c5e24e12c5dc88c7b62bd3e49", [:rebar3], [], "hexpm"},
+ "espec": {:hex, :espec, "1.3.2", "20c96d580df671860bd8e7de1523be9ead34bbdca4912689a54f295add56d683", [:mix], [{:meck, "0.8.4", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm"},
+ "ex_aws": {:hex, :ex_aws, "1.1.1", "58c46d9a0b395d7cbb065424c109feb6a7385907c12a91d3032a1e8baf3d6220", [:mix], [{:configparser_ex, "~> 0.2.1", [hex: :configparser_ex, repo: "hexpm", optional: true]}, {:hackney, "1.6.3 or 1.6.5 or 1.7.1", [hex: :hackney, repo: "hexpm", optional: true]}, {:jsx, "~> 2.8", [hex: :jsx, repo: "hexpm", optional: true]}, {:poison, ">= 1.2.0", [hex: :poison, repo: "hexpm", optional: true]}, {:sweet_xml, "~> 0.6", [hex: :sweet_xml, repo: "hexpm", optional: true]}, {:xml_builder, "~> 0.0.6", [hex: :xml_builder, repo: "hexpm", optional: true]}], "hexpm"},
+ "ex_machina": {:hex, :ex_machina, "2.0.0", "ec284c6f57233729cea9319e083f66e613e82549f78eccdb2059aeba5d0df9f3", [:mix], [{:ecto, "~> 2.1", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm"},
+ "exfswatch": {:hex, :exfswatch, "0.2.1", "3f34190e2750ae6d924ce53729bb40e050dd8a08fee683847e6ddfa03ea5429f", [:mix], [{:fs, "~> 0.9", [hex: :fs, repo: "hexpm", optional: false]}], "hexpm"},
+ "exsync": {:hex, :exsync, "0.1.3", "84c66a4f60505d1baeb8a79cbf713ae98276f23b4bc7d676c6cb2d663fced5f3", [:mix], [{:exfswatch, "~> 0.2.1", [hex: :exfswatch, repo: "hexpm", optional: false]}], "hexpm"},
+ "fs": {:hex, :fs, "0.9.2", "ed17036c26c3f70ac49781ed9220a50c36775c6ca2cf8182d123b6566e49ec59", [:rebar], [], "hexpm"},
+ "gettext": {:hex, :gettext, "0.13.1", "5e0daf4e7636d771c4c71ad5f3f53ba09a9ae5c250e1ab9c42ba9edccc476263", [:mix], [], "hexpm"},
+ "hackney": {:hex, :hackney, "1.7.1", "e238c52c5df3c3b16ce613d3a51c7220a784d734879b1e231c9babd433ac1cb4", [:rebar3], [{:certifi, "1.0.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "4.0.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "1.0.2", [hex: :mimerl, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.1", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm"},
+ "html_sanitize_ex": {:hex, :html_sanitize_ex, "1.0.1", "2572e7122c78ab7e57b613e7c7f5e42bf9b3c25e430e32f23f1413d86db8a0af", [:mix], [{:mochiweb, "~> 2.12.2", [hex: :mochiweb, repo: "hexpm", optional: false]}], "hexpm"},
+ "httpoison": {:hex, :httpoison, "0.11.1", "d06c571274c0e77b6cc50e548db3fd7779f611fbed6681fd60a331f66c143a0b", [:mix], [{:hackney, "~> 1.7.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm"},
+ "idna": {:hex, :idna, "4.0.0", "10aaa9f79d0b12cf0def53038547855b91144f1bfcc0ec73494f38bb7b9c4961", [:rebar3], [], "hexpm"},
+ "inflex": {:hex, :inflex, "1.7.0", "4466a34b7d8e871d8164619ba0f3b8410ec782e900f0ae1d3d27a5875a29532e", [:mix], [], "hexpm"},
+ "json_web_token": {:hex, :json_web_token, "0.2.8", "c79d4c36cfd6f205be7099713e67d6057bde64ee9c64363f9001e3de86de703c", [:mix], [{:poison, "~> 3.1", [hex: :poison, repo: "hexpm", optional: false]}], "hexpm"},
+ "maru": {:hex, :maru, "0.11.3", "0bf2f26955430c4878dab91fe44aeb8b3aaed338b4f6ffd0729ea119284c374d", [:mix], [{:cowboy, "~> 1.0", [hex: :cowboy, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 1.5 or ~> 2.0 or ~> 3.0", [hex: :poison, repo: "hexpm", optional: false]}], "hexpm"},
+ "meck": {:hex, :meck, "0.8.4", "59ca1cd971372aa223138efcf9b29475bde299e1953046a0c727184790ab1520", [:make, :rebar], [], "hexpm"},
+ "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm"},
+ "mime": {:hex, :mime, "1.1.0", "01c1d6f4083d8aa5c7b8c246ade95139620ef8effb009edde934e0ec3b28090a", [:mix], [], "hexpm"},
+ "mimerl": {:hex, :mimerl, "1.0.2", "993f9b0e084083405ed8252b99460c4f0563e41729ab42d9074fd5e52439be88", [:rebar3], [], "hexpm"},
+ "mochiweb": {:hex, :mochiweb, "2.12.2", "80804ad342afa3d7f3524040d4eed66ce74b17a555de454ac85b07c479928e46", [:make, :rebar], [], "hexpm"},
"mws_client": {:git, "https://github.com/FoxComm/elixir-amazon-mws-client.git", "145210296b12d0aaa5ee6cabb21c6a79fea6e96d", []},
- "parallel_stream": {:hex, :parallel_stream, "1.0.5", "4c78d3e675f9eff885cbe252c89a8fc1d2fb803c0d03a914281e587834e09431", [:mix], []},
- "plug": {:hex, :plug, "1.3.3", "d9be189924379b4e9d470caef87380d09549aea1ceafe6a0d41292c8c317c923", [:mix], [{:cowboy, "~> 1.0.1 or ~> 1.1", [hex: :cowboy, optional: true]}, {:mime, "~> 1.0", [hex: :mime, optional: false]}]},
- "poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], []},
- "poolboy": {:hex, :poolboy, "1.5.1", "6b46163901cfd0a1b43d692657ed9d7e599853b3b21b95ae5ae0a777cf9b6ca8", [:rebar], []},
- "postgrex": {:hex, :postgrex, "0.13.2", "2b88168fc6a5456a27bfb54ccf0ba4025d274841a7a3af5e5deb1b755d95154e", [:mix], [{:connection, "~> 1.0", [hex: :connection, optional: false]}, {:db_connection, "~> 1.1", [hex: :db_connection, optional: false]}, {:decimal, "~> 1.0", [hex: :decimal, optional: false]}]},
- "ranch": {:hex, :ranch, "1.3.2", "e4965a144dc9fbe70e5c077c65e73c57165416a901bd02ea899cfd95aa890986", [:rebar3], []},
- "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.1", "28a4d65b7f59893bc2c7de786dec1e1555bd742d336043fe644ae956c3497fbe", [:make, :rebar], []},
- "timex": {:hex, :timex, "3.1.13", "48b33162e3ec33e9a08fb5f98e3f3c19c3e328dded3156096c1969b77d33eef0", [:mix], [{:combine, "~> 0.7", [hex: :combine, optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, optional: false]}, {:tzdata, "~> 0.1.8 or ~> 0.5", [hex: :tzdata, optional: false]}]},
- "tzdata": {:hex, :tzdata, "0.5.11", "3d5469a9f46bdf4a8760333dbdabdcc4751325035c454b10521f71e7c611ae50", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, optional: false]}]}}
+ "parallel_stream": {:hex, :parallel_stream, "1.0.5", "4c78d3e675f9eff885cbe252c89a8fc1d2fb803c0d03a914281e587834e09431", [:mix], [], "hexpm"},
+ "plug": {:hex, :plug, "1.3.3", "d9be189924379b4e9d470caef87380d09549aea1ceafe6a0d41292c8c317c923", [:mix], [{:cowboy, "~> 1.0.1 or ~> 1.1", [hex: :cowboy, repo: "hexpm", optional: true]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}], "hexpm"},
+ "poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm"},
+ "poolboy": {:hex, :poolboy, "1.5.1", "6b46163901cfd0a1b43d692657ed9d7e599853b3b21b95ae5ae0a777cf9b6ca8", [:rebar], [], "hexpm"},
+ "postgrex": {:hex, :postgrex, "0.13.2", "2b88168fc6a5456a27bfb54ccf0ba4025d274841a7a3af5e5deb1b755d95154e", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 1.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm"},
+ "ranch": {:hex, :ranch, "1.3.2", "e4965a144dc9fbe70e5c077c65e73c57165416a901bd02ea899cfd95aa890986", [:rebar3], [], "hexpm"},
+ "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.1", "28a4d65b7f59893bc2c7de786dec1e1555bd742d336043fe644ae956c3497fbe", [:make, :rebar], [], "hexpm"},
+ "timex": {:hex, :timex, "3.1.13", "48b33162e3ec33e9a08fb5f98e3f3c19c3e328dded3156096c1969b77d33eef0", [:mix], [{:combine, "~> 0.7", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 0.1.8 or ~> 0.5", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm"},
+ "timex_ecto": {:hex, :timex_ecto, "3.1.1", "37d54f6879d96a6789bb497296531cfb853631de78e152969d95cff03c1368dd", [:mix], [{:ecto, "~> 2.1.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:timex, "~> 3.0", [hex: :timex, repo: "hexpm", optional: false]}], "hexpm"},
+ "tzdata": {:hex, :tzdata, "0.5.11", "3d5469a9f46bdf4a8760333dbdabdcc4751325035c454b10521f71e7c611ae50", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm"}}
diff --git a/hyperion/priv/seeds/categories/clothes_accessories_categories.csv b/hyperion/priv/seeds/categories/clothes_accessories_categories.csv
index 6a6c6f9354..b44e379e23 100644
--- a/hyperion/priv/seeds/categories/clothes_accessories_categories.csv
+++ b/hyperion/priv/seeds/categories/clothes_accessories_categories.csv
@@ -1222,7 +1222,6 @@
6796859011,"Clothing, Shoes & Jewelry/Men/Shoes/Work & Safety/Fire & Safety",department_name:mens AND item_type_keyword:fire-and-safety-shoes,See Available Refinements
6796861011,"Clothing, Shoes & Jewelry/Men/Shoes/Work & Safety/Health Care & Food Service",department_name:mens AND item_type_keyword:health-care-and-food-service-shoes,See Available Refinements
6796860011,"Clothing, Shoes & Jewelry/Men/Shoes/Work & Safety/Industrial & Construction",department_name:mens AND item_type_keyword:industrial-and-construction-shoes,See Available Refinements
-11312338011,"Clothing, Shoes & Jewelry/Men/Shoes/Work & Safety/Medical",department_name:mens AND item_type_keyword:medical-professional-shoes,See Available Refinements
6796862011,"Clothing, Shoes & Jewelry/Men/Shoes/Work & Safety/Military & Tactical",department_name:mens AND item_type_keyword:military-and-tactical-boots,See Available Refinements
6796863011,"Clothing, Shoes & Jewelry/Men/Shoes/Work & Safety/Uniform Dress Shoes",department_name:mens AND item_type_keyword:uniform-dress-shoes,See Available Refinements
7581669011,"Clothing, Shoes & Jewelry/Men/Shops",,
@@ -1322,7 +1321,6 @@
6567202011,"Clothing, Shoes & Jewelry/Men/Shops/Uniforms, Work & Safety/Clothing/Medical/Scrub Jackets",department_name:mens AND item_type_keyword:medical-scrubs-jackets,See Available Refinements
6567203011,"Clothing, Shoes & Jewelry/Men/Shops/Uniforms, Work & Safety/Clothing/Medical/Scrub Sets",department_name:mens AND item_type_keyword:medical-scrubs-apparel-sets,See Available Refinements
6567204011,"Clothing, Shoes & Jewelry/Men/Shops/Uniforms, Work & Safety/Clothing/Medical/Scrub Tops",department_name:mens AND item_type_keyword:medical-scrubs-shirts,See Available Refinements
-11316162011,"Clothing, Shoes & Jewelry/Men/Shops/Uniforms, Work & Safety/Clothing/Medical/Shoes",department_name:mens AND item_type_keyword:medical-shoes,
6567205011,"Clothing, Shoes & Jewelry/Men/Shops/Uniforms, Work & Safety/Clothing/Medical/T-Shirts",department_name:mens AND item_type_keyword:medical-apparel-t-shirts,See Available Refinements
2492606011,"Clothing, Shoes & Jewelry/Men/Shops/Uniforms, Work & Safety/Clothing/Military",department_name:mens AND item_type_keyword:military-apparel,
2492612011,"Clothing, Shoes & Jewelry/Men/Shops/Uniforms, Work & Safety/Clothing/Military/Accessories",department_name:mens AND item_type_keyword:military-apparel-accessories,
@@ -1345,7 +1343,6 @@
6796859011,"Clothing, Shoes & Jewelry/Men/Shops/Uniforms, Work & Safety/Shoes/Fire & Safety",department_name:mens AND item_type_keyword:fire-and-safety-shoes,See Available Refinements
6796861011,"Clothing, Shoes & Jewelry/Men/Shops/Uniforms, Work & Safety/Shoes/Health Care & Food Service",department_name:mens AND item_type_keyword:health-care-and-food-service-shoes,See Available Refinements
6796860011,"Clothing, Shoes & Jewelry/Men/Shops/Uniforms, Work & Safety/Shoes/Industrial & Construction",department_name:mens AND item_type_keyword:industrial-and-construction-shoes,See Available Refinements
-11312338011,"Clothing, Shoes & Jewelry/Men/Shops/Uniforms, Work & Safety/Shoes/Medical",department_name:mens AND item_type_keyword:medical-professional-shoes,See Available Refinements
6796862011,"Clothing, Shoes & Jewelry/Men/Shops/Uniforms, Work & Safety/Shoes/Military & Tactical",department_name:mens AND item_type_keyword:military-and-tactical-boots,See Available Refinements
6796863011,"Clothing, Shoes & Jewelry/Men/Shops/Uniforms, Work & Safety/Shoes/Uniform Dress Shoes",department_name:mens AND item_type_keyword:uniform-dress-shoes,See Available Refinements
6358539011,"Clothing, Shoes & Jewelry/Men/Watches",,
@@ -1954,7 +1951,6 @@
6567202011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Clothing/Medical/Men/Scrub Jackets",department_name:mens AND item_type_keyword:medical-scrubs-jackets,See Available Refinements
6567203011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Clothing/Medical/Men/Scrub Sets",department_name:mens AND item_type_keyword:medical-scrubs-apparel-sets,See Available Refinements
6567204011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Clothing/Medical/Men/Scrub Tops",department_name:mens AND item_type_keyword:medical-scrubs-shirts,See Available Refinements
-11316162011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Clothing/Medical/Men/Shoes",department_name:mens AND item_type_keyword:medical-shoes,
6567205011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Clothing/Medical/Men/T-Shirts",department_name:mens AND item_type_keyword:medical-apparel-t-shirts,See Available Refinements
6567206011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Clothing/Medical/Women",,
6567208011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Clothing/Medical/Women/Lab Coats",department_name:womens AND item_type_keyword:medical-lab-coats,See Available Refinements
@@ -1963,7 +1959,6 @@
6567211011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Clothing/Medical/Women/Scrub Jackets",department_name:womens AND item_type_keyword:medical-scrubs-jackets,See Available Refinements
6567212011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Clothing/Medical/Women/Scrub Sets",department_name:womens AND item_type_keyword:medical-scrubs-apparel-sets,See Available Refinements
6567213011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Clothing/Medical/Women/Scrub Tops",department_name:womens AND item_type_keyword:medical-scrubs-shirts,See Available Refinements
-11316163011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Clothing/Medical/Women/Shoes",department_name:womens AND item_type_keyword:medical-shoes,
6567214011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Clothing/Medical/Women/T-Shirts",department_name:womens AND item_type_keyword:medical-apparel-t-shirts,See Available Refinements
1283420011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Clothing/Military",,
2492606011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Clothing/Military/Men",department_name:mens AND item_type_keyword:military-apparel,
@@ -2092,14 +2087,12 @@
6796859011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Shoes/Men/Fire & Safety",department_name:mens AND item_type_keyword:fire-and-safety-shoes,See Available Refinements
6796861011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Shoes/Men/Health Care & Food Service",department_name:mens AND item_type_keyword:health-care-and-food-service-shoes,See Available Refinements
6796860011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Shoes/Men/Industrial & Construction",department_name:mens AND item_type_keyword:industrial-and-construction-shoes,See Available Refinements
-11312338011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Shoes/Men/Medical",department_name:mens AND item_type_keyword:medical-professional-shoes,See Available Refinements
6796862011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Shoes/Men/Military & Tactical",department_name:mens AND item_type_keyword:military-and-tactical-boots,See Available Refinements
6796863011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Shoes/Men/Uniform Dress Shoes",department_name:mens AND item_type_keyword:uniform-dress-shoes,See Available Refinements
679442011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Shoes/Women",,See Available Refinements
6796864011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Shoes/Women/Fire & Safety",department_name:womens AND item_type_keyword:fire-and-safety-shoes,See Available Refinements
6796865011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Shoes/Women/Health Care & Food Service",department_name:womens AND item_type_keyword:health-care-and-food-service-shoes,See Available Refinements
6796866011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Shoes/Women/Industrial & Construction",department_name:womens AND item_type_keyword:industrial-and-construction-shoes,See Available Refinements
-11312339011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Shoes/Women/Medical",department_name:womens AND item_type_keyword:medical-professional-shoes,See Available Refinements
6796867011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Shoes/Women/Military & Tactical",department_name:womens AND item_type_keyword:military-and-tactical-boots,See Available Refinements
6796868011,"Clothing, Shoes & Jewelry/Uniforms, Work & Safety/Shoes/Women/Uniform Dress Shoes",department_name:womens AND item_type_keyword:uniform-dress-shoes,See Available Refinements
7147440011,"Clothing, Shoes & Jewelry/Women",,
@@ -2599,7 +2592,6 @@
6796864011,"Clothing, Shoes & Jewelry/Women/Shoes/Work & Safety/Fire & Safety",department_name:womens AND item_type_keyword:fire-and-safety-shoes,See Available Refinements
6796865011,"Clothing, Shoes & Jewelry/Women/Shoes/Work & Safety/Health Care & Food Service",department_name:womens AND item_type_keyword:health-care-and-food-service-shoes,See Available Refinements
6796866011,"Clothing, Shoes & Jewelry/Women/Shoes/Work & Safety/Industrial & Construction",department_name:womens AND item_type_keyword:industrial-and-construction-shoes,See Available Refinements
-11312339011,"Clothing, Shoes & Jewelry/Women/Shoes/Work & Safety/Medical",department_name:womens AND item_type_keyword:medical-professional-shoes,See Available Refinements
6796867011,"Clothing, Shoes & Jewelry/Women/Shoes/Work & Safety/Military & Tactical",department_name:womens AND item_type_keyword:military-and-tactical-boots,See Available Refinements
6796868011,"Clothing, Shoes & Jewelry/Women/Shoes/Work & Safety/Uniform Dress Shoes",department_name:womens AND item_type_keyword:uniform-dress-shoes,See Available Refinements
7581668011,"Clothing, Shoes & Jewelry/Women/Shops",,
@@ -2965,7 +2957,6 @@
6567211011,"Clothing, Shoes & Jewelry/Women/Shops/Uniforms, Work & Safety/Clothing/Medical/Scrub Jackets",department_name:womens AND item_type_keyword:medical-scrubs-jackets,See Available Refinements
6567212011,"Clothing, Shoes & Jewelry/Women/Shops/Uniforms, Work & Safety/Clothing/Medical/Scrub Sets",department_name:womens AND item_type_keyword:medical-scrubs-apparel-sets,See Available Refinements
6567213011,"Clothing, Shoes & Jewelry/Women/Shops/Uniforms, Work & Safety/Clothing/Medical/Scrub Tops",department_name:womens AND item_type_keyword:medical-scrubs-shirts,See Available Refinements
-11316163011,"Clothing, Shoes & Jewelry/Women/Shops/Uniforms, Work & Safety/Clothing/Medical/Shoes",department_name:womens AND item_type_keyword:medical-shoes,
6567214011,"Clothing, Shoes & Jewelry/Women/Shops/Uniforms, Work & Safety/Clothing/Medical/T-Shirts",department_name:womens AND item_type_keyword:medical-apparel-t-shirts,See Available Refinements
2492613011,"Clothing, Shoes & Jewelry/Women/Shops/Uniforms, Work & Safety/Clothing/Military",department_name:womens AND item_type_keyword:military-apparel,
2492619011,"Clothing, Shoes & Jewelry/Women/Shops/Uniforms, Work & Safety/Clothing/Military/Accessories",department_name:womens AND item_type_keyword:military-apparel-accessories,
@@ -2988,7 +2979,6 @@
6796864011,"Clothing, Shoes & Jewelry/Women/Shops/Uniforms, Work & Safety/Shoes/Fire & Safety",department_name:womens AND item_type_keyword:fire-and-safety-shoes,See Available Refinements
6796865011,"Clothing, Shoes & Jewelry/Women/Shops/Uniforms, Work & Safety/Shoes/Health Care & Food Service",department_name:womens AND item_type_keyword:health-care-and-food-service-shoes,See Available Refinements
6796866011,"Clothing, Shoes & Jewelry/Women/Shops/Uniforms, Work & Safety/Shoes/Industrial & Construction",department_name:womens AND item_type_keyword:industrial-and-construction-shoes,See Available Refinements
-11312339011,"Clothing, Shoes & Jewelry/Women/Shops/Uniforms, Work & Safety/Shoes/Medical",department_name:womens AND item_type_keyword:medical-professional-shoes,See Available Refinements
6796867011,"Clothing, Shoes & Jewelry/Women/Shops/Uniforms, Work & Safety/Shoes/Military & Tactical",department_name:womens AND item_type_keyword:military-and-tactical-boots,See Available Refinements
6796868011,"Clothing, Shoes & Jewelry/Women/Shops/Uniforms, Work & Safety/Shoes/Uniform Dress Shoes",department_name:womens AND item_type_keyword:uniform-dress-shoes,See Available Refinements
6358543011,"Clothing, Shoes & Jewelry/Women/Watches",,
diff --git a/hyperion/priv/seeds/clothes_accessories_categories.exs b/hyperion/priv/seeds/clothes_accessories_categories.exs
index 29e1a44a2c..f725519a03 100644
--- a/hyperion/priv/seeds/clothes_accessories_categories.exs
+++ b/hyperion/priv/seeds/clothes_accessories_categories.exs
@@ -20,3 +20,5 @@ end
Enum.map(data, fn row -> parse_and_store.(row) end)
from(c in Category, where: c.department == "NULL" and not is_nil(c.item_type)) |> Hyperion.Repo.update_all(set: [department: nil])
+
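+# Set category_name to "clothing" for every category whose node_path contains "Clothing"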
+from(c in Category, where: like(c.node_path, "%Clothing%")) |> Hyperion.Repo.update_all(set: [category_name: "clothing"])
diff --git a/hyperion/spec/factories/product_factory.ex b/hyperion/spec/factories/product_factory.ex
index 104e362d38..5fdbc1804a 100644
--- a/hyperion/spec/factories/product_factory.ex
+++ b/hyperion/spec/factories/product_factory.ex
@@ -204,25 +204,58 @@ defmodule Hyperion.ProductFactory do
saleprice: %{"currency" => "USD", "value" => 0}], 2}]
end
- def submit_images_feed_data do
- [[{[sku: "SKU-ABC", type: "Main",
- location: "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/487/DeathStar_400x390.jpg"],
- 1},
- {[sku: "SKU-ABC", type: "PT",
- location: "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/487/CuCeQ0IXEAAnpVx.jpg",
- id: 1], 2}],
- [[sku: "SKU-ABC", type: "Swatch",
- location: "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/490/DeathStar2-1-1.jpg",
- idx: 3],
- [sku: "SKU-ABC", type: "Swatch",
- location: "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/490/DeathStar2-2-0.jpg",
- idx: 4],
- [sku: "SKU-ABC", type: "Swatch",
- location: "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/490/DeathStar2-2-2.jpg",
- idx: 5],
- [sku: "SKU-ABC", type: "Swatch",
- location: "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/490/DeathStar2-0-1.jpg",
- idx: 6]]]
+ def one_image_per_sku_data do
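+    # Each element is {{sku, image_type, image_location}, message_id}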
+ [{{"SKU-TRL1", "Main",
+ "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/232/81B6PnROB1L._UX522_.jpg"},
+ 1},
+ {{"SKU-TRL3", "Main",
+ "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/237/DeathStar2.jpg"},
+ 2},
+ {{"SKU-TRL2", "Main",
+ "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/235/DeathStar2.jpg"},
+ 3},
+ {{"SKU-TRL", "Main",
+ "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/221/DeathStar2.jpg"},
+ 4}]
+ end
+
+ def many_images_per_sku_data do
+ [{{"SKU-ABC", "Main",
+ "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/243/20170321_234255.jpg"},
+ 1},
+ {{"SKU-ABC", "PT",
+ "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/243/DeathStar2.jpg",
+ 1}, 2},
+ {{"SKU-ABC", "Swatch",
+ "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/256/DeathStar2-0-1.jpg"},
+ 3},
+ {{"SKU-ABC", "Swatch",
+ "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/256/DeathStar2-1-0.jpg"},
+ 4},
+ {{"SKU-ABC", "Swatch",
+ "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/256/DeathStar2-1-1.jpg"},
+ 5},
+ {{"SKU-ABC", "Swatch",
+ "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/256/DeathStar2-2-0.jpg"},
+ 6},
+ {{"SKU-CBA", "Main",
+ "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/248/4620266368976559_big.jpg"},
+ 7},
+ {{"SKU-CBA", "PT",
+ "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/248/2krWC11Pfko.jpg",
+ 1}, 8},
+ {{"SKU-CBA", "Swatch",
+ "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/261/DeathStar2-0-1.jpg"},
+ 9},
+ {{"SKU-CBA", "Swatch",
+ "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/261/DeathStar2-1-0.jpg"},
+ 10},
+ {{"SKU-CBA", "Swatch",
+ "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/261/DeathStar2-1-1.jpg"},
+ 11},
+ {{"SKU-CBA", "Swatch",
+ "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/261/DeathStar2-2-0.jpg"},
+ 12}]
end
end
end
diff --git a/hyperion/spec/lib/amazon/amazon_spec.exs b/hyperion/spec/lib/amazon/amazon_spec.exs
index 90e2cb36c2..c0c416c1a6 100644
--- a/hyperion/spec/lib/amazon/amazon_spec.exs
+++ b/hyperion/spec/lib/amazon/amazon_spec.exs
@@ -50,7 +50,7 @@ defmodule AmazonSpec do
describe "images_feed" do
context "when product have images" do
let resp: build(:sku_with_images)
- let images: [[{[sku: "XMENTEEX1", type: "Main", location: "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/245/81B6PnROB1L._UX522_.jpg"], 1}]]
+ let images: [{{"XMENTEEX1", "Main", "http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/245/81B6PnROB1L._UX522_.jpg"}, 1}]
before do: allow(Hyperion.PhoenixScala.Client).to accept(:get_product, fn(_, _) -> resp() end)
diff --git a/hyperion/spec/lib/amazon/templates/template_builder_spec.exs b/hyperion/spec/lib/amazon/templates/template_builder_spec.exs
index 04c83e1bc8..4689a90773 100644
--- a/hyperion/spec/lib/amazon/templates/template_builder_spec.exs
+++ b/hyperion/spec/lib/amazon/templates/template_builder_spec.exs
@@ -48,13 +48,26 @@ defmodule TemplateBuilderSpec do
describe "submit_images_feed" do
- let list: submit_images_feed_data()
- let opts: %{seller_id: 123}
- let template: "\n\n \n ProductImage\n \n 1\n Update\n \n SKU-ABC\n Main\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/487/DeathStar_400x390.jpg\n \n\n\n \n 2\n Update\n \n SKU-ABC\n PT1\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/487/CuCeQ0IXEAAnpVx.jpg\n \n\n\n \n 3\n Update\n \n SKU-ABC\n Swatch\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/490/DeathStar2-1-1.jpg\n \n\n\n 4\n Update\n \n SKU-ABC\n Swatch\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/490/DeathStar2-2-0.jpg\n \n\n\n 5\n Update\n \n SKU-ABC\n Swatch\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/490/DeathStar2-2-2.jpg\n \n\n\n 6\n Update\n \n SKU-ABC\n Swatch\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/490/DeathStar2-0-1.jpg\n \n\n\n\n"
+ context "when there is only one image per SKU" do
+ let list: one_image_per_sku_data()
+ let opts: %{seller_id: 123}
+ let template: "\n\n \n ProductImage\n \n \n 1\n Update\n \n SKU-TRL1\n Main\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/232/81B6PnROB1L._UX522_.jpg\n \n\n\n \n \n 2\n Update\n \n SKU-TRL3\n Main\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/237/DeathStar2.jpg\n \n\n\n \n \n 3\n Update\n \n SKU-TRL2\n Main\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/235/DeathStar2.jpg\n \n\n\n \n \n 4\n Update\n \n SKU-TRL\n Main\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/221/DeathStar2.jpg\n \n\n\n \n\n"
- it "should render images template" do
- expect Hyperion.Amazon.TemplateBuilder.submit_images_feed(list(), opts())
- |> to(eq(template()))
+ it "should render images template" do
+ expect Hyperion.Amazon.TemplateBuilder.submit_images_feed(list(), opts())
+ |> to(eq(template()))
+ end
+ end
+
+ context "when there are many images per SKU" do
+ let list: many_images_per_sku_data()
+ let opts: %{seller_id: 123}
+ let template: "\n\n \n ProductImage\n \n \n 1\n Update\n \n SKU-ABC\n Main\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/243/20170321_234255.jpg\n \n\n\n \n \n 2\n Update\n \n SKU-ABC\n PT1\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/243/DeathStar2.jpg\n \n\n\n \n \n 3\n Update\n \n SKU-ABC\n Swatch\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/256/DeathStar2-0-1.jpg\n \n\n\n \n \n 4\n Update\n \n SKU-ABC\n Swatch\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/256/DeathStar2-1-0.jpg\n \n\n\n \n \n 5\n Update\n \n SKU-ABC\n Swatch\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/256/DeathStar2-1-1.jpg\n \n\n\n \n \n 6\n Update\n \n SKU-ABC\n Swatch\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/256/DeathStar2-2-0.jpg\n \n\n\n \n \n 7\n Update\n \n SKU-CBA\n Main\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/248/4620266368976559_big.jpg\n \n\n\n \n \n 8\n Update\n \n SKU-CBA\n PT1\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/248/2krWC11Pfko.jpg\n \n\n\n \n \n 9\n Update\n \n SKU-CBA\n Swatch\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/261/DeathStar2-0-1.jpg\n \n\n\n \n \n 10\n Update\n \n SKU-CBA\n Swatch\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/261/DeathStar2-1-0.jpg\n \n\n\n \n \n 11\n Update\n \n SKU-CBA\n Swatch\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/261/DeathStar2-1-1.jpg\n \n\n\n \n \n 12\n Update\n \n SKU-CBA\n Swatch\n http://s3-us-west-1.amazonaws.com/foxcomm-images/albums/1/261/DeathStar2-2-0.jpg\n \n\n\n \n\n"
+
+ it "should render images template" do
+ expect Hyperion.Amazon.TemplateBuilder.submit_images_feed(list(), opts())
+ |> to(eq(template()))
+ end
end
end
diff --git a/hyperion/sql/V2_002__create_pull_worker_history.sql b/hyperion/sql/V2_002__create_pull_worker_history.sql
new file mode 100644
index 0000000000..f9a384bd5e
--- /dev/null
+++ b/hyperion/sql/V2_002__create_pull_worker_history.sql
@@ -0,0 +1,7 @@
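+-- tracks pull worker runs: the seller pulled and the last_run timestamp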
+create table pull_worker_history(
+ id bigserial primary key,
+ seller_id varchar(255) not null,
+ last_run timestamp not null,
+ inserted_at timestamp not null,
+ updated_at timestamp not null
+)
diff --git a/phoenix-scala/README.md b/phoenix-scala/README.md
index a6da6cb2d7..84a4f16739 100644
--- a/phoenix-scala/README.md
+++ b/phoenix-scala/README.md
@@ -19,6 +19,7 @@
- `sbt '~re-start'`: reloads the application automatically on code changes
- `sbt seed`: execute the seeds
+- `sbt seedDemo`: execute the demo seeds (admins, random order data, and so on)
- `sbt test`: run all of the unit and integration tests
- `sbt '~test:compile'`: re-compiles the application automatically on code changes
diff --git a/phoenix-scala/phoenix/app/phoenix/models/Assignment.scala b/phoenix-scala/phoenix/app/phoenix/models/Assignment.scala
index c423eb19e8..fcd0292502 100644
--- a/phoenix-scala/phoenix/app/phoenix/models/Assignment.scala
+++ b/phoenix-scala/phoenix/app/phoenix/models/Assignment.scala
@@ -33,17 +33,18 @@ object Assignment {
AssignmentType.slickColumn
sealed trait ReferenceType
- case object Cart extends ReferenceType
- case object Order extends ReferenceType
- case object GiftCard extends ReferenceType
- case object Customer extends ReferenceType
- case object Return extends ReferenceType
- case object Sku extends ReferenceType
- case object Product extends ReferenceType
- case object Promotion extends ReferenceType
- case object Coupon extends ReferenceType
- case object Taxonomy extends ReferenceType
- case object Taxon extends ReferenceType
+ case object Cart extends ReferenceType
+ case object Order extends ReferenceType
+ case object AmazonOrder extends ReferenceType
+ case object GiftCard extends ReferenceType
+ case object Customer extends ReferenceType
+ case object Return extends ReferenceType
+ case object Sku extends ReferenceType
+ case object Product extends ReferenceType
+ case object Promotion extends ReferenceType
+ case object Coupon extends ReferenceType
+ case object Taxonomy extends ReferenceType
+ case object Taxon extends ReferenceType
object ReferenceType extends ADT[ReferenceType] {
def types = sealerate.values[ReferenceType]
diff --git a/phoenix-scala/phoenix/app/phoenix/models/account/User.scala b/phoenix-scala/phoenix/app/phoenix/models/account/User.scala
index 26075163e0..c5ff0adf1c 100644
--- a/phoenix-scala/phoenix/app/phoenix/models/account/User.scala
+++ b/phoenix-scala/phoenix/app/phoenix/models/account/User.scala
@@ -123,6 +123,9 @@ object Users extends FoxTableQuery[User, Users](new Users(_)) with ReturningId[U
def activeUserByEmail(email: Option[String]): QuerySeq =
filter(c ⇒ c.email === email && !c.isBlacklisted && !c.isDisabled)
+ def mustFindOneByEmail(email: String)(implicit ec: EC): DbResultT[User] =
+ findByEmail(email).mustFindOneOr(NotFoundFailure404(User, email))
+
def otherUserByEmail(email: String, accountId: Int): QuerySeq =
filter(c ⇒ c.email === email && c.accountId =!= accountId && !c.isBlacklisted && !c.isDisabled)
diff --git a/phoenix-scala/phoenix/app/phoenix/models/activity/Dimension.scala b/phoenix-scala/phoenix/app/phoenix/models/activity/Dimension.scala
index 0a760d4251..50a1a9afab 100644
--- a/phoenix-scala/phoenix/app/phoenix/models/activity/Dimension.scala
+++ b/phoenix-scala/phoenix/app/phoenix/models/activity/Dimension.scala
@@ -32,6 +32,7 @@ object Dimension {
val giftCard = "giftCard"
val notification = "notification"
val order = "order"
+ val amazonOrder = "amazonOrder"
val product = "product"
val promotion = "promotion"
val rma = "return"
diff --git a/phoenix-scala/phoenix/app/phoenix/models/cord/AmazonOrder.scala b/phoenix-scala/phoenix/app/phoenix/models/cord/AmazonOrder.scala
new file mode 100644
index 0000000000..c5e59e88cf
--- /dev/null
+++ b/phoenix-scala/phoenix/app/phoenix/models/cord/AmazonOrder.scala
@@ -0,0 +1,85 @@
+package phoenix.models.cord
+
+import java.time.Instant
+
+import shapeless._
+import phoenix.payloads.AmazonOrderPayloads.CreateAmazonOrderPayload
+import com.github.tminglei.slickpg.LTree
+import core.db.ExPostgresDriver.api._
+import core.utils.Money.Currency
+import core.failures._
+import core.db._
+
+case class AmazonOrder(id: Int,
+ amazonOrderId: String,
+ orderTotal: Long,
+ paymentMethodDetail: String,
+ orderType: String,
+ currency: Currency,
+ orderStatus: String,
+ purchaseDate: Instant,
+ scope: LTree,
+ accountId: Int,
+ createdAt: Instant = Instant.now,
+ updatedAt: Instant = Instant.now)
+ extends FoxModel[AmazonOrder]
+
+object AmazonOrder {
+ def build(payload: CreateAmazonOrderPayload, accountId: Int)(implicit ec: EC): AmazonOrder =
+ AmazonOrder(
+ id = 0,
+ amazonOrderId = payload.amazonOrderId,
+ orderTotal = payload.orderTotal,
+ paymentMethodDetail = payload.paymentMethodDetail,
+ orderType = payload.orderType,
+ currency = payload.currency,
+ orderStatus = payload.orderStatus,
+ purchaseDate = payload.purchaseDate,
+ scope = payload.scope,
+ accountId = accountId,
+ createdAt = Instant.now,
+ updatedAt = Instant.now
+ )
+}
+
+object AmazonOrders
+ extends FoxTableQuery[AmazonOrder, AmazonOrders](new AmazonOrders(_))
+ with ReturningId[AmazonOrder, AmazonOrders] {
+
+ val returningLens: Lens[AmazonOrder, Int] = lens[AmazonOrder].id
+
+ def findOneByAmazonOrderId(amazonOrderId: String): DBIO[Option[AmazonOrder]] =
+ filter(_.amazonOrderId === amazonOrderId).one
+
+ def mustFindOneOr(amazonOrderId: String)(implicit ec: EC): DbResultT[AmazonOrder] =
+ findOneByAmazonOrderId(amazonOrderId).mustFindOr(NotFoundFailure404(AmazonOrder, amazonOrderId))
+}
+
+class AmazonOrders(tag: Tag) extends FoxTable[AmazonOrder](tag, "amazon_orders") {
+ def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
+ def amazonOrderId = column[String]("amazon_order_id")
+ def orderTotal = column[Long]("order_total")
+ def paymentMethodDetail = column[String]("payment_method_detail")
+ def orderType = column[String]("order_type")
+ def currency = column[Currency]("currency")
+ def orderStatus = column[String]("order_status")
+ def purchaseDate = column[Instant]("purchase_date")
+ def scope = column[LTree]("scope")
+ def accountId = column[Int]("account_id")
+ def createdAt = column[Instant]("created_at")
+ def updatedAt = column[Instant]("updated_at")
+
+ def * =
+ (id,
+ amazonOrderId,
+ orderTotal,
+ paymentMethodDetail,
+ orderType,
+ currency,
+ orderStatus,
+ purchaseDate,
+ scope,
+ accountId,
+ createdAt,
+ updatedAt) <> ((AmazonOrder.apply _).tupled, AmazonOrder.unapply)
+}
diff --git a/phoenix-scala/phoenix/app/phoenix/payloads/AmazonOrderPayloads.scala b/phoenix-scala/phoenix/app/phoenix/payloads/AmazonOrderPayloads.scala
new file mode 100644
index 0000000000..22cc7e6259
--- /dev/null
+++ b/phoenix-scala/phoenix/app/phoenix/payloads/AmazonOrderPayloads.scala
@@ -0,0 +1,21 @@
+package phoenix.payloads
+
+import java.time.Instant
+
+import core.utils.Money.Currency
+
+import com.github.tminglei.slickpg.LTree
+
+object AmazonOrderPayloads {
+ case class CreateAmazonOrderPayload(amazonOrderId: String,
+ orderTotal: Long,
+ paymentMethodDetail: String,
+ orderType: String,
+ currency: Currency = Currency.USD,
+ orderStatus: String,
+ scope: LTree,
+ customerEmail: String,
+ purchaseDate: Instant)
+
+ case class UpdateAmazonOrderPayload(orderStatus: String)
+}
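For reference, a minimal sketch (Scala) of building the new create payload; the values are illustrative, mirroring the integration test fixture added later in this diff, and customer@example.com is a placeholder:

import java.time.Instant
import com.github.tminglei.slickpg.LTree
import core.utils.Money.Currency
import phoenix.payloads.AmazonOrderPayloads.CreateAmazonOrderPayload

// Illustrative values only; scope and customerEmail must point at existing data.
val payload = CreateAmazonOrderPayload(
  amazonOrderId = "111-5296499-9653859",
  orderTotal = 4500,
  paymentMethodDetail = "CreditCard",
  orderType = "StandardOrder",
  currency = Currency.USD,
  orderStatus = "Shipped",
  scope = LTree("1"),
  customerEmail = "customer@example.com",
  purchaseDate = Instant.now
)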
diff --git a/phoenix-scala/phoenix/app/phoenix/responses/cord/AmazonOrderResponse.scala b/phoenix-scala/phoenix/app/phoenix/responses/cord/AmazonOrderResponse.scala
new file mode 100644
index 0000000000..d8db4d8627
--- /dev/null
+++ b/phoenix-scala/phoenix/app/phoenix/responses/cord/AmazonOrderResponse.scala
@@ -0,0 +1,40 @@
+package phoenix.responses.cord
+
+import java.time.Instant
+
+import phoenix.models.cord.AmazonOrder
+import core.utils.Money.Currency
+import phoenix.responses._
+import com.github.tminglei.slickpg.LTree
+
+case class AmazonOrderResponse(id: Int,
+ amazonOrderId: String,
+ orderTotal: Long,
+ paymentMethodDetail: String,
+ orderType: String,
+ currency: Currency,
+ orderStatus: String,
+ purchaseDate: Instant,
+ scope: LTree,
+ accountId: Int,
+ createdAt: Instant = Instant.now,
+ updatedAt: Instant = Instant.now)
+ extends ResponseItem
+
+object AmazonOrderResponse {
+ def build(amazonOrder: AmazonOrder): AmazonOrderResponse =
+ AmazonOrderResponse(
+ id = amazonOrder.id,
+ amazonOrderId = amazonOrder.amazonOrderId,
+ orderTotal = amazonOrder.orderTotal,
+ paymentMethodDetail = amazonOrder.paymentMethodDetail,
+ orderType = amazonOrder.orderType,
+ currency = amazonOrder.currency,
+ orderStatus = amazonOrder.orderStatus,
+ purchaseDate = amazonOrder.purchaseDate,
+ scope = amazonOrder.scope,
+ accountId = amazonOrder.accountId,
+ createdAt = amazonOrder.createdAt,
+ updatedAt = amazonOrder.updatedAt
+ )
+}
diff --git a/phoenix-scala/phoenix/app/phoenix/routes/admin/AmazonOrderRoutes.scala b/phoenix-scala/phoenix/app/phoenix/routes/admin/AmazonOrderRoutes.scala
new file mode 100644
index 0000000000..51c2c63aa9
--- /dev/null
+++ b/phoenix-scala/phoenix/app/phoenix/routes/admin/AmazonOrderRoutes.scala
@@ -0,0 +1,34 @@
+package phoenix.routes.admin
+
+import akka.http.scaladsl.server.Directives._
+import akka.http.scaladsl.server.Route
+import phoenix.utils.http.JsonSupport._
+import phoenix.models.account.User
+import phoenix.payloads.AmazonOrderPayloads._
+import phoenix.services.AmazonOrderManager._
+import phoenix.responses.cord.AmazonOrderResponse._
+import phoenix.responses.cord.AmazonOrderResponse
+import phoenix.services.Authenticator.AuthData
+import phoenix.utils.aliases._
+import phoenix.utils.http.CustomDirectives._
+import phoenix.utils.http.Http._
+
+object AmazonOrderRoutes {
+ def routes(implicit ec: EC, db: DB, auth: AU): Route =
+ activityContext(auth) { implicit ac ⇒
+ pathPrefix("amazon-orders") {
+ (post & pathEnd & entity(as[CreateAmazonOrderPayload])) { payload ⇒
+ mutateOrFailures {
+ createAmazonOrder(payload)
+ }
+ } ~
+ pathPrefix(Segment) { amazonOrderId ⇒
+ (patch & pathEnd & entity(as[UpdateAmazonOrderPayload])) { payload ⇒
+ mutateOrFailures {
+ updateAmazonOrder(amazonOrderId, payload)
+ }
+ }
+ }
+ }
+ }
+}
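A short sketch of exercising these routes from an integration test via the amazonOrdersApi helpers added to PhoenixAdminApi later in this diff (an implicit TestAdminAuth is assumed; the "Canceled" status is illustrative):

// Create an amazon order, then update its status through the new admin endpoints.
val created = amazonOrdersApi.create(payload).as[AmazonOrderResponse]
val updated = amazonOrdersApi(created.amazonOrderId)
  .update(UpdateAmazonOrderPayload(orderStatus = "Canceled"))
  .as[AmazonOrderResponse]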
diff --git a/phoenix-scala/phoenix/app/phoenix/routes/admin/AssignmentsRoutes.scala b/phoenix-scala/phoenix/app/phoenix/routes/admin/AssignmentsRoutes.scala
index 4cb2345c43..1c3767afdc 100644
--- a/phoenix-scala/phoenix/app/phoenix/routes/admin/AssignmentsRoutes.scala
+++ b/phoenix-scala/phoenix/app/phoenix/routes/admin/AssignmentsRoutes.scala
@@ -742,6 +742,43 @@ object AssignmentsRoutes {
}
}
}
+ } ~
+ // Amazon Order Single Assignments
+ pathPrefix("amazon-orders" / cordRefNumRegex) { refNum ⇒
+ pathPrefix("assignees") {
+ (get & pathEnd) {
+ getOrFailures {
+ AmazonOrderAssignmentsManager.list(refNum)
+ }
+ } ~
+ (post & pathEnd & entity(as[AssignmentPayload])) { payload ⇒
+ mutateOrFailures {
+ AmazonOrderAssignmentsManager.assign(refNum, payload, auth.model)
+ }
+ } ~
+ (delete & path(IntNumber) & pathEnd) { assigneeId ⇒
+ mutateOrFailures {
+ AmazonOrderAssignmentsManager.unassign(refNum, assigneeId, auth.model)
+ }
+ }
+ } ~
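+ // Amazon Order Watchers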
+ pathPrefix("watchers") {
+ (get & pathEnd) {
+ getOrFailures {
+ AmazonOrderWatchersManager.list(refNum)
+ }
+ } ~
+ (post & pathEnd & entity(as[AssignmentPayload])) { payload ⇒
+ mutateOrFailures {
+ AmazonOrderWatchersManager.assign(refNum, payload, auth.model)
+ }
+ } ~
+ (delete & path(IntNumber) & pathEnd) { assigneeId ⇒
+ mutateOrFailures {
+ AmazonOrderWatchersManager.unassign(refNum, assigneeId, auth.model)
+ }
+ }
+ }
}
}
}
diff --git a/phoenix-scala/phoenix/app/phoenix/routes/admin/CustomerRoutes.scala b/phoenix-scala/phoenix/app/phoenix/routes/admin/CustomerRoutes.scala
index 6504061dd4..8b19fdb366 100644
--- a/phoenix-scala/phoenix/app/phoenix/routes/admin/CustomerRoutes.scala
+++ b/phoenix-scala/phoenix/app/phoenix/routes/admin/CustomerRoutes.scala
@@ -38,6 +38,13 @@ object CustomerRoutes {
val context = AccountCreateContext(List(roleName), orgName, scopeId)
CustomerManager.createFromAdmin(payload, Some(auth.model), context)
}
+ } ~
+ pathPrefix("email" / Segment) { customerEmail ⇒
+ (get & pathEnd) {
+ getOrFailures {
+ CustomerManager.getByEmail(customerEmail)
+ }
+ }
}
} ~
pathPrefix("customers" / IntNumber) { accountId ⇒
diff --git a/phoenix-scala/phoenix/app/phoenix/server/Main.scala b/phoenix-scala/phoenix/app/phoenix/server/Main.scala
index 8d251d7c4a..73a384123b 100644
--- a/phoenix-scala/phoenix/app/phoenix/server/Main.scala
+++ b/phoenix-scala/phoenix/app/phoenix/server/Main.scala
@@ -147,6 +147,7 @@ class Service(systemOverride: Option[ActorSystem] = None,
phoenix.routes.admin.NotificationRoutes.routes ~
phoenix.routes.admin.AssignmentsRoutes.routes ~
phoenix.routes.admin.OrderRoutes.routes ~
+ phoenix.routes.admin.AmazonOrderRoutes.routes ~
phoenix.routes.admin.CartRoutes.routes ~
phoenix.routes.admin.CustomerRoutes.routes ~
phoenix.routes.admin.CustomerGroupsRoutes.routes ~
diff --git a/phoenix-scala/phoenix/app/phoenix/services/AmazonOrderManager.scala b/phoenix-scala/phoenix/app/phoenix/services/AmazonOrderManager.scala
new file mode 100644
index 0000000000..1ddbd27234
--- /dev/null
+++ b/phoenix-scala/phoenix/app/phoenix/services/AmazonOrderManager.scala
@@ -0,0 +1,33 @@
+package phoenix.services
+
+import cats.implicits._
+import phoenix.payloads.AmazonOrderPayloads._
+import phoenix.models.cord._
+import phoenix.models.account._
+import phoenix.responses.cord.AmazonOrderResponse
+import phoenix.utils.aliases._
+import core.db._
+import core.failures._
+
+object AmazonOrderManager {
+
+ def createAmazonOrder(
+ payload: CreateAmazonOrderPayload)(implicit ec: EC, db: DB, au: AU): DbResultT[AmazonOrderResponse] =
+ for {
+ user ← * <~ Users
+ .findByEmail(payload.customerEmail)
+ .mustFindOneOr(NotFoundFailure404(User, "email", payload.customerEmail))
+ inner = AmazonOrders.create(AmazonOrder.build(payload, user.accountId))
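+ // findOrCreate only runs the insert when no order with this amazonOrderId exists yet, so repeated POSTs are idempotent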
+ amazonOrder ← * <~ AmazonOrders
+ .findOneByAmazonOrderId(payload.amazonOrderId)
+ .findOrCreate(inner)
+ } yield AmazonOrderResponse.build(amazonOrder)
+
+ def updateAmazonOrder(
+ amazonOrderId: String,
+ payload: UpdateAmazonOrderPayload)(implicit ec: EC, db: DB, au: AU): DbResultT[AmazonOrderResponse] =
+ for {
+ amazonOrder ← * <~ AmazonOrders.mustFindOneOr(amazonOrderId)
+ up ← * <~ AmazonOrders.update(amazonOrder, amazonOrder.copy(orderStatus = payload.orderStatus))
+ } yield AmazonOrderResponse.build(up)
+}
diff --git a/phoenix-scala/phoenix/app/phoenix/services/assignments/AmazonOrderAssignmentsManager.scala b/phoenix-scala/phoenix/app/phoenix/services/assignments/AmazonOrderAssignmentsManager.scala
new file mode 100644
index 0000000000..c4f5d4f875
--- /dev/null
+++ b/phoenix-scala/phoenix/app/phoenix/services/assignments/AmazonOrderAssignmentsManager.scala
@@ -0,0 +1,25 @@
+package phoenix.services.assignments
+
+import phoenix.models.cord._
+import phoenix.models.activity.Dimension
+import phoenix.models.{Assignment, NotificationSubscription}
+import phoenix.responses.cord.AmazonOrderResponse
+import phoenix.responses.cord.AmazonOrderResponse.build
+import phoenix.utils.aliases._
+import slick.jdbc.PostgresProfile.api._
+import core.db._
+
+object AmazonOrderAssignmentsManager extends AssignmentsManager[String, AmazonOrder] {
+ val assignmentType = Assignment.Assignee
+ val referenceType = Assignment.AmazonOrder
+ val notifyDimension = Dimension.amazonOrder
+ val notifyReason = NotificationSubscription.Assigned
+
+ def buildResponse(model: AmazonOrder): AmazonOrderResponse = build(model)
+
+ def fetchEntity(refNum: String)(implicit ec: EC, db: DB, ac: AC): DbResultT[AmazonOrder] =
+ AmazonOrders.mustFindOneOr(refNum)
+
+ def fetchSequence(refNums: Seq[String])(implicit ec: EC, db: DB, ac: AC): DbResultT[Seq[AmazonOrder]] =
+ AmazonOrders.filter(_.amazonOrderId.inSetBind(refNums)).result.dbresult
+}
diff --git a/phoenix-scala/phoenix/app/phoenix/services/assignments/AmazonOrderWatchersManager.scala b/phoenix-scala/phoenix/app/phoenix/services/assignments/AmazonOrderWatchersManager.scala
new file mode 100644
index 0000000000..06646b7935
--- /dev/null
+++ b/phoenix-scala/phoenix/app/phoenix/services/assignments/AmazonOrderWatchersManager.scala
@@ -0,0 +1,26 @@
+package phoenix.services.assignments
+
+import phoenix.models.cord._
+import phoenix.models.activity.Dimension
+import phoenix.models.{Assignment, NotificationSubscription}
+import phoenix.responses.cord.AmazonOrderResponse
+import phoenix.responses.cord.AmazonOrderResponse.build
+import phoenix.utils.aliases._
+import slick.jdbc.PostgresProfile.api._
+import core.db._
+
+object AmazonOrderWatchersManager extends AssignmentsManager[String, AmazonOrder] {
+
+ val assignmentType = Assignment.Watcher
+ val referenceType = Assignment.AmazonOrder
+ val notifyDimension = Dimension.amazonOrder
+ val notifyReason = NotificationSubscription.Watching
+
+ def buildResponse(model: AmazonOrder): AmazonOrderResponse = build(model)
+
+ def fetchEntity(refNum: String)(implicit ec: EC, db: DB, ac: AC): DbResultT[AmazonOrder] =
+ AmazonOrders.mustFindOneOr(refNum)
+
+ def fetchSequence(refNums: Seq[String])(implicit ec: EC, db: DB, ac: AC): DbResultT[Seq[AmazonOrder]] =
+ AmazonOrders.filter(_.amazonOrderId.inSetBind(refNums)).result.dbresult
+}
diff --git a/phoenix-scala/phoenix/app/phoenix/services/customers/CustomerManager.scala b/phoenix-scala/phoenix/app/phoenix/services/customers/CustomerManager.scala
index e9eaf80432..f721391546 100644
--- a/phoenix-scala/phoenix/app/phoenix/services/customers/CustomerManager.scala
+++ b/phoenix-scala/phoenix/app/phoenix/services/customers/CustomerManager.scala
@@ -59,17 +59,24 @@ object CustomerManager {
} yield shipment
}
- def getByAccountId(accountId: Int)(implicit ec: EC, db: DB): DbResultT[CustomerResponse] =
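+ // Shared lookup used by getByAccountId and getByEmail below: resolves the user, then builds the full customer response.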
+ private def getCustomerInfo(userDbT: DbResultT[User])(implicit ec: EC,
+ db: DB): DbResultT[CustomerResponse] =
for {
- customer ← * <~ Users.mustFindByAccountId(accountId)
+ customer ← * <~ userDbT
customerDatas ← * <~ CustomersData
- .filter(_.accountId === accountId)
+ .filter(_.accountId === customer.accountId)
.withRegionsAndRank
- .mustFindOneOr(NotFoundFailure404(CustomerData, accountId))
+ .mustFindOneOr(NotFoundFailure404(CustomerData, customer.accountId))
(customerData, shipRegion, billRegion, rank) = customerDatas
- maxOrdersDate ← * <~ Orders.filter(_.accountId === accountId).map(_.placedAt).max.result
- totals ← * <~ StoreCreditService.fetchTotalsForCustomer(accountId)
- phoneOverride ← * <~ doOrGood(customer.phoneNumber.isEmpty, resolvePhoneNumber(accountId), None)
+ maxOrdersDate ← * <~ Orders
+ .filter(_.accountId === customer.accountId)
+ .map(_.placedAt)
+ .max
+ .result
+ totals ← * <~ StoreCreditService.fetchTotalsForCustomer(customer.accountId)
+ phoneOverride ← * <~ doOrGood(customer.phoneNumber.isEmpty,
+ resolvePhoneNumber(customer.accountId),
+ None)
groupMembership ← * <~ CustomerGroupMembers.findByCustomerDataId(customerData.id).result
groupIds = groupMembership.map(_.groupId).toSet
groups ← * <~ CustomerGroups.findAllByIds(groupIds).result
@@ -85,6 +92,16 @@ object CustomerManager {
groups = groups.map(CustomerGroupResponse.build)
)
+ def getByAccountId(accountId: Int)(implicit ec: EC, db: DB): DbResultT[CustomerResponse] = {
+ val userDbByAccountId = Users.mustFindByAccountId(accountId)
+ getCustomerInfo(userDbByAccountId)
+ }
+
+ def getByEmail(email: String)(implicit ec: EC, db: DB): DbResultT[CustomerResponse] = {
+ val userDbByEmail = Users.mustFindOneByEmail(email)
+ getCustomerInfo(userDbByEmail)
+ }
+
def create(payload: CreateCustomerPayload, admin: Option[User] = None, context: AccountCreateContext)(
implicit ec: EC,
db: DB,
diff --git a/phoenix-scala/phoenix/app/phoenix/utils/seeds/AmazonOrdersSeeds.scala b/phoenix-scala/phoenix/app/phoenix/utils/seeds/AmazonOrdersSeeds.scala
new file mode 100644
index 0000000000..9896c12f72
--- /dev/null
+++ b/phoenix-scala/phoenix/app/phoenix/utils/seeds/AmazonOrdersSeeds.scala
@@ -0,0 +1,28 @@
+package phoenix.utils.seeds
+
+import scala.concurrent.ExecutionContext.Implicits.global
+
+import phoenix.models.cord.AmazonOrder
+import com.github.tminglei.slickpg.LTree
+import phoenix.utils.aliases._
+import core.db._
+import java.time.Instant
+import core.utils.Money.Currency
+
+trait AmazonOrdersSeeds {
+ def amazonOrder =
+ AmazonOrder(
+ id = 0,
+ amazonOrderId = "112233",
+ orderTotal = 4500,
+ paymentMethodDetail = "CreditCard",
+ orderType = "StandardOrder",
+ currency = Currency.USD,
+ orderStatus = "Shipped",
+ purchaseDate = Instant.now,
+ scope = LTree("1"),
+ accountId = 0,
+ createdAt = Instant.now,
+ updatedAt = Instant.now
+ )
+}
diff --git a/phoenix-scala/phoenix/app/phoenix/utils/seeds/Factories.scala b/phoenix-scala/phoenix/app/phoenix/utils/seeds/Factories.scala
index 977664563d..27a9fc9453 100644
--- a/phoenix-scala/phoenix/app/phoenix/utils/seeds/Factories.scala
+++ b/phoenix-scala/phoenix/app/phoenix/utils/seeds/Factories.scala
@@ -32,7 +32,8 @@ object Factories
with PromotionSeeds
with ObjectSchemaSeeds
with CouponSeeds
- with SharedSearchSeeds {
+ with SharedSearchSeeds
+ with AmazonOrdersSeeds {
override implicit val formats = JsonFormatters.phoenixFormats
diff --git a/phoenix-scala/phoenix/test/integration/AmazonOrderIntegrationTest.scala b/phoenix-scala/phoenix/test/integration/AmazonOrderIntegrationTest.scala
new file mode 100644
index 0000000000..de9095cda2
--- /dev/null
+++ b/phoenix-scala/phoenix/test/integration/AmazonOrderIntegrationTest.scala
@@ -0,0 +1,58 @@
+import testutils._
+import testutils.apis._
+import testutils.fixtures._
+import testutils.fixtures.api.ApiFixtureHelpers
+import phoenix.payloads.AmazonOrderPayloads._
+import phoenix.responses.cord.AmazonOrderResponse
+import phoenix.models.cord._
+import core.failures._
+import core.db._
+import cats.implicits._
+import phoenix.utils.seeds.Factories
+import core.utils.Money.Currency
+import java.time.Instant
+import com.github.tminglei.slickpg.LTree
+
+class AmazonOrderIntegrationTest
+ extends IntegrationTestBase
+ with PhoenixAdminApi
+ with BakedFixtures
+ with DefaultJwtAdminAuth
+ with TestActivityContext.AdminAC
+ with ApiFixtureHelpers {
+
+ "POST /v1/amazon-orders" - {
+ "successfully creates amazonOrder from payload" in new Fixture {
+ val created =
+ AmazonOrders.findOneByAmazonOrderId(amazonOrderResponse.amazonOrderId).gimme.value
+ created.id must === (amazonOrderResponse.id)
+ }
+ }
+
+ "PATCH /v1/amazon-orders/:amazonOrderId" - {
+ "update existing order" in new Fixture {
+ val updatePayload = UpdateAmazonOrderPayload(orderStatus = "ChangedStatus")
+ val updated = amazonOrdersApi(amazonOrderResponse.amazonOrderId)
+ .update(updatePayload)
+ .as[AmazonOrderResponse]
+ updated.orderStatus must === (updatePayload.orderStatus)
+ }
+ }
+
+ trait Fixture {
+ val customer = api_newCustomer
+ val amazonOrderPayload = CreateAmazonOrderPayload(
+ amazonOrderId = "111-5296499-9653859",
+ orderTotal = 4500,
+ paymentMethodDetail = "CreditCard",
+ orderType = "StandardOrder",
+ currency = Currency.USD,
+ orderStatus = "Shipped",
+ purchaseDate = Instant.now,
+ scope = LTree("1"),
+ customerEmail = customer.email.value
+ )
+
+ val amazonOrderResponse = amazonOrdersApi.create(amazonOrderPayload).as[AmazonOrderResponse]
+ }
+}
diff --git a/phoenix-scala/phoenix/test/integration/CustomerIntegrationTest.scala b/phoenix-scala/phoenix/test/integration/CustomerIntegrationTest.scala
index 21c302ff7c..a67361db90 100644
--- a/phoenix-scala/phoenix/test/integration/CustomerIntegrationTest.scala
+++ b/phoenix-scala/phoenix/test/integration/CustomerIntegrationTest.scala
@@ -24,7 +24,6 @@ import phoenix.payloads.PaymentPayloads._
import phoenix.payloads.UserPayloads._
import phoenix.responses.CreditCardResponse
import phoenix.responses.cord.CartResponse
-import phoenix.responses.CreditCardResponse
import phoenix.services.carts.CartPaymentUpdater
import phoenix.utils.aliases.stripe.StripeCard
import phoenix.utils.seeds.Factories
@@ -82,6 +81,18 @@ class CustomerIntegrationTest
}
+ "GET /v1/customers/email/:email" - {
+ "fetches customer info by email" in new Fixture {
+ customersApi.getByEmail(customer.email.value).as[CustomerResponse].id must === (customer.accountId)
+ }
+
+ "fails if customer not found" in {
+ customersApi
+ .getByEmail("foo@bar.baz")
+ .mustFailWith404(NotFoundFailure404(User, "foo@bar.baz"))
+ }
+ }
+
"GET /v1/customers/:accountId" - {
"fetches customer info" in new Fixture {
val customerRoot =
diff --git a/phoenix-scala/phoenix/test/integration/ReturnsSearchViewTest.scala b/phoenix-scala/phoenix/test/integration/ReturnsSearchViewTest.scala
index 3532619de4..2df9f830f0 100644
--- a/phoenix-scala/phoenix/test/integration/ReturnsSearchViewTest.scala
+++ b/phoenix-scala/phoenix/test/integration/ReturnsSearchViewTest.scala
@@ -11,7 +11,6 @@ import core.utils.Money._
case class ReturnsSearchViewResult(
id: Int,
referenceNumber: String,
- orderId: Int,
orderRef: String,
createdAt: String,
state: Return.State,
diff --git a/phoenix-scala/phoenix/test/integration/testutils/apis/PhoenixAdminApi.scala b/phoenix-scala/phoenix/test/integration/testutils/apis/PhoenixAdminApi.scala
index ef4d5b3671..a3a2b99230 100644
--- a/phoenix-scala/phoenix/test/integration/testutils/apis/PhoenixAdminApi.scala
+++ b/phoenix-scala/phoenix/test/integration/testutils/apis/PhoenixAdminApi.scala
@@ -33,6 +33,7 @@ import phoenix.payloads.TaxonPayloads._
import phoenix.payloads.TaxonomyPayloads._
import phoenix.payloads.UserPayloads._
import phoenix.payloads.VariantPayloads._
+import phoenix.payloads.AmazonOrderPayloads._
import phoenix.payloads._
import phoenix.utils.aliases.OC
import testutils._
@@ -45,11 +46,28 @@ trait PhoenixAdminApi extends HttpSupport { self: FoxSuite ⇒
private val rootPrefix = "v1"
+ object amazonOrdersApi {
+ val amazonOrdersPrefix = s"$rootPrefix/amazon-orders"
+
+ def create(payload: CreateAmazonOrderPayload)(implicit aa: TestAdminAuth): HttpResponse =
+ POST(amazonOrdersPrefix, payload, aa.jwtCookie.some)
+ }
+
+ case class amazonOrdersApi(refNum: String) {
+ val amazonOrdersPath = s"${amazonOrdersApi.amazonOrdersPrefix}/$refNum"
+
+ def update(payload: UpdateAmazonOrderPayload)(implicit aa: TestAdminAuth): HttpResponse =
+ PATCH(amazonOrdersPath, payload, aa.jwtCookie.some)
+ }
+
object customersApi {
val customersPrefix = s"$rootPrefix/customers"
def create(payload: CreateCustomerPayload)(implicit aa: TestAdminAuth): HttpResponse =
POST(customersPrefix, payload, aa.jwtCookie.some)
+
+ def getByEmail(customerEmail: String)(implicit aa: TestAdminAuth): HttpResponse =
+ GET(s"$customersPrefix/email/$customerEmail", aa.jwtCookie.some)
}
case class customersApi(id: Int) {
diff --git a/phoenix-scala/sql/R__amazon_orders_to_orders_search_view_triggers.sql b/phoenix-scala/sql/R__amazon_orders_to_orders_search_view_triggers.sql
new file mode 100644
index 0000000000..e768162f46
--- /dev/null
+++ b/phoenix-scala/sql/R__amazon_orders_to_orders_search_view_triggers.sql
@@ -0,0 +1,65 @@
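+-- keep orders_search_view in sync with amazon_orders (new rows and status updates)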
+create or replace function update_orders_search_view_from_amazon_orders_insert_fn() returns trigger as $$
+ begin
+ insert into orders_search_view (
+ scope,
+ reference_number,
+ state,
+ placed_at,
+ currency,
+ sub_total,
+ shipping_total,
+ adjustments_total,
+ taxes_total,
+ grand_total,
+ customer)
+ select distinct on (new.id)
+ -- order
+ new.scope as scope,
+ new.amazon_order_id as reference_number,
+ new.order_status as state,
+ to_char(new.purchase_date, 'YYYY-MM-DD"T"HH24:MI:SS.MS"Z"') as placed_at,
+ new.currency as currency,
+ -- totals
+ 0 as sub_total,
+ 0 as shipping_total,
+ 0 as adjustments_total,
+ 0 as taxes_total,
+ new.order_total as grand_total,
+ -- customer
+ json_build_object(
+ 'id', c.id,
+ 'name', c.name,
+ 'email', c.email,
+ 'is_blacklisted', c.is_blacklisted,
+ 'joined_at', c.joined_at,
+ 'rank', c.rank,
+ 'revenue', c.revenue
+ )::jsonb as customer
+ from customers_search_view as c
+ where (new.account_id = c.id);
+ return null;
+ end;
+$$ language plpgsql;
+
+-- sync order status into orders_search_view when an amazon order is updated
+create or replace function update_orders_search_view_from_amazon_orders_update_fn() returns trigger as $$
+ begin
+ update orders_search_view set
+ state = new.order_status
+ where reference_number = new.amazon_order_id;
+ return null;
+ end;
+$$ language plpgsql;
+
+drop trigger if exists update_orders_search_view_from_amazon_orders_insert_trigger on amazon_orders;
+create trigger update_orders_search_view_from_amazon_orders_insert_trigger
+ after insert on amazon_orders
+ for each row
+ execute procedure update_orders_search_view_from_amazon_orders_insert_fn();
+
+drop trigger if exists update_orders_search_view_from_amazon_orders_update_trigger on amazon_orders;
+create trigger update_orders_search_view_from_amazon_orders_update_trigger
+ after update on amazon_orders
+ for each row
+ execute procedure update_orders_search_view_from_amazon_orders_update_fn();
+
diff --git a/phoenix-scala/sql/R__orders_search_view_triggers.sql b/phoenix-scala/sql/R__orders_search_view_triggers.sql
index 6bc9cf0b25..7e1bbee630 100644
--- a/phoenix-scala/sql/R__orders_search_view_triggers.sql
+++ b/phoenix-scala/sql/R__orders_search_view_triggers.sql
@@ -14,8 +14,8 @@ create or replace function update_orders_view_from_orders_insert_fn() returns tr
grand_total,
customer)
select distinct on (new.id)
+ nextval('orders_search_view_id_seq') as id,
-- order
- new.id as id,
new.scope as scope,
new.reference_number as reference_number,
new.state as state,
@@ -85,6 +85,23 @@ begin
end;
$$ language plpgsql;
+create or replace function update_orders_view_from_orders_update_fn() returns trigger as $$
+begin
+ update orders_search_view set
+ state = new.state,
+ placed_at = to_char(new.placed_at, 'YYYY-MM-DD"T"HH24:MI:SS.MS"Z"'),
+ currency = new.currency,
+ sub_total = new.sub_total,
+ shipping_total = new.shipping_total,
+ adjustments_total = new.adjustments_total,
+ taxes_total = new.taxes_total,
+ grand_total = new.grand_total
+ where reference_number = new.reference_number;
+
+ return null;
+end;
+$$ language plpgsql;
+
create or replace function update_orders_view_from_shipping_addresses_fn() returns trigger as $$
declare cord_refs text[];
begin
diff --git a/phoenix-scala/sql/R__returns_search_view_triggers.sql b/phoenix-scala/sql/R__returns_search_view_triggers.sql
index 1c507ca24a..310848cf2e 100644
--- a/phoenix-scala/sql/R__returns_search_view_triggers.sql
+++ b/phoenix-scala/sql/R__returns_search_view_triggers.sql
@@ -3,7 +3,6 @@ create or replace function update_returns_search_view_from_returns_insert_fn() r
insert into returns_search_view (
id,
reference_number,
- order_id,
order_ref,
created_at,
state,
@@ -16,7 +15,6 @@ create or replace function update_returns_search_view_from_returns_insert_fn() r
-- return
new.id as id,
new.reference_number as reference_number,
- new.order_id as order_id,
new.order_ref as order_ref,
to_json_timestamp(new.created_at) as created_at,
new.state as state,
diff --git a/phoenix-scala/sql/V5.20170621160816__create_amazon_orders.sql b/phoenix-scala/sql/V5.20170621160816__create_amazon_orders.sql
new file mode 100644
index 0000000000..7b2eb9bf85
--- /dev/null
+++ b/phoenix-scala/sql/V5.20170621160816__create_amazon_orders.sql
@@ -0,0 +1,15 @@
+create table amazon_orders(
+ id bigserial primary key,
+ amazon_order_id generic_string,
+ order_total integer not null default 0,
+ payment_method_detail generic_string,
+ order_type generic_string,
+ currency currency,
+ order_status generic_string,
+ purchase_date generic_timestamp,
+ scope ltree,
+ customer_name generic_string,
+ customer_email generic_string,
+ updated_at generic_timestamp,
+ created_at generic_timestamp
+);
diff --git a/phoenix-scala/sql/V5.20170621160817__alter_assignment_domain_add_amazon_order.sql b/phoenix-scala/sql/V5.20170621160817__alter_assignment_domain_add_amazon_order.sql
new file mode 100644
index 0000000000..f7ac95c706
--- /dev/null
+++ b/phoenix-scala/sql/V5.20170621160817__alter_assignment_domain_add_amazon_order.sql
@@ -0,0 +1,4 @@
+alter domain assignment_ref_type drop constraint assignment_ref_type_check;
+
+alter domain assignment_ref_type add check (value in ('cart', 'order', 'giftCard',
+ 'customer', 'return', 'product', 'sku', 'promotion', 'coupon', 'returnsScope', 'taxonomy', 'taxon', 'amazonOrder'));
diff --git a/phoenix-scala/sql/V5.20170621160818__add_orders_search_view_id_seq_and_change_triggers.sql b/phoenix-scala/sql/V5.20170621160818__add_orders_search_view_id_seq_and_change_triggers.sql
new file mode 100644
index 0000000000..fd084aaa8d
--- /dev/null
+++ b/phoenix-scala/sql/V5.20170621160818__add_orders_search_view_id_seq_and_change_triggers.sql
@@ -0,0 +1,8 @@
+-- set sequence value as max id + 1
+create sequence if not exists orders_search_view_id_seq increment by 1;
+
+select setval ('orders_search_view_id_seq',
+ coalesce((select max (id) + 1 from orders_search_view), 1), false);
+
+alter table orders_search_view
+ alter column id set default nextval ('orders_search_view_id_seq');
diff --git a/phoenix-scala/sql/V5.20170621160819__change_amazon_orders_table.sql b/phoenix-scala/sql/V5.20170621160819__change_amazon_orders_table.sql
new file mode 100644
index 0000000000..0ab0819d0f
--- /dev/null
+++ b/phoenix-scala/sql/V5.20170621160819__change_amazon_orders_table.sql
@@ -0,0 +1 @@
+alter table amazon_orders add column account_id int;
\ No newline at end of file
diff --git a/phoenix-scala/sql/V5.20170621160820__drop_order_id_from_returns_search_view.sql b/phoenix-scala/sql/V5.20170621160820__drop_order_id_from_returns_search_view.sql
new file mode 100644
index 0000000000..fbc170fee2
--- /dev/null
+++ b/phoenix-scala/sql/V5.20170621160820__drop_order_id_from_returns_search_view.sql
@@ -0,0 +1,2 @@
+-- after discussion with @aafa; - @retgoat
+alter table returns_search_view drop order_id;
\ No newline at end of file
diff --git a/tabernacle/ansible/group_vars/all b/tabernacle/ansible/group_vars/all
index 4fd0ae3823..49d56ff480 100644
--- a/tabernacle/ansible/group_vars/all
+++ b/tabernacle/ansible/group_vars/all
@@ -297,6 +297,7 @@ hyperion_db_password: hyperion
hyperion_db_connection_string: "postgresql://{{hyperion_db_user}}@{{db_host}}/{{hyperion_db_name}}"
hyperion_test_db_name: hyperion_test
hyperion_push_check_interval: 5
+hyperion_orders_fetch_interval: 24
hyperion_create_plugin_in_ashes_on_start: true
# Geronimo
diff --git a/tabernacle/ansible/roles/dev/marathon/templates/hyperion.json b/tabernacle/ansible/roles/dev/marathon/templates/hyperion.json
index 983fc4005d..5a74652760 100644
--- a/tabernacle/ansible/roles/dev/marathon/templates/hyperion.json
+++ b/tabernacle/ansible/roles/dev/marathon/templates/hyperion.json
@@ -24,10 +24,11 @@
"PHOENIX_USER": "{{phoenix_api_user}}",
"PHOENIX_PASSWORD": "{{phoenix_api_password}}",
"PHOENIX_ORG": "{{phoenix_api_user_org}}",
- "PHOENIX_URL": "https://{{storefront_server_name}}",
+ "PHOENIX_URL": "http://{{phoenix_server}}",
"PUBLIC_KEY": "{{public_keys_dest_dir}}/public_key.pem",
"PUSH_CHECK_INTERVAL": "{{hyperion_push_check_interval}}",
- "CREATE_ASHES_PLUGIN": "{{hyperion_create_plugin_in_ashes_on_start}}"
+ "ORDERS_FETCH_INTERVAL" : "{{hyperion_orders_fetch_interval}}",
+ "CREATE_ASHES_PLUGIN": "{{hyperion_create_plugin_in_ashes_on_start | bool | lower}}"
},
"constraints": [["hostname", "UNIQUE"]],
"container": {
diff --git a/tabernacle/ansible/roles/dev/marathon_groups/templates/core-integrations/hyperion.json.j2 b/tabernacle/ansible/roles/dev/marathon_groups/templates/core-integrations/hyperion.json.j2
index f420dfb990..b51bd817c4 100644
--- a/tabernacle/ansible/roles/dev/marathon_groups/templates/core-integrations/hyperion.json.j2
+++ b/tabernacle/ansible/roles/dev/marathon_groups/templates/core-integrations/hyperion.json.j2
@@ -25,10 +25,11 @@
"PHOENIX_USER": "{{phoenix_api_user}}",
"PHOENIX_PASSWORD": "{{phoenix_api_password}}",
"PHOENIX_ORG": "{{phoenix_api_user_org}}",
- "PHOENIX_URL": "https://{{storefront_server_name}}",
+ "PHOENIX_URL": "http://{{phoenix_server}}",
"PUBLIC_KEY": "{{public_keys_dest_dir}}/public_key.pem",
"PUSH_CHECK_INTERVAL": "{{hyperion_push_check_interval}}",
- "CREATE_ASHES_PLUGIN": "{{hyperion_create_plugin_in_ashes_on_start}}"
+ "ORDERS_FETCH_INTERVAL" : "{{hyperion_orders_fetch_interval}}",
+ "CREATE_ASHES_PLUGIN": "{{hyperion_create_plugin_in_ashes_on_start | bool | lower}}"
},
"container": {
"type": "DOCKER",