diff --git a/CHANGELOG.md b/CHANGELOG.md index 28425938ee..63664a0a6d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,7 @@ This project adheres to [Semantic Versioning](http://semver.org/). + The selected columns in the embedded resources are aggregated into arrays + Aggregates are not supported - #2967, Add `Proxy-Status` header for better error response - @taimoorzaeem + - #4012, Add parameter-tracked snippets to allow raw SQL responses with templated parameters, and `application/json+sql` to return the raw SQL - @fauh45 ### Fixed diff --git a/postgrest.cabal b/postgrest.cabal index f06fe783b5..1bbb6a2ae8 100644 --- a/postgrest.cabal +++ b/postgrest.cabal @@ -241,6 +241,7 @@ test-suite spec Feature.Query.QuerySpec Feature.Query.RangeSpec Feature.Query.RawOutputTypesSpec + Feature.Query.RawSQLSpec Feature.Query.RelatedQueriesSpec Feature.Query.RpcSpec Feature.Query.ServerTimingSpec diff --git a/src/PostgREST/MediaType.hs b/src/PostgREST/MediaType.hs index fd247d8be1..8ca5258c4f 100644 --- a/src/PostgREST/MediaType.hs +++ b/src/PostgREST/MediaType.hs @@ -26,6 +26,7 @@ import Protolude -- | Enumeration of currently supported media types data MediaType = MTApplicationJSON + | MTApplicationJSONSQL | MTGeoJSON | MTTextCSV | MTTextPlain @@ -65,6 +66,7 @@ toContentType ct = (hContentType, toMime ct <> charset) -- | Convert from MediaType to a ByteString representing the mime type toMime :: MediaType -> ByteString toMime MTApplicationJSON = "application/json" +toMime MTApplicationJSONSQL = "application/json+sql" toMime MTVndArrayJSONStrip = "application/vnd.pgrst.array+json;nulls=stripped" toMime MTGeoJSON = "application/geo+json" toMime MTTextCSV = "text/csv" @@ -133,6 +135,7 @@ decodeMediaType mt = decodeMediaType' $ decodeLatin1 mt decodeMediaType' mt' = case (T.toLower mainType, T.toLower subType, params) of ("application", "json", _) -> MTApplicationJSON + ("application", "json+sql", _) -> MTApplicationJSONSQL ("application", "geo+json", _) -> MTGeoJSON ("text", "csv", _) -> MTTextCSV ("text", "plain", _) -> MTTextPlain diff --git a/src/PostgREST/Plan.hs b/src/PostgREST/Plan.hs index 38b7d1dcb5..3f643c39ce 100644 --- a/src/PostgREST/Plan.hs +++ b/src/PostgREST/Plan.hs @@ -1088,6 +1088,7 @@ negotiateContent conf ApiRequest{iAction=act, iPreferences=Preferences{preferRep -- TODO: despite no aggregate, these are responding with a Content-Type, which is not correct. (ActDb (ActRelationRead _ True), Just (_, mt)) -> Right (NoAgg, mt) (ActDb (ActRoutine _ (InvRead True)), Just (_, mt)) -> Right (NoAgg, mt) + (_, Just (_, MTApplicationJSONSQL)) -> Right (NoAgg, MTApplicationJSONSQL) (_, Just (x, mt)) -> Right (x, mt) where firstAcceptedPick = listToMaybe $ mapMaybe matchMT accepts -- If there are multiple accepted media types, pick the first. This is usual in content negotiation.
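The hunks above register the new `application/json+sql` media type and teach content negotiation to select it for any action without applying an aggregate. A minimal round-trip sketch, not part of the diff, assuming `decodeMediaType`, `toMime` and an `Eq` instance for `MediaType` are available from the module (the helper name is only illustrative):

{-# LANGUAGE OverloadedStrings #-}
import PostgREST.MediaType (MediaType (..), decodeMediaType, toMime)

-- An `Accept: application/json+sql` header decodes to the new constructor,
-- and the constructor serializes back to the same mime string.
mediaTypeRoundTrip :: Bool
mediaTypeRoundTrip =
  decodeMediaType "application/json+sql" == MTApplicationJSONSQL
    && toMime MTApplicationJSONSQL == "application/json+sql"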
diff --git a/src/PostgREST/Query.hs b/src/PostgREST/Query.hs index f504f64c54..1b31feddbf 100644 --- a/src/PostgREST/Query.hs +++ b/src/PostgREST/Query.hs @@ -15,7 +15,6 @@ import qualified Data.ByteString.Lazy.Char8 as LBS import qualified Data.HashMap.Strict as HM import qualified Data.Set as S import qualified Hasql.Decoders as HD -import qualified Hasql.DynamicStatements.Snippet as SQL (Snippet) import qualified Hasql.DynamicStatements.Statement as SQL import qualified Hasql.Session as SQL (Session) import qualified Hasql.Transaction as SQL @@ -49,11 +48,13 @@ import PostgREST.Plan (ActionPlan (..), InfoPlan (..), InspectPlan (..)) import PostgREST.Plan.MutatePlan (MutatePlan (..)) -import PostgREST.Query.SqlFragment (escapeIdentList, fromQi, - intercalateSnippet, +import PostgREST.Query.SqlFragment (TrackedSnippet, + escapeIdentList, fromQi, + intercalateSnippet, rawSQL, setConfigWithConstantName, setConfigWithConstantNameJSON, - setConfigWithDynamicName) + setConfigWithDynamicName, + toSnippet) import PostgREST.Query.Statements (ResultSet (..)) import PostgREST.SchemaCache (SchemaCache (..)) import PostgREST.SchemaCache.Identifiers (QualifiedIdentifier (..)) @@ -79,6 +80,7 @@ data QueryResult | DbCallResult CallReadPlan ResultSet | MaybeDbResult InspectPlan (Maybe (TablesMap, RoutineMap, Maybe Text)) | NoDbResult InfoPlan + | RawSQLResult ByteString [Maybe ByteString] query :: AppConfig -> AuthResult -> ApiRequest -> ActionPlan -> SchemaCache -> PgVersion -> Query query _ _ _ (NoDb x) _ _ = NoDbQuery $ NoDbResult x @@ -109,11 +111,31 @@ planIsoLvl AppConfig{configRoleIsoLvl} role actPlan = case actPlan of -- TODO: Generate the Hasql Statement in a diferent module after the OpenAPI functionality is removed actionQuery :: DbActionPlan -> AppConfig -> ApiRequest -> PgVersion -> SchemaCache -> (DbHandler QueryResult, ByteString) +-- NOTE: When wrMedia is MTApplicationJSONSQL, this returns a RawSQLResult: the statement is never executed, and the generated SQL with its tracked parameters is returned directly +actionQuery (DbCrud WrappedReadPlan{wrMedia = MTApplicationJSONSQL, ..}) AppConfig{..} ApiRequest{iPreferences=Preferences{..}} _ _ = + (mainActionQuery, mainSQLQuery) + where + countQuery = QueryBuilder.readPlanToCountQuery wrReadPlan + (_, mainSQLQuery, params) = Statements.prepareRead + (QueryBuilder.readPlanToQuery wrReadPlan) + (if preferCount == Just EstimatedCount then + -- LIMIT maxRows + 1 so we can determine below that maxRows was surpassed + QueryBuilder.limitedQuery countQuery ((+ 1) <$> configDbMaxRows) + else + countQuery + ) + (shouldCount preferCount) + MTApplicationJSONSQL + wrHandler + configDbPreparedStatements + mainActionQuery = do + pure $ RawSQLResult mainSQLQuery params + actionQuery (DbCrud plan@WrappedReadPlan{..}) conf@AppConfig{..} apiReq@ApiRequest{iPreferences=Preferences{..}} _ _ = (mainActionQuery, mainSQLQuery) where countQuery = QueryBuilder.readPlanToCountQuery wrReadPlan - (result, mainSQLQuery) = Statements.prepareRead + (result, mainSQLQuery, _) = Statements.prepareRead (QueryBuilder.readPlanToQuery wrReadPlan) (if preferCount == Just EstimatedCount then -- LIMIT maxRows + 1 so we can determine below that maxRows was surpassed @@ -131,11 +153,29 @@ actionQuery (DbCrud plan@WrappedReadPlan{..}) conf@AppConfig{..} apiReq@ApiReque optionalRollback conf apiReq DbCrudResult plan <$> resultSetWTotal conf apiReq resultSet countQuery +actionQuery (DbCrud MutateReadPlan{mrMedia = MTApplicationJSONSQL, ..}) AppConfig{..} ApiRequest{iPreferences=Preferences{..}} _ _ = + (mainActionQuery, 
mainSQLQuery) + where + (isPut, isInsert, pkCols) = case mrMutatePlan of {Insert{where_,insPkCols} -> ((not . null) where_, True, insPkCols); _ -> (False,False, mempty);} + (_, mainSQLQuery, params) = Statements.prepareWrite + (QueryBuilder.readPlanToQuery mrReadPlan) + (QueryBuilder.mutatePlanToQuery mrMutatePlan) + isInsert + isPut + MTApplicationJSONSQL + mrHandler + preferRepresentation + preferResolution + pkCols + configDbPreparedStatements + mainActionQuery = do + pure $ RawSQLResult mainSQLQuery params + actionQuery (DbCrud plan@MutateReadPlan{..}) conf@AppConfig{..} apiReq@ApiRequest{iPreferences=Preferences{..}} _ _ = (mainActionQuery, mainSQLQuery) where (isPut, isInsert, pkCols) = case mrMutatePlan of {Insert{where_,insPkCols} -> ((not . null) where_, True, insPkCols); _ -> (False,False, mempty);} - (result, mainSQLQuery) = Statements.prepareWrite + (result, mainSQLQuery, _) = Statements.prepareWrite (QueryBuilder.readPlanToQuery mrReadPlan) (QueryBuilder.mutatePlanToQuery mrMutatePlan) isInsert @@ -151,12 +191,12 @@ actionQuery (DbCrud plan@MutateReadPlan{..}) conf@AppConfig{..} apiReq@ApiReques failNotSingular mrMedia resultSet MutationUpdate -> do failNotSingular mrMedia resultSet - failExceedsMaxAffectedPref (preferMaxAffected,preferHandling) resultSet + failExceedsMaxAffectedPref (preferMaxAffected, preferHandling) resultSet MutationSingleUpsert -> do failPut resultSet MutationDelete -> do failNotSingular mrMedia resultSet - failExceedsMaxAffectedPref (preferMaxAffected,preferHandling) resultSet + failExceedsMaxAffectedPref (preferMaxAffected, preferHandling) resultSet mainActionQuery = do resultSet <- lift $ SQL.statement mempty result failMutation resultSet @@ -166,7 +206,7 @@ actionQuery (DbCrud plan@MutateReadPlan{..}) conf@AppConfig{..} apiReq@ApiReques actionQuery (DbCall plan@CallReadPlan{..}) conf@AppConfig{..} apiReq@ApiRequest{iPreferences=Preferences{..}} pgVer _ = (mainActionQuery, mainSQLQuery) where - (result, mainSQLQuery) = Statements.prepareCall + (result, mainSQLQuery, _) = Statements.prepareCall crProc (QueryBuilder.callPlanToQuery crCallPlan pgVer) (QueryBuilder.readPlanToQuery crReadPlan) @@ -179,7 +219,7 @@ actionQuery (DbCall plan@CallReadPlan{..}) conf@AppConfig{..} apiReq@ApiRequest{ resultSet <- lift $ SQL.statement mempty result optionalRollback conf apiReq failNotSingular crMedia resultSet - failExceedsMaxAffectedPref (preferMaxAffected,preferHandling) resultSet + failExceedsMaxAffectedPref (preferMaxAffected, preferHandling) resultSet pure $ DbCallResult plan resultSet actionQuery (MaybeDb plan@InspectPlan{ipSchema=tSchema}) AppConfig{..} _ _ sCache = @@ -213,7 +253,7 @@ failPut RSStandard{rsQueryTotal=queryTotal} = lift SQL.condemn throwError $ Error.ApiRequestError Error.PutMatchingPkError -resultSetWTotal :: AppConfig -> ApiRequest -> ResultSet -> SQL.Snippet -> DbHandler ResultSet +resultSetWTotal :: AppConfig -> ApiRequest -> ResultSet -> TrackedSnippet -> DbHandler ResultSet resultSetWTotal _ _ rs@RSPlan{} _ = return rs resultSetWTotal AppConfig{..} ApiRequest{iPreferences=Preferences{..}} rs@RSStandard{rsTableTotal=tableTotal} countQuery = case preferCount of @@ -270,7 +310,7 @@ setPgLocals dbActPlan AppConfig{..} claims role ApiRequest{..} = lift $ SQL.statement mempty $ SQL.dynamicallyParameterized -- To ensure `GRANT SET ON PARAMETER TO authenticator` works, the role settings must be set before the impersonated role. -- Otherwise the GRANT SET would have to be applied to the impersonated role. 
See https://github.com/PostgREST/postgrest/issues/3045 - ("select " <> intercalateSnippet ", " (searchPathSql : roleSettingsSql ++ roleSql ++ claimsSql ++ [methodSql, pathSql] ++ headersSql ++ cookiesSql ++ timezoneSql ++ funcSettingsSql ++ appSettingsSql)) + (toSnippet (rawSQL "select " <> intercalateSnippet ", " (searchPathSql : roleSettingsSql ++ roleSql ++ claimsSql ++ [methodSql, pathSql] ++ headersSql ++ cookiesSql ++ timezoneSql ++ funcSettingsSql ++ appSettingsSql))) HD.noResult configDbPreparedStatements where methodSql = setConfigWithConstantName ("request.method", iMethod) @@ -295,7 +335,7 @@ runPreReq :: AppConfig -> DbHandler () runPreReq conf = lift $ traverse_ (SQL.statement mempty . stmt) (configDbPreRequest conf) where stmt req = SQL.dynamicallyParameterized - ("select " <> fromQi req <> "()") + (toSnippet (rawSQL "select " <> fromQi req <> rawSQL "()")) HD.noResult (configDbPreparedStatements conf) diff --git a/src/PostgREST/Query/QueryBuilder.hs b/src/PostgREST/Query/QueryBuilder.hs index 33193184cf..2e8c7e1da0 100644 --- a/src/PostgREST/Query/QueryBuilder.hs +++ b/src/PostgREST/Query/QueryBuilder.hs @@ -43,15 +43,15 @@ import PostgREST.Query.SqlFragment import Protolude -readPlanToQuery :: ReadPlanTree -> SQL.Snippet -readPlanToQuery node@(Node ReadPlan{select,from=mainQi,fromAlias,where_=logicForest,order, range_=readRange, relToParent, relJoinConds, relSelect, relSpread} forest) = - "SELECT " <> +readPlanToQuery :: ReadPlanTree -> TrackedSnippet +readPlanToQuery node@(Node ReadPlan{select, from = mainQi, fromAlias, where_ = logicForest, order, range_ = readRange, relToParent, relJoinConds, relSelect, relSpread} forest) = + rawSQL "SELECT " <> intercalateSnippet ", " (selects ++ sprExtraSelects ++ joinsSelects) <> fromFrag <> intercalateSnippet " " joins <> (if null logicForest && null relJoinConds then mempty - else " WHERE " <> intercalateSnippet " AND " (map (pgFmtLogicTree qi) logicForest ++ map pgFmtJoinCondition relJoinConds)) <> " " <> + else rawSQL " WHERE " <> intercalateSnippet " AND " (map (pgFmtLogicTree qi) logicForest ++ map pgFmtJoinCondition relJoinConds)) <> rawSQL " " <> groupF qi select relSelect <> orderF qi order <> limitOffsetF readRange @@ -67,11 +67,11 @@ readPlanToQuery node@(Node ReadPlan{select,from=mainQi,fromAlias,where_=logicFor Just (ToManySpread sels _) -> (\s -> pgFmtSelectItem (maybe qi (QualifiedIdentifier "") $ fst s) $ snd s) <$> sels _ -> mempty -getJoinSelects :: ReadPlanTree -> [SQL.Snippet] +getJoinSelects :: ReadPlanTree -> [TrackedSnippet] getJoinSelects (Node ReadPlan{relSelect} _) = mapMaybe relSelectToSnippet relSelect where - relSelectToSnippet :: RelSelectField -> Maybe SQL.Snippet + relSelectToSnippet :: RelSelectField -> Maybe TrackedSnippet relSelectToSnippet fld = let aggAlias = pgFmtIdent $ rsAggAlias fld in @@ -79,13 +79,13 @@ getJoinSelects (Node ReadPlan{relSelect} _) = JsonEmbed{rsEmptyEmbed = True} -> Nothing JsonEmbed{rsSelName, rsEmbedMode = JsonObject} -> - Just $ "row_to_json(" <> aggAlias <> ".*)::jsonb AS " <> pgFmtIdent rsSelName + Just $ rawSQL "row_to_json(" <> aggAlias <> rawSQL ".*)::jsonb AS " <> pgFmtIdent rsSelName JsonEmbed{rsSelName, rsEmbedMode = JsonArray} -> - Just $ "COALESCE( " <> aggAlias <> "." <> aggAlias <> ", '[]') AS " <> pgFmtIdent rsSelName + Just $ rawSQL "COALESCE( " <> aggAlias <> rawSQL "." 
<> aggAlias <> rawSQL ", '[]') AS " <> pgFmtIdent rsSelName Spread{rsSpreadSel, rsAggAlias} -> Just $ intercalateSnippet ", " (pgFmtSpreadSelectItem rsAggAlias <$> rsSpreadSel) -getJoins :: ReadPlanTree -> [SQL.Snippet] +getJoins :: ReadPlanTree -> [TrackedSnippet] getJoins (Node _ []) = [] getJoins (Node ReadPlan{relSelect} forest) = map (\fld -> @@ -94,49 +94,49 @@ getJoins (Node ReadPlan{relSelect} forest) = in getJoin fld matchingNode ) relSelect -getJoin :: RelSelectField -> ReadPlanTree -> SQL.Snippet +getJoin :: RelSelectField -> ReadPlanTree -> TrackedSnippet getJoin fld node@(Node ReadPlan{relJoinType, relSpread} _) = let correlatedSubquery sub al cond = - " " <> (if relJoinType == Just JTInner then "INNER" else "LEFT") <> " JOIN LATERAL ( " <> sub <> " ) AS " <> al <> " ON " <> cond + rawSQL " " <> (if relJoinType == Just JTInner then rawSQL "INNER" else rawSQL "LEFT") <> rawSQL " JOIN LATERAL ( " <> sub <> rawSQL " ) AS " <> al <> rawSQL " ON " <> cond subquery = readPlanToQuery node aggAlias = pgFmtIdent $ rsAggAlias fld - selectSubqAgg = "SELECT json_agg(" <> aggAlias <> ")::jsonb AS " <> aggAlias - fromSubqAgg = " FROM (" <> subquery <> " ) AS " <> aggAlias - joinCondition = if relJoinType == Just JTInner then aggAlias <> " IS NOT NULL" else "TRUE" + selectSubqAgg = rawSQL "SELECT json_agg(" <> aggAlias <> rawSQL ")::jsonb AS " <> aggAlias + fromSubqAgg = rawSQL " FROM (" <> subquery <> rawSQL " ) AS " <> aggAlias + joinCondition = if relJoinType == Just JTInner then aggAlias <> rawSQL " IS NOT NULL" else rawSQL "TRUE" in case fld of JsonEmbed{rsEmbedMode = JsonObject} -> - correlatedSubquery subquery aggAlias "TRUE" + correlatedSubquery subquery aggAlias (rawSQL "TRUE") Spread{rsSpreadSel, rsAggAlias} -> case relSpread of Just (ToManySpread _ sprOrder) -> - let selSpread = selectSubqAgg <> (if null rsSpreadSel then mempty else ", ") <> intercalateSnippet ", " (pgFmtSpreadJoinSelectItem rsAggAlias sprOrder <$> rsSpreadSel) + let selSpread = selectSubqAgg <> (if null rsSpreadSel then mempty else rawSQL ", ") <> intercalateSnippet ", " (pgFmtSpreadJoinSelectItem rsAggAlias sprOrder <$> rsSpreadSel) in correlatedSubquery (selSpread <> fromSubqAgg) aggAlias joinCondition _ -> - correlatedSubquery subquery aggAlias "TRUE" + correlatedSubquery subquery aggAlias (rawSQL "TRUE") JsonEmbed{rsEmbedMode = JsonArray} -> correlatedSubquery (selectSubqAgg <> fromSubqAgg) aggAlias joinCondition -mutatePlanToQuery :: MutatePlan -> SQL.Snippet +mutatePlanToQuery :: MutatePlan -> TrackedSnippet mutatePlanToQuery (Insert mainQi iCols body onConflict putConditions returnings _ applyDefaults) = - "INSERT INTO " <> fromQi mainQi <> (if null iCols then " " else "(" <> cols <> ") ") <> + rawSQL "INSERT INTO " <> fromQi mainQi <> (if null iCols then rawSQL " " else rawSQL "(" <> cols <> rawSQL ") ") <> fromJsonBodyF body iCols True False applyDefaults <> -- Only used for PUT - (if null putConditions then mempty else "WHERE " <> addConfigPgrstInserted True <> " AND " <> intercalateSnippet " AND " (pgFmtLogicTree (QualifiedIdentifier mempty "pgrst_body") <$> putConditions)) <> - (if null putConditions && mergeDups then "WHERE " <> addConfigPgrstInserted True else mempty) <> + (if null putConditions then mempty else rawSQL "WHERE " <> addConfigPgrstInserted True <> rawSQL " AND " <> intercalateSnippet " AND " (pgFmtLogicTree (QualifiedIdentifier mempty "pgrst_body") <$> putConditions)) <> + (if null putConditions && mergeDups then rawSQL "WHERE " <> addConfigPgrstInserted True else mempty) <> maybe 
mempty (\(oncDo, oncCols) -> if null oncCols then mempty else - " ON CONFLICT(" <> intercalateSnippet ", " (pgFmtIdent <$> oncCols) <> ") " <> case oncDo of + rawSQL " ON CONFLICT(" <> intercalateSnippet ", " (pgFmtIdent <$> oncCols) <> rawSQL ") " <> case oncDo of IgnoreDuplicates -> - "DO NOTHING" + rawSQL "DO NOTHING" MergeDuplicates -> if null iCols - then "DO NOTHING" - else "DO UPDATE SET " <> intercalateSnippet ", " ((pgFmtIdent . cfName) <> const " = EXCLUDED." <> (pgFmtIdent . cfName) <$> iCols) <> (if null putConditions && not mergeDups then mempty else "WHERE " <> addConfigPgrstInserted False) - ) onConflict <> " " <> + then rawSQL "DO NOTHING" + else rawSQL "DO UPDATE SET " <> intercalateSnippet ", " ((pgFmtIdent . cfName) <> const (rawSQL " = EXCLUDED.") <> (pgFmtIdent . cfName) <$> iCols) <> (if null putConditions && not mergeDups then mempty else rawSQL "WHERE " <> addConfigPgrstInserted False) + ) onConflict <> rawSQL " " <> returningF mainQi returnings where cols = intercalateSnippet ", " $ pgFmtIdent . cfName <$> iCols @@ -147,72 +147,72 @@ mutatePlanToQuery (Update mainQi uCols body logicForest returnings applyDefaults -- if there are no columns we cannot do UPDATE table SET {empty}, it'd be invalid syntax -- selecting an empty resultset from mainQi gives us the column names to prevent errors when using &select= -- the select has to be based on "returnings" to make computed overloaded functions not throw - "SELECT " <> emptyBodyReturnedColumns <> " FROM " <> fromQi mainQi <> " WHERE false" + rawSQL "SELECT " <> emptyBodyReturnedColumns <> rawSQL " FROM " <> fromQi mainQi <> rawSQL " WHERE false" | otherwise = - "UPDATE " <> mainTbl <> " SET " <> cols <> " " <> + rawSQL "UPDATE " <> mainTbl <> rawSQL " SET " <> cols <> rawSQL " " <> fromJsonBodyF body uCols False False applyDefaults <> - whereLogic <> " " <> + whereLogic <> rawSQL " " <> returningF mainQi returnings where - whereLogic = if null logicForest then mempty else " WHERE " <> intercalateSnippet " AND " (pgFmtLogicTree mainQi <$> logicForest) + whereLogic = if null logicForest then mempty else rawSQL " WHERE " <> intercalateSnippet " AND " (pgFmtLogicTree mainQi <$> logicForest) mainTbl = fromQi mainQi - emptyBodyReturnedColumns = if null returnings then "NULL" else intercalateSnippet ", " (pgFmtColumn (QualifiedIdentifier mempty $ qiName mainQi) <$> returnings) - cols = intercalateSnippet ", " (pgFmtIdent . cfName <> const " = " <> pgFmtColumn (QualifiedIdentifier mempty "pgrst_body") . cfName <$> uCols) + emptyBodyReturnedColumns = if null returnings then rawSQL "NULL" else intercalateSnippet ", " (pgFmtColumn (QualifiedIdentifier mempty $ qiName mainQi) <$> returnings) + cols = intercalateSnippet ", " (pgFmtIdent . cfName <> const (rawSQL " = ") <> pgFmtColumn (QualifiedIdentifier mempty "pgrst_body") . 
cfName <$> uCols) mutatePlanToQuery (Delete mainQi logicForest returnings) = - "DELETE FROM " <> fromQi mainQi <> " " <> - whereLogic <> " " <> + rawSQL "DELETE FROM " <> fromQi mainQi <> rawSQL " " <> + whereLogic <> rawSQL " " <> returningF mainQi returnings where - whereLogic = if null logicForest then mempty else " WHERE " <> intercalateSnippet " AND " (pgFmtLogicTree mainQi <$> logicForest) + whereLogic = if null logicForest then mempty else rawSQL " WHERE " <> intercalateSnippet " AND " (pgFmtLogicTree mainQi <$> logicForest) -callPlanToQuery :: CallPlan -> PgVersion -> SQL.Snippet +callPlanToQuery :: CallPlan -> PgVersion -> TrackedSnippet callPlanToQuery (FunctionCall qi params arguments returnsScalar returnsSetOfScalar returnsCompositeAlias returnings) pgVer = - "SELECT " <> (if returnsScalar || returnsSetOfScalar then "pgrst_call.pgrst_scalar" else returnedColumns) <> " " <> + rawSQL "SELECT " <> (if returnsScalar || returnsSetOfScalar then rawSQL "pgrst_call.pgrst_scalar" else returnedColumns) <> rawSQL " " <> fromCall where jsonArgs = case arguments of DirectArgs args -> Just $ JSON.encode args JsonArgs json -> json fromCall = case params of - OnePosParam prm -> "FROM " <> callIt (singleParameter jsonArgs $ encodeUtf8 $ ppType prm) - KeyParams [] -> "FROM " <> callIt mempty + OnePosParam prm -> rawSQL "FROM " <> callIt (singleParameter jsonArgs $ encodeUtf8 $ ppType prm) + KeyParams [] -> rawSQL "FROM " <> callIt mempty KeyParams prms -> case arguments of - DirectArgs args -> "FROM " <> callIt (fmtArgs prms args) - JsonArgs json -> fromJsonBodyF json ((\p -> CoercibleField (ppName p) mempty False Nothing (ppTypeMaxLength p) Nothing Nothing False) <$> prms) False True False <> ", " <> - "LATERAL " <> callIt (fmtParams prms) + DirectArgs args -> rawSQL "FROM " <> callIt (fmtArgs prms args) + JsonArgs json -> fromJsonBodyF json ((\p -> CoercibleField (ppName p) mempty False Nothing (ppTypeMaxLength p) Nothing Nothing False) <$> prms) False True False <> rawSQL ", " <> + rawSQL "LATERAL " <> callIt (fmtParams prms) - callIt :: SQL.Snippet -> SQL.Snippet - callIt argument | pgVer < pgVersion130 && returnsCompositeAlias = "(SELECT (" <> fromQi qi <> "(" <> argument <> ")).*) pgrst_call" - | returnsScalar || returnsSetOfScalar = "(SELECT " <> fromQi qi <> "(" <> argument <> ") pgrst_scalar) pgrst_call" - | otherwise = fromQi qi <> "(" <> argument <> ") pgrst_call" + callIt :: TrackedSnippet -> TrackedSnippet + callIt argument | pgVer < pgVersion130 && returnsCompositeAlias = rawSQL "(SELECT (" <> fromQi qi <> rawSQL "(" <> argument <> rawSQL ")).*) pgrst_call" + | returnsScalar || returnsSetOfScalar = rawSQL "(SELECT " <> fromQi qi <> rawSQL "(" <> argument <> rawSQL ") pgrst_scalar) pgrst_call" + | otherwise = fromQi qi <> rawSQL "(" <> argument <> rawSQL ") pgrst_call" - fmtParams :: [RoutineParam] -> SQL.Snippet + fmtParams :: [RoutineParam] -> TrackedSnippet fmtParams prms = intercalateSnippet ", " - ((\a -> (if ppVar a then "VARIADIC " else mempty) <> pgFmtIdent (ppName a) <> " := pgrst_body." <> pgFmtIdent (ppName a)) <$> prms) + ((\a -> (if ppVar a then rawSQL "VARIADIC " else mempty) <> pgFmtIdent (ppName a) <> rawSQL " := pgrst_body." 
<> pgFmtIdent (ppName a)) <$> prms) - fmtArgs :: [RoutineParam] -> HM.HashMap Text RpcParamValue -> SQL.Snippet + fmtArgs :: [RoutineParam] -> HM.HashMap Text RpcParamValue -> TrackedSnippet fmtArgs prms args = intercalateSnippet ", " $ fmtArg <$> prms where fmtArg RoutineParam{..} = - (if ppVar then "VARIADIC " else mempty) <> + (if ppVar then rawSQL "VARIADIC " else mempty) <> pgFmtIdent ppName <> - " := " <> + rawSQL " := " <> encodeArg (HM.lookup ppName args) <> - "::" <> - SQL.sql (encodeUtf8 ppTypeMaxLength) - encodeArg :: Maybe RpcParamValue -> SQL.Snippet - encodeArg (Just (Variadic v)) = SQL.encoderAndParam (HE.nonNullable $ HE.foldableArray $ HE.nonNullable HE.text) v - encodeArg (Just (Fixed v)) = SQL.encoderAndParam (HE.nonNullable HE.unknown) $ encodeUtf8 v + rawSQL "::" <> + rawSQL (encodeUtf8 ppTypeMaxLength) + encodeArg :: Maybe RpcParamValue -> TrackedSnippet + encodeArg (Just (Variadic v)) = fromSnippet (SQL.encoderAndParam (HE.nonNullable $ HE.foldableArray $ HE.nonNullable HE.text) v) + encodeArg (Just (Fixed v)) = fromSnippet (SQL.encoderAndParam (HE.nonNullable HE.unknown) $ encodeUtf8 v) -- Currently not supported: Calling functions without some of their arguments without DEFAULT. -- We could fallback to providing this NULL value in those cases. - encodeArg Nothing = "NULL" + encodeArg Nothing = rawSQL "NULL" - returnedColumns :: SQL.Snippet + returnedColumns :: TrackedSnippet returnedColumns - | null returnings = "*" + | null returnings = rawSQL "*" | otherwise = intercalateSnippet ", " (pgFmtColumn (QualifiedIdentifier mempty "pgrst_call") <$> returnings) -- | SQL query meant for COUNTing the root node of the Tree. @@ -223,12 +223,12 @@ callPlanToQuery (FunctionCall qi params arguments returnsScalar returnsSetOfScal -- For this case, we use a WHERE EXISTS instead of an INNER JOIN on the count query. -- See https://github.com/PostgREST/postgrest/issues/2009#issuecomment-977473031 -- Only for the nodes that have an INNER JOIN linked to the root level. 
-readPlanToCountQuery :: ReadPlanTree -> SQL.Snippet -readPlanToCountQuery (Node ReadPlan{from=mainQi, fromAlias=tblAlias, where_=logicForest, relToParent=rel, relJoinConds} forest) = - "SELECT 1 " <> fromFrag <> +readPlanToCountQuery :: ReadPlanTree -> TrackedSnippet +readPlanToCountQuery (Node ReadPlan{from = mainQi, fromAlias = tblAlias, where_ = logicForest, relToParent = rel, relJoinConds} forest) = + rawSQL "SELECT 1 " <> fromFrag <> (if null logicForest && null relJoinConds && null subQueries then mempty - else " WHERE " ) <> + else rawSQL " WHERE " ) <> intercalateSnippet " AND " ( map (pgFmtLogicTreeCount qi) logicForest ++ map pgFmtJoinCondition relJoinConds ++ @@ -238,26 +238,26 @@ readPlanToCountQuery (Node ReadPlan{from=mainQi, fromAlias=tblAlias, where_=logi qi = getQualifiedIdentifier rel mainQi tblAlias fromFrag = fromF rel mainQi tblAlias subQueries = foldr existsSubquery [] forest - existsSubquery :: ReadPlanTree -> [SQL.Snippet] -> [SQL.Snippet] + existsSubquery :: ReadPlanTree -> [TrackedSnippet] -> [TrackedSnippet] existsSubquery readReq@(Node ReadPlan{relJoinType=joinType} _) rest = if joinType == Just JTInner - then ("EXISTS (" <> readPlanToCountQuery readReq <> " )"):rest + then (rawSQL "EXISTS (" <> readPlanToCountQuery readReq <> rawSQL " )"):rest else rest findNullEmbedRel fld = find (\(Node ReadPlan{relAggAlias} _) -> fld == relAggAlias) forest -- https://github.com/PostgREST/postgrest/pull/2930#discussion_r1325293698 - pgFmtLogicTreeCount :: QualifiedIdentifier -> CoercibleLogicTree -> SQL.Snippet - pgFmtLogicTreeCount qiCount (CoercibleExpr hasNot op frst) = SQL.sql notOp <> " (" <> intercalateSnippet (opSql op) (pgFmtLogicTreeCount qiCount <$> frst) <> ")" + pgFmtLogicTreeCount :: QualifiedIdentifier -> CoercibleLogicTree -> TrackedSnippet + pgFmtLogicTreeCount qiCount (CoercibleExpr hasNot op frst) = rawSQL notOp <> rawSQL " (" <> intercalateSnippet (opSql op) (pgFmtLogicTreeCount qiCount <$> frst) <> rawSQL ")" where - notOp = if hasNot then "NOT" else mempty + notOp = if hasNot then "NOT" else mempty opSql And = " AND " opSql Or = " OR " pgFmtLogicTreeCount _ (CoercibleStmnt (CoercibleFilterNullEmbed hasNot fld)) = - maybe mempty (\x -> (if not hasNot then "NOT " else mempty) <> "EXISTS (" <> readPlanToCountQuery x <> ")") (findNullEmbedRel fld) + maybe mempty (\x -> (if not hasNot then rawSQL "NOT " else mempty) <> rawSQL "EXISTS (" <> readPlanToCountQuery x <> rawSQL ")") (findNullEmbedRel fld) pgFmtLogicTreeCount qiCount (CoercibleStmnt flt) = pgFmtFilter qiCount flt -limitedQuery :: SQL.Snippet -> Maybe Integer -> SQL.Snippet -limitedQuery query maxRows = query <> SQL.sql (maybe mempty (\x -> " LIMIT " <> BS.pack (show x)) maxRows) +limitedQuery :: TrackedSnippet -> Maybe Integer -> TrackedSnippet +limitedQuery query maxRows = query <> rawSQL (maybe mempty (\x -> " LIMIT " <> BS.pack (show x)) maxRows) -- TODO refactor so this function is uneeded and ComputedRelationship QualifiedIdentifier comes from the ReadPlan type getQualifiedIdentifier :: Maybe Relationship -> QualifiedIdentifier -> Maybe Alias -> QualifiedIdentifier @@ -266,14 +266,14 @@ getQualifiedIdentifier rel mainQi tblAlias = case rel of _ -> maybe mainQi (QualifiedIdentifier mempty) tblAlias -- FROM clause plus implicit joins -fromF :: Maybe Relationship -> QualifiedIdentifier -> Maybe Alias -> SQL.Snippet -fromF rel mainQi tblAlias = " FROM " <> +fromF :: Maybe Relationship -> QualifiedIdentifier -> Maybe Alias -> TrackedSnippet +fromF rel mainQi tblAlias = rawSQL " FROM " <> (case rel 
of -- Due to the use of CTEs on RPC, we need to cast the parameter to the table name in case of function overloading. -- See https://github.com/PostgREST/postgrest/issues/2963#issuecomment-1736557386 - Just ComputedRelationship{relFunction,relTableAlias,relTable} -> fromQi relFunction <> "(" <> pgFmtIdent (qiName relTableAlias) <> "::" <> fromQi relTable <> ")" + Just ComputedRelationship{relFunction,relTableAlias,relTable} -> fromQi relFunction <> rawSQL "(" <> pgFmtIdent (qiName relTableAlias) <> rawSQL "::" <> fromQi relTable <> rawSQL ")" _ -> fromQi mainQi) <> - maybe mempty (\a -> " AS " <> pgFmtIdent a) tblAlias <> + maybe mempty (\a -> rawSQL " AS " <> pgFmtIdent a) tblAlias <> (case rel of - Just Relationship{relCardinality=M2M Junction{junTable=jt}} -> ", " <> fromQi jt + Just Relationship{relCardinality=M2M Junction{junTable=jt}} -> rawSQL ", " <> fromQi jt _ -> mempty) diff --git a/src/PostgREST/Query/SqlFragment.hs b/src/PostgREST/Query/SqlFragment.hs index 0a36365b78..17aed70adb 100644 --- a/src/PostgREST/Query/SqlFragment.hs +++ b/src/PostgREST/Query/SqlFragment.hs @@ -6,7 +6,13 @@ Module : PostgREST.Query.SqlFragment Description : Helper functions for PostgREST.QueryBuilder. -} module PostgREST.Query.SqlFragment - ( noLocationF + ( TrackedSnippet (..) + , emptyTracked + , rawSQL + , fromSnippet + , toSnippet + , ToTrackedSnippet (..) + , noLocationF , handlerF , countF , groupF @@ -96,52 +102,95 @@ import PostgREST.SchemaCache.Routine (MediaHandler (..), import Protolude hiding (Sum, cast) +-- | Wrapper on top of Hasql.Snippet which is also able to track the parameters encoded into the query +data TrackedSnippet = TrackedSnippet + { snippet :: SQL.Snippet + , params :: [Maybe ByteString] + } + +-- | Create an empty tracked snippet +emptyTracked :: TrackedSnippet +emptyTracked = TrackedSnippet mempty [] + +-- | Create a tracked snippet from raw SQL with no parameters +rawSQL :: ByteString -> TrackedSnippet +rawSQL sql = TrackedSnippet (SQL.sql sql) [] + +-- | Convert SQL.Snippet to TrackedSnippet (for backward compatibility) +fromSnippet :: SQL.Snippet -> TrackedSnippet +fromSnippet snip = TrackedSnippet snip [] + +-- | Convert TrackedSnippet to SQL.Snippet (for backward compatibility) +toSnippet :: TrackedSnippet -> SQL.Snippet +toSnippet = snippet + +-- | Helper to allow SQL.Snippet and TrackedSnippet to coexist during transition +class ToTrackedSnippet a where + toTrackedSnippet :: a -> TrackedSnippet + +instance ToTrackedSnippet SQL.Snippet where + toTrackedSnippet = fromSnippet + +instance ToTrackedSnippet TrackedSnippet where + toTrackedSnippet t = t + +-- | Concatenation for TrackedSnippet +instance Semigroup TrackedSnippet where + TrackedSnippet s1 p1 <> TrackedSnippet s2 p2 = TrackedSnippet (s1 <> s2) (p1 <> p2) + +-- | Empty element for TrackedSnippet +instance Monoid TrackedSnippet where + mempty = emptyTracked + sourceCTEName :: Text sourceCTEName = "pgrst_source" -sourceCTE :: SQL.Snippet -sourceCTE = "pgrst_source" +sourceCTE :: TrackedSnippet +sourceCTE = rawSQL "pgrst_source" -noLocationF :: SQL.Snippet -noLocationF = "array[]::text[]" +noLocationF :: TrackedSnippet +noLocationF = rawSQL "array[]::text[]" -simpleOperator :: SimpleOperator -> SQL.Snippet +simpleOperator :: SimpleOperator -> TrackedSnippet simpleOperator = \case - OpNotEqual -> "<>" - OpContains -> "@>" - OpContained -> "<@" - OpOverlap -> "&&" - OpStrictlyLeft -> "<<" - OpStrictlyRight -> ">>" - OpNotExtendsRight -> "&<" - OpNotExtendsLeft -> "&>" - OpAdjacent -> "-|-" - -quantOperator :: QuantOperator 
-> SQL.Snippet + OpNotEqual -> rawSQL "<>" + OpContains -> rawSQL "@>" + OpContained -> rawSQL "<@" + OpOverlap -> rawSQL "&&" + OpStrictlyLeft -> rawSQL "<<" + OpStrictlyRight -> rawSQL ">>" + OpNotExtendsRight -> rawSQL "&<" + OpNotExtendsLeft -> rawSQL "&>" + OpAdjacent -> rawSQL "-|-" + +quantOperator :: QuantOperator -> TrackedSnippet quantOperator = \case - OpEqual -> "=" - OpGreaterThanEqual -> ">=" - OpGreaterThan -> ">" - OpLessThanEqual -> "<=" - OpLessThan -> "<" - OpLike -> "like" - OpILike -> "ilike" - OpMatch -> "~" - OpIMatch -> "~*" - -ftsOperator :: FtsOperator -> SQL.Snippet + OpEqual -> rawSQL "=" + OpGreaterThanEqual -> rawSQL ">=" + OpGreaterThan -> rawSQL ">" + OpLessThanEqual -> rawSQL "<=" + OpLessThan -> rawSQL "<" + OpLike -> rawSQL "like" + OpILike -> rawSQL "ilike" + OpMatch -> rawSQL "~" + OpIMatch -> rawSQL "~*" + +ftsOperator :: FtsOperator -> TrackedSnippet ftsOperator = \case - FilterFts -> "@@ to_tsquery" - FilterFtsPlain -> "@@ plainto_tsquery" - FilterFtsPhrase -> "@@ phraseto_tsquery" - FilterFtsWebsearch -> "@@ websearch_to_tsquery" + FilterFts -> rawSQL "@@ to_tsquery" + FilterFtsPlain -> rawSQL "@@ plainto_tsquery" + FilterFtsPhrase -> rawSQL "@@ phraseto_tsquery" + FilterFtsWebsearch -> rawSQL "@@ websearch_to_tsquery" -singleParameter :: Maybe LBS.ByteString -> ByteString -> SQL.Snippet +singleParameter :: Maybe LBS.ByteString -> ByteString -> TrackedSnippet singleParameter body typ = - if typ == "bytea" - -- TODO: Hasql fails when using HE.unknown with bytea(pg tries to utf8 encode). - then SQL.encoderAndParam (HE.nullable HE.bytea) (LBS.toStrict <$> body) - else SQL.encoderAndParam (HE.nullable HE.unknown) (LBS.toStrict <$> body) <> "::" <> SQL.sql typ + let strictBody = LBS.toStrict <$> body + snippet = + if typ == "bytea" + -- TODO: Hasql fails when using HE.unknown with bytea(pg tries to utf8 encode). + then SQL.encoderAndParam (HE.nullable HE.bytea) strictBody + else SQL.encoderAndParam (HE.nullable HE.unknown) strictBody <> "::" <> SQL.sql typ + in TrackedSnippet snippet [strictBody] -- Here we build the pg array literal, e.g '{"Hebdon, John","Other","Another"}', manually. -- This is necessary to pass an "unknown" array and let pg infer the type. @@ -157,8 +206,8 @@ pgBuildArrayLiteral vals = "{" <> T.intercalate "," (escaped <$> vals) <> "}" -- TODO: refactor by following https://github.com/PostgREST/postgrest/pull/1631#issuecomment-711070833 -pgFmtIdent :: Text -> SQL.Snippet -pgFmtIdent x = SQL.sql . encodeUtf8 $ escapeIdent x +pgFmtIdent :: Text -> TrackedSnippet +pgFmtIdent x = rawSQL . encodeUtf8 $ escapeIdent x escapeIdent :: Text -> Text escapeIdent x = "\"" <> T.replace "\"" "\"\"" (trimNullChars x) <> "\"" @@ -184,53 +233,57 @@ trimNullChars = T.takeWhile (/= '\x0') escapeIdentList :: [Text] -> ByteString escapeIdentList schemas = BS.intercalate ", " $ encodeUtf8 . 
escapeIdent <$> schemas -asCsvF :: SQL.Snippet -asCsvF = asCsvHeaderF <> " || '\n' || " <> asCsvBodyF +asCsvF :: TrackedSnippet +asCsvF = asCsvHeaderF <> rawSQL " || '\n' || " <> asCsvBodyF where asCsvHeaderF = - "(SELECT coalesce(string_agg(a.k, ','), '')" <> - " FROM (" <> - " SELECT json_object_keys(r)::text as k" <> - " FROM ( " <> - " SELECT row_to_json(hh) as r from " <> sourceCTE <> " as hh limit 1" <> - " ) s" <> - " ) a" <> - ")" - asCsvBodyF = "coalesce(string_agg(substring(_postgrest_t::text, 2, length(_postgrest_t::text) - 2), '\n'), '')" - -addNullsToSnip :: Bool -> SQL.Snippet -> SQL.Snippet -addNullsToSnip strip snip = - if strip then "json_strip_nulls(" <> snip <> ")" else snip - -asJsonSingleF :: Maybe Routine -> Bool -> SQL.Snippet + rawSQL "(SELECT coalesce(string_agg(a.k, ','), '')" <> + rawSQL " FROM (" <> + rawSQL " SELECT json_object_keys(r)::text as k" <> + rawSQL " FROM ( " <> + rawSQL " SELECT row_to_json(hh) as r from " <> sourceCTE <> rawSQL " as hh limit 1" <> + rawSQL " ) s" <> + rawSQL " ) a" <> + rawSQL ")" + asCsvBodyF = rawSQL "coalesce(string_agg(substring(_postgrest_t::text, 2, length(_postgrest_t::text) - 2), '\n'), '')" + +addNullsToSnip :: Bool -> TrackedSnippet -> TrackedSnippet +addNullsToSnip strip (TrackedSnippet snip params) = + if strip + then + TrackedSnippet (SQL.sql "json_strip_nulls(" <> snip <> SQL.sql ")") params + else + TrackedSnippet snip params + +asJsonSingleF :: Maybe Routine -> Bool -> TrackedSnippet asJsonSingleF rout strip - | returnsScalar = "coalesce(" <> addNullsToSnip strip "json_agg(_postgrest_t.pgrst_scalar)->0" <> ", 'null')" - | otherwise = "coalesce(" <> addNullsToSnip strip "json_agg(_postgrest_t)->0" <> ", 'null')" + | returnsScalar = rawSQL "coalesce(" <> addNullsToSnip strip (rawSQL "json_agg(_postgrest_t.pgrst_scalar)->0") <> rawSQL ", 'null')" + | otherwise = rawSQL "coalesce(" <> addNullsToSnip strip (rawSQL "json_agg(_postgrest_t)->0") <> rawSQL ", 'null')" where returnsScalar = maybe False funcReturnsScalar rout -asJsonF :: Maybe Routine -> Bool -> SQL.Snippet +asJsonF :: Maybe Routine -> Bool -> TrackedSnippet asJsonF rout strip - | returnsSingleComposite = "coalesce(" <> addNullsToSnip strip "json_agg(_postgrest_t)->0" <> ", 'null')" - | returnsScalar = "coalesce(" <> addNullsToSnip strip "json_agg(_postgrest_t.pgrst_scalar)->0" <> ", 'null')" - | returnsSetOfScalar = "coalesce(" <> addNullsToSnip strip "json_agg(_postgrest_t.pgrst_scalar)" <> ", '[]')" - | otherwise = "coalesce(" <> addNullsToSnip strip "json_agg(_postgrest_t)" <> ", '[]')" + | returnsSingleComposite = rawSQL "coalesce(" <> addNullsToSnip strip (rawSQL "json_agg(_postgrest_t)->0") <> rawSQL ", 'null')" + | returnsScalar = rawSQL "coalesce(" <> addNullsToSnip strip (rawSQL "json_agg(_postgrest_t.pgrst_scalar)->0") <> rawSQL ", 'null')" + | returnsSetOfScalar = rawSQL "coalesce(" <> addNullsToSnip strip (rawSQL "json_agg(_postgrest_t.pgrst_scalar)") <> rawSQL ", '[]')" + | otherwise = rawSQL "coalesce(" <> addNullsToSnip strip (rawSQL "json_agg(_postgrest_t)") <> rawSQL ", '[]')" where (returnsSingleComposite, returnsScalar, returnsSetOfScalar) = case rout of Just r -> (funcReturnsSingleComposite r, funcReturnsScalar r, funcReturnsSetOfScalar r) Nothing -> (False, False, False) -asGeoJsonF :: SQL.Snippet -asGeoJsonF = "json_build_object('type', 'FeatureCollection', 'features', coalesce(json_agg(ST_AsGeoJSON(_postgrest_t)::json), '[]'))" +asGeoJsonF :: TrackedSnippet +asGeoJsonF = rawSQL "json_build_object('type', 'FeatureCollection', 'features', 
coalesce(json_agg(ST_AsGeoJSON(_postgrest_t)::json), '[]'))" -customFuncF :: Maybe Routine -> QualifiedIdentifier -> RelIdentifier -> SQL.Snippet +customFuncF :: Maybe Routine -> QualifiedIdentifier -> RelIdentifier -> TrackedSnippet customFuncF rout funcQi _ - | (funcReturnsScalar <$> rout) == Just True = fromQi funcQi <> "(_postgrest_t.pgrst_scalar)" -customFuncF _ funcQi RelAnyElement = fromQi funcQi <> "(_postgrest_t)" -customFuncF _ funcQi (RelId target) = fromQi funcQi <> "(_postgrest_t::" <> fromQi target <> ")" + | (funcReturnsScalar <$> rout) == Just True = fromQi funcQi <> rawSQL "(_postgrest_t.pgrst_scalar)" +customFuncF _ funcQi RelAnyElement = fromQi funcQi <> rawSQL "(_postgrest_t)" +customFuncF _ funcQi (RelId target) = fromQi funcQi <> rawSQL "(_postgrest_t::" <> fromQi target <> rawSQL ")" -locationF :: [Text] -> SQL.Snippet -locationF pKeys = SQL.sql $ encodeUtf8 [trimming|( +locationF :: [Text] -> TrackedSnippet +locationF pKeys = rawSQL $ encodeUtf8 [trimming|( WITH data AS (SELECT row_to_json(_) AS row FROM ${sourceCTEName} AS _ LIMIT 1) SELECT array_agg(json_data.key || '=' || coalesce('eq.' || json_data.value, 'is.null')) FROM data CROSS JOIN json_each_text(data.row) AS json_data @@ -239,121 +292,121 @@ locationF pKeys = SQL.sql $ encodeUtf8 [trimming|( where fmtPKeys = T.intercalate "','" pKeys -fromQi :: QualifiedIdentifier -> SQL.Snippet -fromQi t = (if T.null s then mempty else pgFmtIdent s <> ".") <> pgFmtIdent n +fromQi :: QualifiedIdentifier -> TrackedSnippet +fromQi t = if T.null s then pgFmtIdent n else pgFmtIdent s <> rawSQL "." <> pgFmtIdent n where n = qiName t s = qiSchema t -pgFmtColumn :: QualifiedIdentifier -> Text -> SQL.Snippet -pgFmtColumn table "*" = fromQi table <> ".*" -pgFmtColumn table c = fromQi table <> "." <> pgFmtIdent c +pgFmtColumn :: QualifiedIdentifier -> Text -> TrackedSnippet +pgFmtColumn table "*" = fromQi table <> rawSQL ".*" +pgFmtColumn table c = fromQi table <> rawSQL "." <> pgFmtIdent c -pgFmtCallUnary :: Text -> SQL.Snippet -> SQL.Snippet -pgFmtCallUnary f x = SQL.sql (encodeUtf8 f) <> "(" <> x <> ")" +pgFmtCallUnary :: Text -> TrackedSnippet -> TrackedSnippet +pgFmtCallUnary f (TrackedSnippet x params) = TrackedSnippet (SQL.sql (encodeUtf8 f) <> "(" <> x <> ")") params -pgFmtField :: QualifiedIdentifier -> CoercibleField -> SQL.Snippet +pgFmtField :: QualifiedIdentifier -> CoercibleField -> TrackedSnippet pgFmtField table cf = case cfToTsVector cf of - Just (ToTsVector lang) -> "to_tsvector(" <> pgFmtFtsLang lang <> fmtFld <> ")" + Just (ToTsVector lang) -> rawSQL "to_tsvector(" <> pgFmtFtsLang lang <> fmtFld <> rawSQL ")" _ -> fmtFld where fmtFld = case cf of CoercibleField{cfFullRow=True} -> pgFmtIdent (qiName table) CoercibleField{cfName=fn, cfJsonPath=[]} -> pgFmtColumn table fn - CoercibleField{cfName=fn, cfToJson=doToJson, cfJsonPath=jp} | doToJson -> "to_jsonb(" <> pgFmtColumn table fn <> ")" <> pgFmtJsonPath jp + CoercibleField{cfName=fn, cfToJson=doToJson, cfJsonPath=jp} | doToJson -> rawSQL "to_jsonb(" <> pgFmtColumn table fn <> rawSQL ")" <> pgFmtJsonPath jp | otherwise -> pgFmtColumn table fn <> pgFmtJsonPath jp -- Select the value of a named element from a table, applying its optional coercion mapping if any. 
-pgFmtTableCoerce :: QualifiedIdentifier -> CoercibleField -> SQL.Snippet +pgFmtTableCoerce :: QualifiedIdentifier -> CoercibleField -> TrackedSnippet pgFmtTableCoerce table fld@(CoercibleField{cfTransform=(Just formatterProc)}) = pgFmtCallUnary formatterProc (pgFmtField table fld) pgFmtTableCoerce table f = pgFmtField table f -- | Like the previous but now we just have a name so no namespace or JSON paths. -pgFmtCoerceNamed :: CoercibleField -> SQL.Snippet -pgFmtCoerceNamed CoercibleField{cfName=fn, cfTransform=(Just formatterProc)} = pgFmtCallUnary formatterProc (pgFmtIdent fn) <> " AS " <> pgFmtIdent fn +pgFmtCoerceNamed :: CoercibleField -> TrackedSnippet +pgFmtCoerceNamed CoercibleField{cfName=fn, cfTransform=(Just formatterProc)} = pgFmtCallUnary formatterProc (pgFmtIdent fn) <> rawSQL " AS " <> pgFmtIdent fn pgFmtCoerceNamed CoercibleField{cfName=fn} = pgFmtIdent fn -pgFmtSelectItem :: QualifiedIdentifier -> CoercibleSelectField -> SQL.Snippet +pgFmtSelectItem :: QualifiedIdentifier -> CoercibleSelectField -> TrackedSnippet pgFmtSelectItem table CoercibleSelectField{csField=fld, csAggFunction=agg, csAggCast=aggCast, csCast=cast, csAlias=alias} = pgFmtApplyAggregate agg aggCast (pgFmtApplyCast cast (pgFmtTableCoerce table fld)) <> pgFmtAs alias -pgFmtSpreadSelectItem :: Alias -> SpreadSelectField -> SQL.Snippet +pgFmtSpreadSelectItem :: Alias -> SpreadSelectField -> TrackedSnippet pgFmtSpreadSelectItem aggAlias SpreadSelectField{ssSelName, ssSelAggFunction, ssSelAggCast, ssSelAlias} = pgFmtApplyAggregate ssSelAggFunction ssSelAggCast (pgFmtFullSelName aggAlias ssSelName) <> pgFmtAs ssSelAlias -pgFmtApplyAggregate :: Maybe AggregateFunction -> Maybe Cast -> SQL.Snippet -> SQL.Snippet +pgFmtApplyAggregate :: Maybe AggregateFunction -> Maybe Cast -> TrackedSnippet -> TrackedSnippet pgFmtApplyAggregate Nothing _ snippet = snippet -pgFmtApplyAggregate (Just agg) aggCast snippet = - pgFmtApplyCast aggCast aggregatedSnippet +pgFmtApplyAggregate (Just agg) aggCast (TrackedSnippet snippet params) = + pgFmtApplyCast aggCast (TrackedSnippet aggregatedSnippet params) where convertAggFunction :: AggregateFunction -> SQL.Snippet -- Convert from e.g. Sum (the data type) to SUM convertAggFunction = SQL.sql . BS.map toUpper . BS.pack . show aggregatedSnippet = convertAggFunction agg <> "(" <> snippet <> ")" -pgFmtSpreadJoinSelectItem :: Alias -> [CoercibleOrderTerm] -> SpreadSelectField -> SQL.Snippet +pgFmtSpreadJoinSelectItem :: Alias -> [CoercibleOrderTerm] -> SpreadSelectField -> TrackedSnippet pgFmtSpreadJoinSelectItem aggAlias order SpreadSelectField{ssSelName, ssSelAlias} = - "COALESCE(json_agg(" <> fmtField <> " " <> fmtOrder <> "),'[]')::jsonb" <> " AS " <> fmtAlias + rawSQL "COALESCE(json_agg(" <> fmtField <> rawSQL " " <> fmtOrder <> rawSQL "),'[]')::jsonb" <> rawSQL " AS " <> fmtAlias where fmtField = pgFmtFullSelName aggAlias ssSelName fmtOrder = orderF (QualifiedIdentifier "" aggAlias) order fmtAlias = pgFmtIdent (fromMaybe ssSelName ssSelAlias) -pgFmtApplyCast :: Maybe Cast -> SQL.Snippet -> SQL.Snippet +pgFmtApplyCast :: Maybe Cast -> TrackedSnippet -> TrackedSnippet pgFmtApplyCast Nothing snippet = snippet -- Ideally we'd quote the cast with "pgFmtIdent cast". However, that would invalidate common casts such as "int", "bigint", etc. -- Try doing: `select 1::"bigint"` - it'll err, using "int8" will work though. There's some parser magic that pg does that's invalidated when quoting. -- Not quoting should be fine, we validate the input on Parsers. 
-pgFmtApplyCast (Just cast) snippet = "CAST( " <> snippet <> " AS " <> SQL.sql (encodeUtf8 cast) <> " )" +pgFmtApplyCast (Just cast) (TrackedSnippet snippet params) = TrackedSnippet (SQL.sql "CAST( " <> snippet <> SQL.sql " AS " <> SQL.sql (encodeUtf8 cast) <> SQL.sql " )") params -pgFmtFullSelName :: Alias -> FieldName -> SQL.Snippet +pgFmtFullSelName :: Alias -> FieldName -> TrackedSnippet pgFmtFullSelName aggAlias fieldName = case fieldName of - "*" -> pgFmtIdent aggAlias <> ".*" - _ -> pgFmtIdent aggAlias <> "." <> pgFmtIdent fieldName + "*" -> pgFmtIdent aggAlias <> rawSQL ".*" + _ -> pgFmtIdent aggAlias <> rawSQL "." <> pgFmtIdent fieldName -- TODO: At this stage there shouldn't be a Maybe since ApiRequest should ensure that an INSERT/UPDATE has a body -fromJsonBodyF :: Maybe LBS.ByteString -> [CoercibleField] -> Bool -> Bool -> Bool -> SQL.Snippet +fromJsonBodyF :: Maybe LBS.ByteString -> [CoercibleField] -> Bool -> Bool -> Bool -> TrackedSnippet fromJsonBodyF body fields includeSelect includeLimitOne includeDefaults = - (if includeSelect then "SELECT " <> namedCols <> " " else mempty) <> - "FROM (SELECT " <> jsonPlaceHolder <> " AS json_data) pgrst_payload, " <> + (if includeSelect then rawSQL "SELECT " <> namedCols <> rawSQL " " else emptyTracked) <> + rawSQL "FROM (SELECT " <> jsonPlaceHolder <> rawSQL " AS json_data) pgrst_payload, " <> (if includeDefaults then if isJsonObject - then "LATERAL (SELECT " <> defsJsonb <> " || pgrst_payload.json_data AS val) pgrst_json_defs, " - else "LATERAL (SELECT jsonb_agg(" <> defsJsonb <> " || elem) AS val from jsonb_array_elements(pgrst_payload.json_data) elem) pgrst_json_defs, " - else mempty) <> - "LATERAL (SELECT " <> parsedCols <> " FROM " <> + then rawSQL "LATERAL (SELECT " <> defsJsonb <> rawSQL " || pgrst_payload.json_data AS val) pgrst_json_defs, " + else rawSQL "LATERAL (SELECT jsonb_agg(" <> defsJsonb <> rawSQL " || elem) AS val from jsonb_array_elements(pgrst_payload.json_data) elem) pgrst_json_defs, " + else emptyTracked ) <> + rawSQL "LATERAL (SELECT " <> parsedCols <> rawSQL " FROM " <> (if null fields -- when json keys are empty, e.g. when payload is `{}` or `[{}, {}]` - then SQL.sql $ + then if isJsonObject - then "(values(1)) _ " -- only 1 row for an empty json object '{}' - else jsonArrayElementsF <> "(" <> finalBodyF <> ") _ " -- extract rows of a json array of empty objects `[{}, {}]` - else jsonToRecordsetF <> "(" <> SQL.sql finalBodyF <> ") AS _(" <> typedCols <> ") " <> if includeLimitOne then "LIMIT 1" else mempty + then rawSQL "(values(1)) _ " -- only 1 row for an empty json object '{}' + else rawSQL (encodeUtf8 jsonArrayElementsF) <> rawSQL "(" <> rawSQL (encodeUtf8 finalBodyF) <> rawSQL ") _ " -- extract rows of a json array of empty objects `[{}, {}]` + else rawSQL (encodeUtf8 jsonToRecordsetF) <> rawSQL "(" <> rawSQL (encodeUtf8 finalBodyF) <> rawSQL ") AS _(" <> typedCols <> rawSQL ") " <> if includeLimitOne then rawSQL "LIMIT 1" else emptyTracked ) <> - ") pgrst_body " + rawSQL ") pgrst_body " where - namedCols = intercalateSnippet ", " $ fromQi . QualifiedIdentifier "pgrst_body" . cfName <$> fields + namedCols = intercalateSnippet ", " $ fromQi . QualifiedIdentifier "pgrst_body" . cfName <$> fields parsedCols = intercalateSnippet ", " $ pgFmtCoerceNamed <$> fields - typedCols = intercalateSnippet ", " $ pgFmtIdent . cfName <> const " " <> SQL.sql . encodeUtf8 . 
cfIRType <$> fields - defsJsonb = SQL.sql $ "jsonb_build_object(" <> BS.intercalate "," fieldsWDefaults <> ")" - fieldsWDefaults = mapMaybe (\case - CoercibleField{cfName=nam, cfDefault=Just def} -> Just $ encodeUtf8 (pgFmtLit nam <> ", " <> def) - CoercibleField{cfDefault=Nothing} -> Nothing + typedCols = intercalateSnippet ", " $ pgFmtIdent . cfName <> const (rawSQL " ") <> rawSQL . encodeUtf8 . cfIRType <$> fields + defsJsonb = rawSQL $ "jsonb_build_object(" <> BS.intercalate "," fieldsWDefaults <> ")" + fieldsWDefaults = mapMaybe ( \case + CoercibleField{cfName = nam, cfDefault = Just def} -> Just $ encodeUtf8 (pgFmtLit nam <> ", " <> def) + CoercibleField{cfDefault = Nothing} -> Nothing ) fields (finalBodyF, jsonArrayElementsF, jsonToRecordsetF) = if includeDefaults then ("pgrst_json_defs.val", "jsonb_array_elements", if isJsonObject then "jsonb_to_record" else "jsonb_to_recordset") else ("pgrst_payload.json_data", "json_array_elements", if isJsonObject then "json_to_record" else "json_to_recordset") - jsonPlaceHolder = SQL.encoderAndParam (HE.nullable $ if includeDefaults then HE.jsonbLazyBytes else HE.jsonLazyBytes) body + jsonPlaceHolder = TrackedSnippet (SQL.encoderAndParam (HE.nullable $ if includeDefaults then HE.jsonbLazyBytes else HE.jsonLazyBytes) body) [LBS.toStrict <$> body] isJsonObject = -- light validation as pg's json_to_record(set) already validates that the body is valid JSON. We just need to know whether the body looks like an object or not. let insignificantWhitespace = [32,9,10,13] --" \t\n\r" [32,9,10,13] https://datatracker.ietf.org/doc/html/rfc8259#section-2 in LBS.take 1 (LBS.dropWhile (`elem` insignificantWhitespace) (fromMaybe mempty body)) == "{" -pgFmtOrderTerm :: QualifiedIdentifier -> CoercibleOrderTerm -> SQL.Snippet +pgFmtOrderTerm :: QualifiedIdentifier -> CoercibleOrderTerm -> TrackedSnippet pgFmtOrderTerm qi ot = - fmtOTerm ot <> " " <> - SQL.sql (BS.unwords [ + fmtOTerm ot <> rawSQL " " <> + rawSQL ( BS.unwords [ maybe mempty direction $ coDirection ot, maybe mempty nullOrder $ coNullOrder ot]) where @@ -368,27 +421,27 @@ pgFmtOrderTerm qi ot = nullOrder OrderNullsLast = "NULLS LAST" -- | Interpret a literal in the way the planner indicated through the CoercibleField. -pgFmtUnknownLiteralForField :: SQL.Snippet -> CoercibleField -> SQL.Snippet +pgFmtUnknownLiteralForField :: TrackedSnippet -> CoercibleField -> TrackedSnippet pgFmtUnknownLiteralForField value CoercibleField{cfTransform=(Just parserProc)} = pgFmtCallUnary parserProc value -- But when no transform is requested, we just use the literal as-is. pgFmtUnknownLiteralForField value _ = value -- | Array version of the above, used by ANY(). -pgFmtArrayLiteralForField :: [Text] -> CoercibleField -> SQL.Snippet +pgFmtArrayLiteralForField :: [Text] -> CoercibleField -> TrackedSnippet -- When a transformation is requested, we need to apply the transformation to each element of the array. This could be done by just making a query with `parser(value)` for each value, but may lead to huge query lengths. Imagine `data_representations.color_from_text('...'::text)` for repeated for a hundred values. Instead we use `unnest()` to unpack a standard array literal and then apply the transformation to each element, like a map. -- Note the literals will be treated as text since in every case when we use ANY() the parameters are textual (coming from a query string). We want to rely on the `text->domain` parser to do the right thing. 
-pgFmtArrayLiteralForField values CoercibleField{cfTransform=(Just parserProc)} = SQL.sql "(SELECT " <> pgFmtCallUnary parserProc (SQL.sql "unnest(" <> unknownLiteral (pgBuildArrayLiteral values) <> "::text[])") <> ")" +pgFmtArrayLiteralForField values CoercibleField{cfTransform = (Just parserProc)} = rawSQL "(SELECT " <> pgFmtCallUnary parserProc (rawSQL "unnest(" <> unknownLiteral (pgBuildArrayLiteral values) <> rawSQL "::text[])") <> rawSQL ")" -- When no transformation is requested, we don't need a subquery. pgFmtArrayLiteralForField values _ = unknownLiteral (pgBuildArrayLiteral values) -pgFmtFilter :: QualifiedIdentifier -> CoercibleFilter -> SQL.Snippet -pgFmtFilter _ (CoercibleFilterNullEmbed hasNot fld) = pgFmtIdent fld <> " IS " <> (if not hasNot then "NOT " else mempty) <> "DISTINCT FROM NULL" -pgFmtFilter _ (CoercibleFilter _ (NoOpExpr _)) = mempty -- TODO unreachable because NoOpExpr is filtered on QueryParams -pgFmtFilter table (CoercibleFilter fld (OpExpr hasNot oper)) = notOp <> " " <> pgFmtField table fld <> case oper of - Op op val -> " " <> simpleOperator op <> " " <> pgFmtUnknownLiteralForField (unknownLiteral val) fld +pgFmtFilter :: QualifiedIdentifier -> CoercibleFilter -> TrackedSnippet +pgFmtFilter _ (CoercibleFilterNullEmbed hasNot fld) = pgFmtIdent fld <> rawSQL " IS " <> rawSQL (if not hasNot then "NOT " else mempty) <> rawSQL "DISTINCT FROM NULL" +pgFmtFilter _ (CoercibleFilter _ (NoOpExpr _)) = emptyTracked -- TODO unreachable because NoOpExpr is filtered on QueryParams +pgFmtFilter table (CoercibleFilter fld (OpExpr hasNot oper)) = rawSQL notOp <> rawSQL " " <> pgFmtField table fld <> case oper of + Op op val -> rawSQL " " <> simpleOperator op <> rawSQL " " <> pgFmtUnknownLiteralForField (unknownLiteral val) fld - OpQuant op quant val -> " " <> quantOperator op <> " " <> case op of + OpQuant op quant val -> rawSQL " " <> quantOperator op <> rawSQL " " <> case op of OpLike -> fmtQuant quant $ unknownLiteral (T.map star val) OpILike -> fmtQuant quant $ unknownLiteral (T.map star val) _ -> fmtQuant quant $ pgFmtUnknownLiteralForField (unknownLiteral val) fld @@ -397,41 +450,41 @@ pgFmtFilter table (CoercibleFilter fld (OpExpr hasNot oper)) = notOp <> " " <> p -- The above can be fixed by using `PREPARE boolplan AS SELECT * FROM projects where id IS NOT DISTINCT FROM $1;` -- However that would not accept the TRUE/FALSE/NULL/"NOT NULL"/UNKNOWN keywords. See: https://stackoverflow.com/questions/6133525/proper-way-to-set-preparedstatement-parameter-to-null-under-postgres. -- This is why `IS` operands are whitelisted at the Parsers.hs level - Is isVal -> " IS " <> + Is isVal -> rawSQL " IS " <> case isVal of - IsNull -> "NULL" - IsNotNull -> "NOT NULL" - IsTriTrue -> "TRUE" - IsTriFalse -> "FALSE" - IsTriUnknown -> "UNKNOWN" + IsNull -> rawSQL "NULL" + IsNotNull -> rawSQL "NOT NULL" + IsTriTrue -> rawSQL "TRUE" + IsTriFalse -> rawSQL "FALSE" + IsTriUnknown -> rawSQL "UNKNOWN" - IsDistinctFrom val -> " IS DISTINCT FROM " <> unknownLiteral val + IsDistinctFrom val -> rawSQL " IS DISTINCT FROM " <> unknownLiteral val -- We don't use "IN", we use "= ANY". IN has the following disadvantages: -- + No way to use an empty value on IN: "col IN ()" is invalid syntax. With ANY we can do "= ANY('{}')" -- + Can invalidate prepared statements: multiple parameters on an IN($1, $2, $3) will lead to using different prepared statements and not take advantage of caching. 
- In vals -> " " <> case vals of - [""] -> "= ANY('{}') " - _ -> "= ANY (" <> pgFmtArrayLiteralForField vals fld <> ") " + In vals -> rawSQL " " <> case vals of + [""] -> rawSQL "= ANY('{}') " + _ -> rawSQL "= ANY (" <> pgFmtArrayLiteralForField vals fld <> rawSQL ") " - Fts op lang val -> " " <> ftsOperator op <> "(" <> pgFmtFtsLang lang <> unknownLiteral val <> ") " + Fts op lang val -> rawSQL " " <> ftsOperator op <> rawSQL "(" <> pgFmtFtsLang lang <> unknownLiteral val <> rawSQL ") " where notOp = if hasNot then "NOT" else mempty star c = if c == '*' then '%' else c fmtQuant q val = case q of - Just QuantAny -> "ANY(" <> val <> ")" - Just QuantAll -> "ALL(" <> val <> ")" + Just QuantAny -> rawSQL "ANY(" <> val <> rawSQL ")" + Just QuantAll -> rawSQL "ALL(" <> val <> rawSQL ")" Nothing -> val -pgFmtFtsLang :: Maybe Text -> SQL.Snippet -pgFmtFtsLang = maybe mempty (\l -> unknownLiteral l <> ", ") +pgFmtFtsLang :: Maybe Text -> TrackedSnippet +pgFmtFtsLang = maybe emptyTracked (\l -> unknownLiteral l <> rawSQL ", ") -pgFmtJoinCondition :: JoinCondition -> SQL.Snippet +pgFmtJoinCondition :: JoinCondition -> TrackedSnippet pgFmtJoinCondition (JoinCondition (qi1, col1) (qi2, col2)) = - pgFmtColumn qi1 col1 <> " = " <> pgFmtColumn qi2 col2 + pgFmtColumn qi1 col1 <> rawSQL " = " <> pgFmtColumn qi2 col2 -pgFmtLogicTree :: QualifiedIdentifier -> CoercibleLogicTree -> SQL.Snippet -pgFmtLogicTree qi (CoercibleExpr hasNot op forest) = SQL.sql notOp <> " (" <> intercalateSnippet (opSql op) (pgFmtLogicTree qi <$> forest) <> ")" +pgFmtLogicTree :: QualifiedIdentifier -> CoercibleLogicTree -> TrackedSnippet +pgFmtLogicTree qi (CoercibleExpr hasNot op forest) = rawSQL notOp <> rawSQL " (" <> intercalateSnippet (opSql op) (pgFmtLogicTree qi <$> forest) <> rawSQL ")" where notOp = if hasNot then "NOT" else mempty @@ -439,110 +492,110 @@ pgFmtLogicTree qi (CoercibleExpr hasNot op forest) = SQL.sql notOp <> " (" <> in opSql Or = " OR " pgFmtLogicTree qi (CoercibleStmnt flt) = pgFmtFilter qi flt -pgFmtJsonPath :: JsonPath -> SQL.Snippet +pgFmtJsonPath :: JsonPath -> TrackedSnippet pgFmtJsonPath = \case - [] -> mempty - (JArrow x:xs) -> "->" <> pgFmtJsonOperand x <> pgFmtJsonPath xs - (J2Arrow x:xs) -> "->>" <> pgFmtJsonOperand x <> pgFmtJsonPath xs - where + [] -> emptyTracked + (JArrow x:xs) -> rawSQL "->" <> pgFmtJsonOperand x <> pgFmtJsonPath xs + (J2Arrow x:xs) -> rawSQL "->>" <> pgFmtJsonOperand x <> pgFmtJsonPath xs + where pgFmtJsonOperand (JKey k) = unknownLiteral k - pgFmtJsonOperand (JIdx i) = unknownLiteral i <> "::int" + pgFmtJsonOperand (JIdx i) = unknownLiteral i <> rawSQL "::int" -pgFmtAs :: Maybe Alias -> SQL.Snippet -pgFmtAs Nothing = mempty -pgFmtAs (Just alias) = " AS " <> pgFmtIdent alias +pgFmtAs :: Maybe Alias -> TrackedSnippet +pgFmtAs Nothing = emptyTracked +pgFmtAs (Just alias) = rawSQL " AS " <> pgFmtIdent alias -groupF :: QualifiedIdentifier -> [CoercibleSelectField] -> [RelSelectField] -> SQL.Snippet +groupF :: QualifiedIdentifier -> [CoercibleSelectField] -> [RelSelectField] -> TrackedSnippet groupF qi select relSelect - | (noSelectsAreAggregated && noRelSelectsAreAggregated) || null groupTerms = mempty - | otherwise = " GROUP BY " <> intercalateSnippet ", " groupTerms - where + | (noSelectsAreAggregated && noRelSelectsAreAggregated) || null groupTerms = emptyTracked + | otherwise = rawSQL " GROUP BY " <> intercalateSnippet ", " groupTerms + where noSelectsAreAggregated = null $ [s | s@(CoercibleSelectField { csAggFunction = Just _ }) <- select] noRelSelectsAreAggregated = all 
(\case Spread sels _ -> all (isNothing . ssSelAggFunction) sels; _ -> True) relSelect groupTermsFromSelect = mapMaybe (pgFmtGroup qi) select groupTermsFromRelSelect = mapMaybe groupTermFromRelSelectField relSelect groupTerms = groupTermsFromSelect ++ groupTermsFromRelSelect -groupTermFromRelSelectField :: RelSelectField -> Maybe SQL.Snippet +groupTermFromRelSelectField :: RelSelectField -> Maybe TrackedSnippet groupTermFromRelSelectField (JsonEmbed { rsSelName }) = Just $ pgFmtIdent rsSelName -groupTermFromRelSelectField (Spread { rsSpreadSel, rsAggAlias }) = +groupTermFromRelSelectField (Spread{rsSpreadSel, rsAggAlias}) = if null groupTerms then Nothing else Just $ intercalateSnippet ", " groupTerms - where - processField :: SpreadSelectField -> Maybe SQL.Snippet + where + processField :: SpreadSelectField -> Maybe TrackedSnippet processField SpreadSelectField{ssSelAggFunction = Just _} = Nothing processField SpreadSelectField{ssSelName, ssSelAlias} = - Just $ pgFmtIdent rsAggAlias <> "." <> pgFmtIdent (fromMaybe ssSelName ssSelAlias) + Just $ pgFmtIdent rsAggAlias <> rawSQL "." <> pgFmtIdent (fromMaybe ssSelName ssSelAlias) groupTerms = mapMaybe processField rsSpreadSel -pgFmtGroup :: QualifiedIdentifier -> CoercibleSelectField -> Maybe SQL.Snippet +pgFmtGroup :: QualifiedIdentifier -> CoercibleSelectField -> Maybe TrackedSnippet pgFmtGroup _ CoercibleSelectField{csAggFunction=Just _} = Nothing pgFmtGroup _ CoercibleSelectField{csAlias=Just alias, csAggFunction=Nothing} = Just $ pgFmtIdent alias pgFmtGroup qi CoercibleSelectField{csField=fld, csAlias=Nothing, csAggFunction=Nothing} = Just $ pgFmtField qi fld -countF :: SQL.Snippet -> Bool -> (SQL.Snippet, SQL.Snippet) +countF :: TrackedSnippet -> Bool -> (TrackedSnippet, TrackedSnippet) countF countQuery shouldCount = if shouldCount then ( - ", pgrst_source_count AS (" <> countQuery <> ")" - , "(SELECT pg_catalog.count(*) FROM pgrst_source_count)" ) + rawSQL ", pgrst_source_count AS (" <> countQuery <> rawSQL ")" + , rawSQL "(SELECT pg_catalog.count(*) FROM pgrst_source_count)" ) else ( - mempty - , "null::bigint") + emptyTracked + , rawSQL "null::bigint") -returningF :: QualifiedIdentifier -> [FieldName] -> SQL.Snippet +returningF :: QualifiedIdentifier -> [FieldName] -> TrackedSnippet returningF qi returnings = if null returnings - then "RETURNING 1" -- For mutation cases where there's no ?select, we return 1 to know how many rows were modified - else "RETURNING " <> intercalateSnippet ", " (pgFmtColumn qi <$> returnings) + then rawSQL "RETURNING 1" -- For mutation cases where there's no ?select, we return 1 to know how many rows were modified + else rawSQL "RETURNING " <> intercalateSnippet ", " (pgFmtColumn qi <$> returnings) -limitOffsetF :: NonnegRange -> SQL.Snippet +limitOffsetF :: NonnegRange -> TrackedSnippet limitOffsetF range = - if range == allRange then mempty else "LIMIT " <> limit <> " OFFSET " <> offset + if range == allRange then emptyTracked else rawSQL "LIMIT " <> limit <> rawSQL " OFFSET " <> offset where - limit = maybe "ALL" (\l -> unknownEncoder (BS.pack $ show l)) $ rangeLimit range + limit = maybe (rawSQL "ALL") (\l -> unknownEncoder (BS.pack $ show l)) $ rangeLimit range offset = unknownEncoder (BS.pack . 
show $ rangeOffset range) -responseHeadersF :: SQL.Snippet -responseHeadersF = currentSettingF "response.headers" +responseHeadersF :: TrackedSnippet +responseHeadersF = currentSettingF (rawSQL "response.headers") -responseStatusF :: SQL.Snippet -responseStatusF = currentSettingF "response.status" +responseStatusF :: TrackedSnippet +responseStatusF = currentSettingF (rawSQL "response.status") -addConfigPgrstInserted :: Bool -> SQL.Snippet +addConfigPgrstInserted :: Bool -> TrackedSnippet addConfigPgrstInserted add = - let (symbol, num) = if add then ("+", "0") else ("-", "-1") in - "set_config('pgrst.inserted', (coalesce(" <> currentSettingF "pgrst.inserted" <> "::int, 0) " <> symbol <> " 1)::text, true) <> '" <> num <> "'" + let (symbol, num) = if add then (rawSQL "+", rawSQL "0") else (rawSQL "-", rawSQL "-1") in + rawSQL "set_config('pgrst.inserted', (coalesce(" <> currentSettingF (rawSQL "pgrst.inserted") <> rawSQL "::int, 0) " <> symbol <> rawSQL " 1)::text, true) <> '" <> num <> rawSQL "'" -currentSettingF :: SQL.Snippet -> SQL.Snippet +currentSettingF :: TrackedSnippet -> TrackedSnippet currentSettingF setting = -- nullif is used because of https://gist.github.com/steve-chavez/8d7033ea5655096903f3b52f8ed09a15 - "nullif(current_setting('" <> setting <> "', true), '')" + rawSQL "nullif(current_setting('" <> setting <> rawSQL "', true), '')" -orderF :: QualifiedIdentifier -> [CoercibleOrderTerm] -> SQL.Snippet -orderF _ [] = mempty -orderF qi ordts = "ORDER BY " <> intercalateSnippet ", " (pgFmtOrderTerm qi <$> ordts) +orderF :: QualifiedIdentifier -> [CoercibleOrderTerm] -> TrackedSnippet +orderF _ [] = emptyTracked +orderF qi ordts = rawSQL "ORDER BY " <> intercalateSnippet ", " (pgFmtOrderTerm qi <$> ordts) -- Hasql Snippet utilities -unknownEncoder :: ByteString -> SQL.Snippet -unknownEncoder = SQL.encoderAndParam (HE.nonNullable HE.unknown) +unknownEncoder :: ByteString -> TrackedSnippet +unknownEncoder param = TrackedSnippet (SQL.encoderAndParam (HE.nonNullable HE.unknown) param) [Just param] -unknownLiteral :: Text -> SQL.Snippet +unknownLiteral :: Text -> TrackedSnippet unknownLiteral = unknownEncoder . encodeUtf8 -intercalateSnippet :: ByteString -> [SQL.Snippet] -> SQL.Snippet -intercalateSnippet _ [] = mempty -intercalateSnippet frag snippets = foldr1 (\a b -> a <> SQL.sql frag <> b) snippets +intercalateSnippet :: ByteString -> [TrackedSnippet] -> TrackedSnippet +intercalateSnippet _ [] = emptyTracked +intercalateSnippet frag snippets = foldr1 (\a b -> a <> rawSQL frag <> b) snippets -explainF :: MTVndPlanFormat -> [MTVndPlanOption] -> SQL.Snippet -> SQL.Snippet -explainF fmt opts snip = - "EXPLAIN (" <> +explainF :: MTVndPlanFormat -> [MTVndPlanOption] -> TrackedSnippet -> TrackedSnippet +explainF fmt opts (TrackedSnippet snip params) = TrackedSnippet + ( "EXPLAIN (" <> SQL.sql (BS.intercalate ", " (fmtPlanFmt fmt : (fmtPlanOpt <$> opts))) <> - ") " <> snip - where + ") " <> snip ) params + where fmtPlanOpt :: MTVndPlanOption -> BS.ByteString fmtPlanOpt PlanAnalyze = "ANALYZE" fmtPlanOpt PlanVerbose = "VERBOSE" @@ -554,22 +607,22 @@ explainF fmt opts snip = fmtPlanFmt PlanJSON = "FORMAT JSON" -- | Do a pg set_config(setting, value, true) call. This is equivalent to a SET LOCAL. 
-setConfigLocal :: (SQL.Snippet, ByteString) -> SQL.Snippet +setConfigLocal :: (TrackedSnippet, ByteString) -> TrackedSnippet setConfigLocal (k, v) = - "set_config(" <> k <> ", " <> unknownEncoder v <> ", true)" + rawSQL "set_config(" <> k <> rawSQL ", " <> unknownEncoder v <> rawSQL ", true)" -- | For when the settings are hardcoded and not parameterized -setConfigWithConstantName :: (SQL.Snippet, ByteString) -> SQL.Snippet -setConfigWithConstantName (k, v) = setConfigLocal ("'" <> k <> "'", v) +setConfigWithConstantName :: (SQL.Snippet, ByteString) -> TrackedSnippet +setConfigWithConstantName (k, v) = setConfigLocal (TrackedSnippet ("'" <> k <> "'") [], v) -- | For when the settings need to be parameterized -setConfigWithDynamicName :: (ByteString, ByteString) -> SQL.Snippet +setConfigWithDynamicName :: (ByteString, ByteString) -> TrackedSnippet setConfigWithDynamicName (k, v) = setConfigLocal (unknownEncoder k, v) -- | Starting from PostgreSQL v14, some characters are not allowed for config names (mostly affecting headers with "-"). -- | A JSON format string is used to avoid this problem. See https://github.com/PostgREST/postgrest/issues/1857 -setConfigWithConstantNameJSON :: SQL.Snippet -> [(ByteString, ByteString)] -> [SQL.Snippet] +setConfigWithConstantNameJSON :: SQL.Snippet -> [(ByteString, ByteString)] -> [TrackedSnippet] setConfigWithConstantNameJSON prefix keyVals = [setConfigWithConstantName (prefix, gucJsonVal keyVals)] where gucJsonVal :: [(ByteString, ByteString)] -> ByteString @@ -577,7 +630,7 @@ setConfigWithConstantNameJSON prefix keyVals = [setConfigWithConstantName (prefi arrayByteStringToText :: [(ByteString, ByteString)] -> [(Text,Text)] arrayByteStringToText keyVal = (T.decodeUtf8 *** T.decodeUtf8) <$> keyVal -handlerF :: Maybe Routine -> MediaHandler -> SQL.Snippet +handlerF :: Maybe Routine -> MediaHandler -> TrackedSnippet handlerF rout = \case BuiltinAggArrayJsonStrip -> asJsonF rout True BuiltinAggSingleJson strip -> asJsonSingleF rout strip @@ -585,4 +638,4 @@ handlerF rout = \case BuiltinOvAggGeoJson -> asGeoJsonF BuiltinOvAggCsv -> asCsvF CustomFunc funcQi target -> customFuncF rout funcQi target - NoAgg -> "''::text" + NoAgg -> rawSQL "''::text" diff --git a/src/PostgREST/Query/Statements.hs b/src/PostgREST/Query/Statements.hs index 67b6c36129..c13b523961 100644 --- a/src/PostgREST/Query/Statements.hs +++ b/src/PostgREST/Query/Statements.hs @@ -18,7 +18,6 @@ module PostgREST.Query.Statements import qualified Data.Aeson.Lens as L import qualified Data.ByteString.Char8 as BS import qualified Hasql.Decoders as HD -import qualified Hasql.DynamicStatements.Snippet as SQL import qualified Hasql.DynamicStatements.Statement as SQL import qualified Hasql.Statement as SQL @@ -55,38 +54,39 @@ data ResultSet | RSPlan BS.ByteString -- ^ the plan of the query -prepareWrite :: SQL.Snippet -> SQL.Snippet -> Bool -> Bool -> MediaType -> MediaHandler -> - Maybe PreferRepresentation -> Maybe PreferResolution -> [Text] -> Bool -> (SQL.Statement () ResultSet, ByteString) +prepareWrite :: TrackedSnippet -> TrackedSnippet -> Bool -> Bool -> MediaType -> MediaHandler -> + Maybe PreferRepresentation -> Maybe PreferResolution -> [Text] -> Bool -> (SQL.Statement () ResultSet, ByteString, [Maybe ByteString]) prepareWrite selectQuery mutateQuery isInsert isPut mt handler rep resolution pKeys prepared = - (result, sql) + (result, sql, params finalQuery) where - result@(SQL.Statement sql _ _ _) = SQL.dynamicallyParameterized (mtSnippet mt snippet) decodeIt prepared - checkUpsert snip = if 
isInsert && (isPut || resolution == Just MergeDuplicates) then snip else "''" - pgrstInsertedF = checkUpsert "nullif(current_setting('pgrst.inserted', true),'')::int" - snippet = - "WITH " <> sourceCTE <> " AS (" <> mutateQuery <> ") " <> - "SELECT " <> - "'' AS total_result_set, " <> - "pg_catalog.count(_postgrest_t) AS page_total, " <> - locF <> " AS header, " <> - handlerF Nothing handler <> " AS body, " <> - responseHeadersF <> " AS response_headers, " <> - responseStatusF <> " AS response_status, " <> - pgrstInsertedF <> " AS response_inserted " <> - "FROM (" <> selectF <> ") _postgrest_t" + result@(SQL.Statement sql _ _ _) = SQL.dynamicallyParameterized (toSnippet finalQuery) decodeIt prepared + checkUpsert snip = if isInsert && (isPut || resolution == Just MergeDuplicates) then snip else rawSQL "''" + pgrstInsertedF = checkUpsert $ rawSQL "nullif(current_setting('pgrst.inserted', true),'')::int" + finalQuery = mtSnippet mt querySnippet + querySnippet = + rawSQL "WITH " <> sourceCTE <> rawSQL " AS (" <> mutateQuery <> rawSQL ") " <> + rawSQL "SELECT " <> + rawSQL "'' AS total_result_set, " <> + rawSQL "pg_catalog.count(_postgrest_t) AS page_total, " <> + locF <> rawSQL " AS header, " <> + handlerF Nothing handler <> rawSQL " AS body, " <> + responseHeadersF <> rawSQL " AS response_headers, " <> + responseStatusF <> rawSQL " AS response_status, " <> + pgrstInsertedF <> rawSQL " AS response_inserted " <> + rawSQL "FROM (" <> selectF <> rawSQL ") _postgrest_t" locF = if isInsert && rep == Just HeadersOnly then - "CASE WHEN pg_catalog.count(_postgrest_t) = 1 " <> - "THEN coalesce(" <> locationF pKeys <> ", " <> noLocationF <> ") " <> - "ELSE " <> noLocationF <> " " <> - "END" + rawSQL "CASE WHEN pg_catalog.count(_postgrest_t) = 1 " <> + rawSQL "THEN coalesce(" <> locationF pKeys <> rawSQL ", " <> noLocationF <> rawSQL ") " <> + rawSQL "ELSE " <> noLocationF <> rawSQL " " <> + rawSQL "END" else noLocationF selectF -- prevent using any of the column names in ?select= when no response is returned from the CTE - | handler == NoAgg = "SELECT * FROM " <> sourceCTE + | handler == NoAgg = rawSQL "SELECT * FROM " <> sourceCTE | otherwise = selectQuery decodeIt :: HD.Result ResultSet @@ -94,22 +94,23 @@ prepareWrite selectQuery mutateQuery isInsert isPut mt handler rep resolution pK MTVndPlan{} -> planRow _ -> fromMaybe (RSStandard Nothing 0 mempty mempty Nothing Nothing Nothing) <$> HD.rowMaybe (standardRow False) -prepareRead :: SQL.Snippet -> SQL.Snippet -> Bool -> MediaType -> MediaHandler -> Bool -> (SQL.Statement () ResultSet, ByteString) +prepareRead :: TrackedSnippet -> TrackedSnippet -> Bool -> MediaType -> MediaHandler -> Bool -> (SQL.Statement () ResultSet, ByteString, [Maybe ByteString]) prepareRead selectQuery countQuery countTotal mt handler prepared = - (result, sql) + (result, sql, params finalQuery) where - result@(SQL.Statement sql _ _ _) = SQL.dynamicallyParameterized (mtSnippet mt snippet) decodeIt prepared - snippet = - "WITH " <> sourceCTE <> " AS ( " <> selectQuery <> " ) " <> - countCTEF <> " " <> - "SELECT " <> - countResultF <> " AS total_result_set, " <> - "pg_catalog.count(_postgrest_t) AS page_total, " <> - handlerF Nothing handler <> " AS body, " <> - responseHeadersF <> " AS response_headers, " <> - responseStatusF <> " AS response_status, " <> - "''" <> " AS response_inserted " <> - "FROM ( SELECT * FROM " <> sourceCTE <> " ) _postgrest_t" + result@(SQL.Statement sql _ _ _) = SQL.dynamicallyParameterized (toSnippet finalQuery) decodeIt prepared + finalQuery = 
mtSnippet mt querySnippet + querySnippet = + rawSQL "WITH " <> sourceCTE <> rawSQL " AS ( " <> selectQuery <> rawSQL " ) " <> + countCTEF <> rawSQL " " <> + rawSQL "SELECT " <> + countResultF <> rawSQL " AS total_result_set, " <> + rawSQL "pg_catalog.count(_postgrest_t) AS page_total, " <> + handlerF Nothing handler <> rawSQL " AS body, " <> + responseHeadersF <> rawSQL " AS response_headers, " <> + responseStatusF <> rawSQL " AS response_status, " <> + rawSQL "''" <> rawSQL " AS response_inserted " <> + rawSQL "FROM ( SELECT * FROM " <> sourceCTE <> rawSQL " ) _postgrest_t" (countCTEF, countResultF) = countF countQuery countTotal @@ -118,26 +119,27 @@ prepareRead selectQuery countQuery countTotal mt handler prepared = MTVndPlan{} -> planRow _ -> HD.singleRow $ standardRow True -prepareCall :: Routine -> SQL.Snippet -> SQL.Snippet -> SQL.Snippet -> Bool -> +prepareCall :: Routine -> TrackedSnippet -> TrackedSnippet -> TrackedSnippet -> Bool -> MediaType -> MediaHandler -> Bool -> - (SQL.Statement () ResultSet, ByteString) + (SQL.Statement () ResultSet, ByteString, [Maybe ByteString]) prepareCall rout callProcQuery selectQuery countQuery countTotal mt handler prepared = - (result, sql) + (result, sql, params finalQuery) where - result@(SQL.Statement sql _ _ _) = SQL.dynamicallyParameterized (mtSnippet mt snippet) decodeIt prepared - snippet = - "WITH " <> sourceCTE <> " AS (" <> callProcQuery <> ") " <> + result@(SQL.Statement sql _ _ _) = SQL.dynamicallyParameterized (toSnippet finalQuery) decodeIt prepared + finalQuery = mtSnippet mt querySnippet + querySnippet = + rawSQL "WITH " <> sourceCTE <> rawSQL " AS (" <> callProcQuery <> rawSQL ") " <> countCTEF <> - "SELECT " <> - countResultF <> " AS total_result_set, " <> + rawSQL "SELECT " <> + countResultF <> rawSQL " AS total_result_set, " <> (if funcReturnsSingle rout - then "1" - else "pg_catalog.count(_postgrest_t)") <> " AS page_total, " <> - handlerF (Just rout) handler <> " AS body, " <> - responseHeadersF <> " AS response_headers, " <> - responseStatusF <> " AS response_status, " <> - "''" <> " AS response_inserted " <> - "FROM (" <> selectQuery <> ") _postgrest_t" + then rawSQL "1" + else rawSQL "pg_catalog.count(_postgrest_t)") <> rawSQL " AS page_total, " <> + handlerF (Just rout) handler <> rawSQL " AS body, " <> + responseHeadersF <> rawSQL " AS response_headers, " <> + responseStatusF <> rawSQL " AS response_status, " <> + rawSQL "''" <> rawSQL " AS response_inserted " <> + rawSQL "FROM (" <> selectQuery <> rawSQL ") _postgrest_t" (countCTEF, countResultF) = countF countQuery countTotal @@ -146,11 +148,11 @@ prepareCall rout callProcQuery selectQuery countQuery countTotal mt handler prep MTVndPlan{} -> planRow _ -> fromMaybe (RSStandard (Just 0) 0 mempty mempty Nothing Nothing Nothing) <$> HD.rowMaybe (standardRow True) -preparePlanRows :: SQL.Snippet -> Bool -> SQL.Statement () (Maybe Int64) +preparePlanRows :: TrackedSnippet -> Bool -> SQL.Statement () (Maybe Int64) preparePlanRows countQuery = - SQL.dynamicallyParameterized snippet decodeIt + SQL.dynamicallyParameterized (toSnippet explainSnippet) decodeIt where - snippet = explainF PlanJSON mempty countQuery + explainSnippet = explainF PlanJSON mempty countQuery decodeIt :: HD.Result (Maybe Int64) decodeIt = let row = HD.singleRow $ column HD.bytea in @@ -170,10 +172,10 @@ standardRow noLocation = let (k, v) = BS.break (== '=') kv in (k, BS.tail v) -mtSnippet :: MediaType -> SQL.Snippet -> SQL.Snippet -mtSnippet mediaType snippet = case mediaType of - MTVndPlan _ fmt opts 
-> explainF fmt opts snippet - _ -> snippet +mtSnippet :: MediaType -> TrackedSnippet -> TrackedSnippet +mtSnippet mediaType sqlSnippet = case mediaType of + MTVndPlan _ fmt opts -> explainF fmt opts sqlSnippet + _ -> sqlSnippet -- | We use rowList because when doing EXPLAIN (FORMAT TEXT), the result comes as many rows. FORMAT JSON comes as one. planRow :: HD.Result ResultSet diff --git a/src/PostgREST/Response.hs b/src/PostgREST/Response.hs index 5b098bf211..0617041ba1 100644 --- a/src/PostgREST/Response.hs +++ b/src/PostgREST/Response.hs @@ -63,7 +63,23 @@ data PgrstResponse = PgrstResponse { actionResponse :: QueryResult -> ApiRequest -> (Text, Text) -> AppConfig -> SchemaCache -> Schema -> Bool -> Either Error.Error PgrstResponse -actionResponse (DbCrudResult WrappedReadPlan{wrMedia, wrHdrsOnly=headersOnly, crudQi=identifier} resultSet) ctxApiRequest@ApiRequest{iPreferences=Preferences{..},..} _ _ _ _ _ = +actionResponse (RawSQLResult rawSQL body) _ _ _ _ _ _ = + Right $ PgrstResponse HTTP.status200 + [MediaType.toContentType MTApplicationJSON] + ( JSON.encode + ( do + let sqlText = toS rawSQL :: Text + let bodyTexts = map (maybe "" toS) body :: [Text] + let obj = + HM.fromList + [ ("sql" :: Text, JSON.toJSON sqlText) + , ("params" :: Text, JSON.toJSON bodyTexts) + ] :: + HM.HashMap Text JSON.Value + obj + ) + ) +actionResponse (DbCrudResult WrappedReadPlan{wrMedia, wrHdrsOnly = headersOnly, crudQi = identifier} resultSet) ctxApiRequest@ApiRequest{iPreferences = Preferences{..}, ..} _ _ _ _ _ = case resultSet of RSStandard{..} -> do let diff --git a/src/PostgREST/SchemaCache.hs b/src/PostgREST/SchemaCache.hs index 5c17f64426..fb175cfe5c 100644 --- a/src/PostgREST/SchemaCache.hs +++ b/src/PostgREST/SchemaCache.hs @@ -1041,10 +1041,11 @@ allViewsKeyDependencies = initialMediaHandlers :: MediaHandlerMap initialMediaHandlers = - HM.insert (RelAnyElement, MediaType.MTAny ) (BuiltinOvAggJson, MediaType.MTApplicationJSON) $ - HM.insert (RelAnyElement, MediaType.MTApplicationJSON) (BuiltinOvAggJson, MediaType.MTApplicationJSON) $ - HM.insert (RelAnyElement, MediaType.MTTextCSV ) (BuiltinOvAggCsv, MediaType.MTTextCSV) $ - HM.insert (RelAnyElement, MediaType.MTGeoJSON ) (BuiltinOvAggGeoJson, MediaType.MTGeoJSON) + HM.insert (RelAnyElement, MediaType.MTAny ) (BuiltinOvAggJson, MediaType.MTApplicationJSON) $ + HM.insert (RelAnyElement, MediaType.MTApplicationJSON ) (BuiltinOvAggJson, MediaType.MTApplicationJSON) $ + HM.insert (RelAnyElement, MediaType.MTApplicationJSONSQL) (NoAgg, MediaType.MTApplicationJSONSQL) $ + HM.insert (RelAnyElement, MediaType.MTTextCSV ) (BuiltinOvAggCsv, MediaType.MTTextCSV) $ + HM.insert (RelAnyElement, MediaType.MTGeoJSON ) (BuiltinOvAggGeoJson, MediaType.MTGeoJSON) HM.empty mediaHandlers :: Bool -> SQL.Statement AppConfig MediaHandlerMap diff --git a/test/io/__snapshots__/test_cli/test_schema_cache_snapshot[dbMediaHandlers].yaml b/test/io/__snapshots__/test_cli/test_schema_cache_snapshot[dbMediaHandlers].yaml index 7b63171885..b5ac0b068b 100644 --- a/test/io/__snapshots__/test_cli/test_schema_cache_snapshot[dbMediaHandlers].yaml +++ b/test/io/__snapshots__/test_cli/test_schema_cache_snapshot[dbMediaHandlers].yaml @@ -1,12 +1,7 @@ - - - tag: RelAnyElement - - tag: MTGeoJSON - - - tag: BuiltinOvAggGeoJson - - tag: MTGeoJSON - -- - - tag: RelAnyElement - - tag: MTApplicationJSON - - - tag: BuiltinOvAggJson - - tag: MTApplicationJSON + - tag: MTApplicationJSONSQL + - - tag: NoAgg + - tag: MTApplicationJSONSQL - - - tag: RelAnyElement - tag: MTTextCSV @@ -17,3 +12,13 @@ - 
tag: MTAny - - tag: BuiltinOvAggJson - tag: MTApplicationJSON + +- - - tag: RelAnyElement + - tag: MTApplicationJSON + - - tag: BuiltinOvAggJson + - tag: MTApplicationJSON + +- - - tag: RelAnyElement + - tag: MTGeoJSON + - - tag: BuiltinOvAggGeoJson + - tag: MTGeoJSON diff --git a/test/spec/Feature/Query/RawSQLSpec.hs b/test/spec/Feature/Query/RawSQLSpec.hs new file mode 100644 index 0000000000..28eaeb6930 --- /dev/null +++ b/test/spec/Feature/Query/RawSQLSpec.hs @@ -0,0 +1,43 @@ +module Feature.Query.RawSQLSpec where + +import Network.Wai (Application) + +import Network.HTTP.Types +import Network.Wai.Test (SResponse (simpleHeaders, simpleStatus)) +import Test.Hspec +import Test.Hspec.Wai +import Test.Hspec.Wai.JSON + +import Protolude +import SpecHelper (acceptHdrs) + +spec :: SpecWith ((), Application) +spec = describe "When accept header is set to application/json+sql" $ do + let mtApplicationJSONSQLHdrs = acceptHdrs "application/json+sql" + + it "responds with raw sql to a GET request" $ do + r <- request methodGet "/items?id=eq.1" mtApplicationJSONSQLHdrs "" + liftIO $ do + simpleStatus r `shouldBe` status200 + simpleHeaders r `shouldContain` [("Content-Type", "application/json; charset=utf-8")] + + it "responds with raw sql to a POST request for insert" $ do + r <- request methodPost "/projects" + mtApplicationJSONSQLHdrs [json|{"id":100, "name": "Project 100"}|] + liftIO $ do + simpleStatus r `shouldBe` status200 + simpleHeaders r `shouldContain` [("Content-Type", "application/json; charset=utf-8")] + + it "responds with raw sql to a PATCH request for update" $ do + r <- request methodPatch "/projects?id=eq.3" + mtApplicationJSONSQLHdrs [json|{"name": "Patched Project"}|] + liftIO $ do + simpleStatus r `shouldBe` status200 + simpleHeaders r `shouldContain` [("Content-Type", "application/json; charset=utf-8")] + + it "responds with raw sql to a DELETE request" $ do + r <- request methodDelete "/projects?id=in.(1,2,3)" + mtApplicationJSONSQLHdrs "" + liftIO $ do + simpleStatus r `shouldBe` status200 + simpleHeaders r `shouldContain` [("Content-Type", "application/json; charset=utf-8")] diff --git a/test/spec/Main.hs b/test/spec/Main.hs index 16c5f39304..bb6b52b2c8 100644 --- a/test/spec/Main.hs +++ b/test/spec/Main.hs @@ -58,6 +58,7 @@ import qualified Feature.Query.QueryLimitedSpec import qualified Feature.Query.QuerySpec import qualified Feature.Query.RangeSpec import qualified Feature.Query.RawOutputTypesSpec +import qualified Feature.Query.RawSQLSpec import qualified Feature.Query.RelatedQueriesSpec import qualified Feature.Query.RpcSpec import qualified Feature.Query.ServerTimingSpec @@ -158,6 +159,7 @@ main = do , ("Feature.Query.PreferencesSpec" , Feature.Query.PreferencesSpec.spec) , ("Feature.Query.QuerySpec" , Feature.Query.QuerySpec.spec) , ("Feature.Query.RawOutputTypesSpec" , Feature.Query.RawOutputTypesSpec.spec) + , ("Feature.Query.RawSQLSpec" , Feature.Query.RawSQLSpec.spec) , ("Feature.Query.RelatedQueriesSpec" , Feature.Query.RelatedQueriesSpec.spec) , ("Feature.Query.RpcSpec" , Feature.Query.RpcSpec.spec) , ("Feature.Query.SingularSpec" , Feature.Query.SingularSpec.spec)
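For context on the new TrackedSnippet type: the hunks above only show its call sites (rawSQL, emptyTracked, unknownEncoder, toSnippet, params, intercalateSnippet); the data declaration in PostgREST.Query.SqlFragment is added elsewhere in this change and is not reproduced here. The following is a minimal sketch, assuming record accessors named toSnippet and params and a Semigroup instance that concatenates both the SQL and the tracked parameters; it illustrates the idea and is not the committed implementation.

-- Sketch only: field names and instances are assumptions inferred from the
-- call sites in the diff above, not the actual PostgREST source.
{-# LANGUAGE OverloadedStrings #-}
module TrackedSnippetSketch where

import           Data.ByteString                 (ByteString)
import qualified Hasql.DynamicStatements.Snippet as SQL
import qualified Hasql.Encoders                  as HE

-- A dynamic SQL snippet paired with the parameters spliced into it, so the
-- statement text and its parameter list can later be echoed back as JSON.
data TrackedSnippet = TrackedSnippet
  { toSnippet :: SQL.Snippet        -- ^ the snippet actually handed to hasql
  , params    :: [Maybe ByteString] -- ^ parameters in splice order
  }

instance Semigroup TrackedSnippet where
  TrackedSnippet s1 p1 <> TrackedSnippet s2 p2 = TrackedSnippet (s1 <> s2) (p1 <> p2)

instance Monoid TrackedSnippet where
  mempty = emptyTracked

-- Literal SQL fragments carry no parameters.
rawSQL :: ByteString -> TrackedSnippet
rawSQL bs = TrackedSnippet (SQL.sql bs) []

emptyTracked :: TrackedSnippet
emptyTracked = TrackedSnippet mempty []

-- A value is encoded as a statement parameter and also recorded in the
-- tracked list, mirroring the unknownEncoder change in the diff.
unknownEncoder :: ByteString -> TrackedSnippet
unknownEncoder p =
  TrackedSnippet (SQL.encoderAndParam (HE.nonNullable HE.unknown) p) [Just p]

With parameters tracked this way, prepareRead, prepareWrite and prepareCall return the parameter list alongside the generated SQL, and the RawSQLResult branch in Response.hs serializes both into a JSON object with "sql" and "params" keys for requests negotiated as application/json+sql, which is what the new RawSQLSpec tests exercise.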